org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector Java Examples

The following examples show how to use org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: OrcBatchReader.java    From Flink-CEPplus with Apache License 2.0 6 votes vote down vote up
/**
 * Reads a {@link DoubleColumnVector} that is known to contain no nulls and
 * writes the converted values into {@code vals}.
 *
 * @param vals       target array; holds plain objects, or Rows when fieldIdx >= 0
 * @param fieldIdx   field position inside each Row, or -1 to store values directly
 * @param vector     non-null source column vector
 * @param childCount number of entries to read
 * @param reader     converts each primitive double into the target type
 */
private static <T> void readNonNullDoubleColumn(Object[] vals, int fieldIdx, DoubleColumnVector vector,
												int childCount, DoubleFunction<T> reader) {

	if (vector.isRepeating) {
		// A repeating vector stores one value that is valid for the whole batch.
		fillColumnWithRepeatingValue(vals, fieldIdx, reader.apply(vector.vector[0]), childCount);
		return;
	}

	if (fieldIdx == -1) {
		// Store converted values directly as objects.
		for (int pos = 0; pos < childCount; pos++) {
			vals[pos] = reader.apply(vector.vector[pos]);
		}
	} else {
		// Store converted values into the target field of each pre-allocated Row.
		Row[] rows = (Row[]) vals;
		for (int pos = 0; pos < childCount; pos++) {
			rows[pos].setField(fieldIdx, reader.apply(vector.vector[pos]));
		}
	}
}
 
Example #2
Source File: OrcBatchReader.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Copies the (null-free) contents of a DoubleColumnVector into {@code vals},
 * converting each primitive double with {@code reader}.
 *
 * @param vals       output array: objects, or Rows when fieldIdx >= 0
 * @param fieldIdx   Row field to populate, or -1 to write values directly
 * @param vector     source vector, guaranteed to hold no null entries
 * @param childCount number of values to copy
 * @param reader     double-to-T conversion function
 */
private static <T> void readNonNullDoubleColumn(Object[] vals, int fieldIdx, DoubleColumnVector vector,
												int childCount, DoubleFunction<T> reader) {

	// Repeating vectors hold a single value for the entire batch.
	if (vector.isRepeating) {
		fillColumnWithRepeatingValue(vals, fieldIdx, reader.apply(vector.vector[0]), childCount);
	} else if (fieldIdx == -1) {
		// Plain object column: convert and copy one-to-one.
		for (int idx = 0; idx < childCount; idx++) {
			vals[idx] = reader.apply(vector.vector[idx]);
		}
	} else {
		// Row column: write each converted value into the requested field.
		Row[] rows = (Row[]) vals;
		for (int idx = 0; idx < childCount; idx++) {
			rows[idx].setField(fieldIdx, reader.apply(vector.vector[idx]));
		}
	}
}
 
Example #3
Source File: OrcBatchReader.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Transfers values from a null-free DoubleColumnVector into {@code vals},
 * applying {@code reader} to convert each primitive double.
 *
 * @param vals       destination: plain objects, or Rows when fieldIdx >= 0
 * @param fieldIdx   index of the Row field to set, or -1 for direct storage
 * @param vector     source vector without null entries
 * @param childCount number of entries to transfer
 * @param reader     conversion from double to the element type
 */
private static <T> void readNonNullDoubleColumn(Object[] vals, int fieldIdx, DoubleColumnVector vector,
												int childCount, DoubleFunction<T> reader) {

	double[] source = vector.vector;
	if (vector.isRepeating) {
		// All rows share source[0]; delegate the fan-out to the shared helper.
		T single = reader.apply(source[0]);
		fillColumnWithRepeatingValue(vals, fieldIdx, single, childCount);
		return;
	}
	boolean asObjects = (fieldIdx == -1);
	Row[] rows = asObjects ? null : (Row[]) vals;
	for (int r = 0; r < childCount; r++) {
		T converted = reader.apply(source[r]);
		if (asObjects) {
			vals[r] = converted;          // store directly as an object
		} else {
			rows[r].setField(fieldIdx, converted); // store as a Row field
		}
	}
}
 
Example #4
Source File: OrcBatchReader.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
/**
 * Reads a DoubleColumnVector that may contain nulls and writes converted
 * values (or null) into {@code vals}.
 *
 * @param vals       destination: plain objects, or Rows when fieldIdx >= 0
 * @param fieldIdx   Row field to populate, or -1 for direct storage
 * @param vector     source vector, possibly containing nulls
 * @param childCount number of entries to read
 * @param reader     double-to-T conversion function
 */
private static <T> void readDoubleColumn(Object[] vals, int fieldIdx, DoubleColumnVector vector,
											int childCount, DoubleFunction<T> reader) {

	if (vector.isRepeating) {
		// One entry represents the whole column: either all null or all the same value.
		if (vector.isNull[0]) {
			fillColumnWithRepeatingValue(vals, fieldIdx, null, childCount);
		} else {
			readNonNullDoubleColumn(vals, fieldIdx, vector, childCount, reader);
		}
		return;
	}

	final boolean[] nullFlags = vector.isNull;
	if (fieldIdx == -1) {
		// Store converted values (or null) directly as objects.
		for (int pos = 0; pos < childCount; pos++) {
			if (nullFlags[pos]) {
				vals[pos] = null;
			} else {
				vals[pos] = reader.apply(vector.vector[pos]);
			}
		}
	} else {
		// Store converted values (or null) into the given field of each Row.
		final Row[] rows = (Row[]) vals;
		for (int pos = 0; pos < childCount; pos++) {
			if (nullFlags[pos]) {
				rows[pos].setField(fieldIdx, null);
			} else {
				rows[pos].setField(fieldIdx, reader.apply(vector.vector[pos]));
			}
		}
	}
}
 
Example #5
Source File: VectorColumnFiller.java    From secor with Apache License 2.0 5 votes vote down vote up
/**
 * Writes a JSON numeric value into the given column vector at {@code row}.
 * A missing or JSON-null value marks the row as null; otherwise the value is
 * stored as a double.
 *
 * @param value JSON element to convert; may be null or JsonNull
 * @param vect  target vector, expected to be a DoubleColumnVector
 * @param row   row index within the batch
 */
public void convert(JsonElement value, ColumnVector vect, int row) {
    if (value == null || value.isJsonNull()) {
        vect.noNulls = false;
        vect.isNull[row] = true;
    } else {
        DoubleColumnVector vector = (DoubleColumnVector) vect;
        // BUGFIX: clear any stale null flag left over from a reused batch,
        // otherwise a previously-null row would stay null despite the new value.
        vector.isNull[row] = false;
        vector.vector[row] = value.getAsDouble();
    }
}
 
Example #6
Source File: AbstractOrcColumnVector.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Builds a repeating DoubleColumnVector of the given batch size holding a
 * single constant value, or an all-null vector when {@code value} is null.
 *
 * @param batchSize capacity of the vector
 * @param value     constant to repeat, or null for an all-null column
 * @return the initialized vector
 */
private static DoubleColumnVector createDoubleVector(int batchSize, Object value) {
	DoubleColumnVector result = new DoubleColumnVector(batchSize);
	if (value != null) {
		// fill() copies the value into the vector and marks it repeating.
		result.fill(((Number) value).doubleValue());
		result.isNull[0] = false;
	} else {
		// Encode "every row is null" as a repeating null entry.
		result.isRepeating = true;
		result.noNulls = false;
		result.isNull[0] = true;
	}
	return result;
}
 
Example #7
Source File: OrcBatchReader.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Reads a possibly-nullable DoubleColumnVector into {@code vals}, converting
 * non-null entries with {@code reader} and writing null for null entries.
 *
 * @param vals       output array: objects, or Rows when fieldIdx >= 0
 * @param fieldIdx   Row field to set, or -1 to store directly
 * @param vector     source vector (may contain nulls)
 * @param childCount number of entries to process
 * @param reader     conversion from double to the element type
 */
private static <T> void readDoubleColumn(Object[] vals, int fieldIdx, DoubleColumnVector vector,
											int childCount, DoubleFunction<T> reader) {

	if (vector.isRepeating) { // a single entry stands in for every row
		if (vector.isNull[0]) {
			fillColumnWithRepeatingValue(vals, fieldIdx, null, childCount);
		} else {
			readNonNullDoubleColumn(vals, fieldIdx, vector, childCount, reader);
		}
	} else {
		final boolean[] nulls = vector.isNull;
		if (fieldIdx == -1) { // write converted values (or null) into vals
			for (int r = 0; r < childCount; r++) {
				vals[r] = nulls[r] ? null : reader.apply(vector.vector[r]);
			}
		} else { // write converted values (or null) into one field of each Row
			final Row[] rows = (Row[]) vals;
			for (int r = 0; r < childCount; r++) {
				rows[r].setField(fieldIdx, nulls[r] ? null : reader.apply(vector.vector[r]));
			}
		}
	}
}
 
Example #8
Source File: HiveORCVectorizedReader.java    From dremio-oss with Apache License 2.0 5 votes vote down vote up
/**
 * Creates the ORC ColumnVector subtype used to read values of the given Hive
 * primitive category, sized to {@code VectorizedRowBatch.DEFAULT_SIZE}.
 *
 * Integer-like categories (plus DATE) share a LongColumnVector; FLOAT/DOUBLE
 * share a DoubleColumnVector; string-like categories and BINARY share a
 * BytesColumnVector. DECIMAL carries precision/scale from the type info.
 *
 * @param poi inspector describing the column's primitive type
 * @return a freshly allocated vector of the matching subtype
 * @throws UserException for primitive categories without vectorized support
 */
private ColumnVector getPrimitiveColumnVector(PrimitiveObjectInspector poi) {
    switch (poi.getPrimitiveCategory()) {
    case BOOLEAN:
    case BYTE:
    case SHORT:
    case INT:
    case LONG:
    case DATE:
      return new LongColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
    case TIMESTAMP:
      return new TimestampColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
    case FLOAT:
    case DOUBLE:
      return new DoubleColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
    case BINARY:
    case STRING:
    case CHAR:
    case VARCHAR:
      return new BytesColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
    case DECIMAL:
      // Decimal vectors must be sized with the declared precision and scale.
      DecimalTypeInfo tInfo = (DecimalTypeInfo) poi.getTypeInfo();
      return new DecimalColumnVector(VectorizedRowBatch.DEFAULT_SIZE,
        tInfo.precision(), tInfo.scale()
      );
    default:
      throw UserException.unsupportedError()
        .message("Vectorized ORC reader is not supported for datatype: %s", poi.getPrimitiveCategory())
        .build(logger);
    }
}
 
Example #9
Source File: OrcBatchReader.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Populates {@code vals} from a DoubleColumnVector that may contain null
 * entries, converting each non-null double with {@code reader}.
 *
 * @param vals       destination array: objects, or Rows when fieldIdx >= 0
 * @param fieldIdx   Row field index, or -1 to store values directly
 * @param vector     source column vector
 * @param childCount number of entries to read
 * @param reader     double-to-T converter applied to non-null entries
 */
private static <T> void readDoubleColumn(Object[] vals, int fieldIdx, DoubleColumnVector vector,
											int childCount, DoubleFunction<T> reader) {

	if (vector.isRepeating) {
		// Whole column collapses to entry 0: all-null or one repeated value.
		if (vector.isNull[0]) {
			fillColumnWithRepeatingValue(vals, fieldIdx, null, childCount);
		} else {
			readNonNullDoubleColumn(vals, fieldIdx, vector, childCount, reader);
		}
		return;
	}

	final boolean[] isNullVector = vector.isNull;
	if (fieldIdx == -1) {
		// Direct object storage.
		for (int row = 0; row < childCount; row++) {
			vals[row] = isNullVector[row] ? null : reader.apply(vector.vector[row]);
		}
		return;
	}

	// Field-of-Row storage.
	final Row[] rows = (Row[]) vals;
	for (int row = 0; row < childCount; row++) {
		rows[row].setField(fieldIdx, isNullVector[row] ? null : reader.apply(vector.vector[row]));
	}
}
 
Example #10
Source File: DoubleColumnVectorAssignor.java    From multiple-dimension-spread with Apache License 2.0 5 votes vote down vote up
/**
 * Copies {@code length} values from this assignor's column (starting at
 * {@code start}, in the order given by {@code indexList}) into the target
 * DoubleColumnVector, marking missing values as null.
 *
 * @param vector    destination vector, expected to be a DoubleColumnVector
 * @param indexList expression index describing which rows to read
 * @param start     first row to read
 * @param length    number of rows to transfer
 * @throws IOException if reading the source column fails
 */
@Override
public void setColumnVector( final ColumnVector vector , final IExpressionIndex indexList , final int start , final int length ) throws IOException{
  DoubleColumnVector target = (DoubleColumnVector)vector;
  PrimitiveObject[] values = column.getPrimitiveObjectArray( indexList , start , length );
  for( int i = 0 ; i < length ; i++ ){
    if( values[i] != null ){
      // Delegate the type-specific conversion to the configured setter.
      setter.set( values , target , i );
    }
    else{
      VectorizedBatchUtil.setNullColIsNullValue( target , i );
    }
  }
}
 
Example #11
Source File: FloatPrimitiveSetter.java    From multiple-dimension-spread with Apache License 2.0 5 votes vote down vote up
/**
 * Stores the float value at {@code index} into the double vector, widening it
 * to double. A missing entry (NPE) or malformed number (NFE) marks the cell
 * as null instead of failing the batch.
 *
 * @param primitiveObjectArray source values
 * @param columnVector         destination vector
 * @param index                row index to write
 * @throws IOException declared by the setter interface
 */
@Override
public void set( final PrimitiveObject[] primitiveObjectArray , final DoubleColumnVector columnVector , final int index ) throws IOException{
  try{
    // Widen float -> double at the point of storage.
    columnVector.vector[index] = (double)( primitiveObjectArray[index].getFloat() );
  }catch( NumberFormatException | NullPointerException e ){
    VectorizedBatchUtil.setNullColIsNullValue( columnVector , index );
  }
}
 
Example #12
Source File: HiveORCVectorizedReader.java    From dremio-oss with Apache License 2.0 5 votes vote down vote up
/**
 * Allocates the ORC ColumnVector subtype matching the given Hive primitive
 * category, using the default batch size.
 *
 * Grouped fall-through cases map several categories onto one vector type:
 * integer-like + DATE -> LongColumnVector, FLOAT/DOUBLE -> DoubleColumnVector,
 * string-like + BINARY -> BytesColumnVector. DECIMAL vectors additionally need
 * the precision and scale from the type info.
 *
 * @param poi inspector for the column's primitive type
 * @return a new vector of the appropriate subtype
 * @throws UserException when the category has no vectorized reader support
 */
private ColumnVector getPrimitiveColumnVector(PrimitiveObjectInspector poi) {
    switch (poi.getPrimitiveCategory()) {
    case BOOLEAN:
    case BYTE:
    case SHORT:
    case INT:
    case LONG:
    case DATE:
      return new LongColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
    case TIMESTAMP:
      return new TimestampColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
    case FLOAT:
    case DOUBLE:
      return new DoubleColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
    case BINARY:
    case STRING:
    case CHAR:
    case VARCHAR:
      return new BytesColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
    case DECIMAL:
      // Decimal storage is parameterized by the declared precision/scale.
      DecimalTypeInfo tInfo = (DecimalTypeInfo) poi.getTypeInfo();
      return new DecimalColumnVector(VectorizedRowBatch.DEFAULT_SIZE,
        tInfo.precision(), tInfo.scale()
      );
    default:
      throw UserException.unsupportedError()
        .message("Vectorized ORC reader is not supported for datatype: %s", poi.getPrimitiveCategory())
        .build(logger);
    }
}
 
Example #13
Source File: DoublePrimitiveSetter.java    From multiple-dimension-spread with Apache License 2.0 5 votes vote down vote up
/**
 * Writes the double value at {@code index} into the column vector. Extraction
 * failures (missing entry or unparsable number) mark the cell as null rather
 * than aborting the batch.
 *
 * @param primitiveObjectArray source values
 * @param columnVector         destination vector
 * @param index                row index to write
 * @throws IOException declared by the setter interface
 */
@Override
public void set( final PrimitiveObject[] primitiveObjectArray , final DoubleColumnVector columnVector , final int index ) throws IOException{
  try{
    // Store directly; any NPE/NFE from getDouble() is translated to a null cell.
    columnVector.vector[index] = primitiveObjectArray[index].getDouble();
  }catch( NumberFormatException | NullPointerException e ){
    VectorizedBatchUtil.setNullColIsNullValue( columnVector , index );
  }
}
 
Example #14
Source File: HiveORCCopiers.java    From dremio-oss with Apache License 2.0 4 votes vote down vote up
/**
 * Binds an ORC double column to a variable-width Arrow vector; the actual
 * copy is performed elsewhere in the enclosing copier class.
 */
DoubleToVarWidthCopier(DoubleColumnVector inputVector, BaseVariableWidthVector outputVector) {
  this.inputVector = inputVector;
  this.outputVector = outputVector;
}
 
Example #15
Source File: OrcConverter.java    From pentaho-hadoop-shims with Apache License 2.0 4 votes vote down vote up
/**
 * Converts the value at {@code currentBatchRow} of an ORC column vector into
 * the Java object matching the given value-meta type id.
 *
 * @param columnVector        source ORC vector (subtype depends on the type id)
 * @param currentBatchRow     row index within the batch
 * @param orcValueMetaInterface ValueMetaInterface.TYPE_* constant selecting the conversion
 * @return the converted value, or null when the row is null or the type id is unknown
 */
protected static Object convertFromSourceToTargetDataType( ColumnVector columnVector, int currentBatchRow,
                                                           int orcValueMetaInterface ) {

  if ( columnVector.isNull[ currentBatchRow ] ) {
    return null;
  }
  switch ( orcValueMetaInterface ) {
    case ValueMetaInterface.TYPE_INET:
      try {
        return InetAddress.getByName( new String( ( (BytesColumnVector) columnVector ).vector[ currentBatchRow ],
          ( (BytesColumnVector) columnVector ).start[ currentBatchRow ],
          ( (BytesColumnVector) columnVector ).length[ currentBatchRow ] ) );
      } catch ( UnknownHostException e ) {
        e.printStackTrace();
      }
      // BUGFIX: without this break an unresolvable host fell through into
      // TYPE_STRING and returned a raw String for an INET field; now the
      // method falls out of the switch and returns null instead.
      break;

    case ValueMetaInterface.TYPE_STRING:
      return new String( ( (BytesColumnVector) columnVector ).vector[ currentBatchRow ],
        ( (BytesColumnVector) columnVector ).start[ currentBatchRow ],
        ( (BytesColumnVector) columnVector ).length[ currentBatchRow ] );

    case ValueMetaInterface.TYPE_INTEGER:
      return (long) ( (LongColumnVector) columnVector ).vector[ currentBatchRow ];

    case ValueMetaInterface.TYPE_NUMBER:
      return ( (DoubleColumnVector) columnVector ).vector[ currentBatchRow ];

    case ValueMetaInterface.TYPE_BIGNUMBER:
      HiveDecimalWritable obj = ( (DecimalColumnVector) columnVector ).vector[ currentBatchRow ];
      return obj.getHiveDecimal().bigDecimalValue();

    case ValueMetaInterface.TYPE_TIMESTAMP:
      // Milliseconds and nanoseconds are stored separately in the ORC vector.
      Timestamp timestamp = new Timestamp( ( (TimestampColumnVector) columnVector ).time[ currentBatchRow ] );
      timestamp.setNanos( ( (TimestampColumnVector) columnVector ).nanos[ currentBatchRow ] );
      return timestamp;

    case ValueMetaInterface.TYPE_DATE:
      // ORC stores dates as days since the epoch.
      LocalDate localDate =
        LocalDate.ofEpochDay( 0 ).plusDays( ( (LongColumnVector) columnVector ).vector[ currentBatchRow ] );
      Date dateValue = Date.from( localDate.atStartOfDay( ZoneId.systemDefault() ).toInstant() );
      return dateValue;

    case ValueMetaInterface.TYPE_BOOLEAN:
      return ( (LongColumnVector) columnVector ).vector[ currentBatchRow ] == 0 ? false : true;

    case ValueMetaInterface.TYPE_BINARY:
      byte[] origBytes = ( (BytesColumnVector) columnVector ).vector[ currentBatchRow ];
      int startPos = ( (BytesColumnVector) columnVector ).start[ currentBatchRow ];
      byte[] newBytes = Arrays.copyOfRange( origBytes, startPos,
        startPos + ( (BytesColumnVector) columnVector ).length[ currentBatchRow ] );
      return newBytes;
  }

  //if none of the cases match return a null
  return null;
}
 
Example #16
Source File: ORCRecordExtractorTest.java    From incubator-pinot with Apache License 2.0 4 votes vote down vote up
/**
 * Creates an ORC input file from {@code _inputRecords}, writing one row per
 * record into a single vectorized batch. Nullable columns (user_id, firstName,
 * bids) have {@code noNulls} cleared up front so per-row null flags are honored.
 */
@Override
protected void createInputFile()
    throws IOException {
  TypeDescription schema = TypeDescription.fromString(
      "struct<user_id:int,firstName:string,lastName:string,bids:array<int>,campaignInfo:string,cost:double,timestamp:bigint>");
  Writer writer = OrcFile.createWriter(new Path(_dataFile.getAbsolutePath()),
      OrcFile.writerOptions(new Configuration()).setSchema(schema));

  int numRecords = _inputRecords.size();
  // One batch sized to hold every record; column vectors mirror the schema order.
  VectorizedRowBatch rowBatch = schema.createRowBatch(numRecords);
  LongColumnVector userIdVector = (LongColumnVector) rowBatch.cols[0];
  userIdVector.noNulls = false;
  BytesColumnVector firstNameVector = (BytesColumnVector) rowBatch.cols[1];
  firstNameVector.noNulls = false;
  BytesColumnVector lastNameVector = (BytesColumnVector) rowBatch.cols[2];
  ListColumnVector bidsVector = (ListColumnVector) rowBatch.cols[3];
  bidsVector.noNulls = false;
  LongColumnVector bidsElementVector = (LongColumnVector) bidsVector.child;
  // Pre-grow the child vector for the list elements written below.
  bidsElementVector.ensureSize(6, false);
  BytesColumnVector campaignInfoVector = (BytesColumnVector) rowBatch.cols[4];
  DoubleColumnVector costVector = (DoubleColumnVector) rowBatch.cols[5];
  LongColumnVector timestampVector = (LongColumnVector) rowBatch.cols[6];

  for (int i = 0; i < numRecords; i++) {
    Map<String, Object> record = _inputRecords.get(i);

    Integer userId = (Integer) record.get("user_id");
    if (userId != null) {
      userIdVector.vector[i] = userId;
    } else {
      userIdVector.isNull[i] = true;
    }
    String firstName = (String) record.get("firstName");
    if (firstName != null) {
      firstNameVector.setVal(i, StringUtils.encodeUtf8(firstName));
    } else {
      firstNameVector.isNull[i] = true;
    }
    lastNameVector.setVal(i, StringUtils.encodeUtf8((String) record.get("lastName")));
    List<Integer> bids = (List<Integer>) record.get("bids");
    if (bids != null) {
      // List rows are encoded as an (offset, length) pair into the child vector.
      bidsVector.offsets[i] = bidsVector.childCount;
      bidsVector.lengths[i] = bids.size();
      for (int bid : bids) {
        bidsElementVector.vector[bidsVector.childCount++] = bid;
      }
    } else {
      bidsVector.isNull[i] = true;
    }
    campaignInfoVector.setVal(i, StringUtils.encodeUtf8((String) record.get("campaignInfo")));
    costVector.vector[i] = (double) record.get("cost");
    timestampVector.vector[i] = (long) record.get("timestamp");

    rowBatch.size++;
  }

  writer.addRowBatch(rowBatch);
  rowBatch.reset();
  writer.close();
}
 
Example #17
Source File: JsonFieldFiller.java    From secor with Apache License 2.0 4 votes vote down vote up
/**
 * Writes the value at {@code row} of an ORC column vector into the JSON
 * writer, dispatching on the schema category. Null entries are emitted as
 * JSON null; repeating vectors are read from entry 0.
 *
 * @param writer destination JSON writer
 * @param vector source column vector (subtype depends on the category)
 * @param schema type description selecting the conversion
 * @param row    row index within the batch
 * @throws JSONException if the writer rejects the value
 */
static void setValue(JSONWriter writer, ColumnVector vector,
        TypeDescription schema, int row) throws JSONException {
    if (vector.isRepeating) {
        row = 0;
    }
    if (vector.noNulls || !vector.isNull[row]) {
        switch (schema.getCategory()) {
        case BOOLEAN:
            writer.value(((LongColumnVector) vector).vector[row] != 0);
            break;
        case BYTE:
        case SHORT:
        case INT:
        case LONG:
            writer.value(((LongColumnVector) vector).vector[row]);
            break;
        case FLOAT:
        case DOUBLE:
            writer.value(((DoubleColumnVector) vector).vector[row]);
            break;
        case STRING:
        case CHAR:
        case VARCHAR:
            writer.value(((BytesColumnVector) vector).toString(row));
            break;
        case DECIMAL:
            writer.value(((DecimalColumnVector) vector).vector[row]
                    .toString());
            break;
        case DATE:
            // Dates are stored as days since the epoch; DateWritable renders them.
            writer.value(new DateWritable(
                    (int) ((LongColumnVector) vector).vector[row])
                    .toString());
            break;
        case TIMESTAMP:
            writer.value(((TimestampColumnVector) vector)
                    .asScratchTimestamp(row).toString());
            break;
        case LIST:
            setList(writer, (ListColumnVector) vector, schema, row);
            break;
        case STRUCT:
            setStruct(writer, (StructColumnVector) vector, schema, row);
            break;
        case UNION:
            setUnion(writer, (UnionColumnVector) vector, schema, row);
            break;
        case BINARY:
            // To prevent similar mistakes like the one described in https://github.com/pinterest/secor/pull/1018,
            // it would be better to explicitly throw an exception here rather than ignore the incoming values,
            // which causes silent failures in a later stage.
            throw new UnsupportedOperationException();
        case MAP:
            setMap(writer, (MapColumnVector) vector, schema, row);
            break;
        default:
            throw new IllegalArgumentException("Unknown type "
                    + schema.toString());
        }
    } else {
        writer.value(null);
    }
}
 
Example #18
Source File: OrcColumnarRowSplitReaderTest.java    From flink with Apache License 2.0 4 votes vote down vote up
/**
 * Writes an ORC test file with float/double/timestamp/tinyint/smallint
 * columns. Rows 0..rowSize-2 hold ascending values derived from the row
 * index; the final row is null in every column.
 *
 * @param file    path of the ORC file to create
 * @param rowSize total number of rows to write (last one all-null)
 * @throws IOException if writing the file fails
 */
protected void prepareReadFileWithTypes(String file, int rowSize) throws IOException {
	// NOTE: orc has field name information, so name should be same as orc
	TypeDescription schema =
			TypeDescription.fromString(
					"struct<" +
							"f0:float," +
							"f1:double," +
							"f2:timestamp," +
							"f3:tinyint," +
							"f4:smallint" +
							">");

	org.apache.hadoop.fs.Path filePath = new org.apache.hadoop.fs.Path(file);
	Configuration conf = new Configuration();

	Writer writer =
			OrcFile.createWriter(filePath,
					OrcFile.writerOptions(conf).setSchema(schema));

	VectorizedRowBatch batch = schema.createRowBatch(rowSize);
	// ORC reads both float and double columns through DoubleColumnVector.
	DoubleColumnVector col0 = (DoubleColumnVector) batch.cols[0];
	DoubleColumnVector col1 = (DoubleColumnVector) batch.cols[1];
	TimestampColumnVector col2 = (TimestampColumnVector) batch.cols[2];
	LongColumnVector col3 = (LongColumnVector) batch.cols[3];
	LongColumnVector col4 = (LongColumnVector) batch.cols[4];

	// Every column is nullable so the trailing null row can be encoded.
	col0.noNulls = false;
	col1.noNulls = false;
	col2.noNulls = false;
	col3.noNulls = false;
	col4.noNulls = false;
	for (int i = 0; i < rowSize - 1; i++) {
		col0.vector[i] = i;
		col1.vector[i] = i;

		// Timestamps are split into millisecond and nanosecond components.
		Timestamp timestamp = toTimestamp(i);
		col2.time[i] = timestamp.getTime();
		col2.nanos[i] = timestamp.getNanos();

		col3.vector[i] = i;
		col4.vector[i] = i;
	}

	// Last row: null in every column.
	col0.isNull[rowSize - 1] = true;
	col1.isNull[rowSize - 1] = true;
	col2.isNull[rowSize - 1] = true;
	col3.isNull[rowSize - 1] = true;
	col4.isNull[rowSize - 1] = true;

	batch.size = rowSize;
	writer.addRowBatch(batch);
	batch.reset();
	writer.close();
}
 
Example #19
Source File: OrcDoubleColumnVector.java    From flink with Apache License 2.0 4 votes vote down vote up
/**
 * Wraps a Hive/ORC DoubleColumnVector and additionally keeps a typed
 * reference for direct element access by this class.
 */
public OrcDoubleColumnVector(DoubleColumnVector vector) {
	super(vector);
	this.vector = vector;
}
 
Example #20
Source File: HiveORCCopiers.java    From dremio-oss with Apache License 2.0 4 votes vote down vote up
/**
 * Binds an ORC double column to an Arrow Float8Vector; the actual copy is
 * performed elsewhere in the enclosing copier class.
 */
DoubleToFloat8Copier(DoubleColumnVector inputVector, Float8Vector outputVector) {
  this.inputVector = inputVector;
  this.outputVector = outputVector;
}
 
Example #21
Source File: HiveORCCopiers.java    From dremio-oss with Apache License 2.0 4 votes vote down vote up
/**
 * Binds an ORC double column to an Arrow Float4Vector; the actual copy is
 * performed elsewhere in the enclosing copier class.
 */
DoubleToFloat4Copier(DoubleColumnVector inputVector, Float4Vector outputVector) {
  this.inputVector = inputVector;
  this.outputVector = outputVector;
}
 
Example #22
Source File: HiveORCCopiers.java    From dremio-oss with Apache License 2.0 4 votes vote down vote up
/**
 * Binds an ORC double column to an Arrow DecimalVector; the actual copy is
 * performed elsewhere in the enclosing copier class.
 */
DoubleToDecimalCopier(DoubleColumnVector inputVector, DecimalVector outputVector) {
  this.inputVector = inputVector;
  this.outputVector = outputVector;
}
 
Example #23
Source File: HiveORCCopiers.java    From dremio-oss with Apache License 2.0 4 votes vote down vote up
/**
 * Binds an ORC double column to a variable-width Arrow vector; the actual
 * copy is performed elsewhere in the enclosing copier class.
 */
DoubleToVarWidthCopier(DoubleColumnVector inputVector, BaseVariableWidthVector outputVector) {
  this.inputVector = inputVector;
  this.outputVector = outputVector;
}
 
Example #24
Source File: HiveORCCopiers.java    From dremio-oss with Apache License 2.0 4 votes vote down vote up
/**
 * Binds an ORC double column to an Arrow Float8Vector; the actual copy is
 * performed elsewhere in the enclosing copier class.
 */
DoubleToFloat8Copier(DoubleColumnVector inputVector, Float8Vector outputVector) {
  this.inputVector = inputVector;
  this.outputVector = outputVector;
}
 
Example #25
Source File: HiveORCCopiers.java    From dremio-oss with Apache License 2.0 4 votes vote down vote up
/**
 * Binds an ORC double column to an Arrow Float4Vector; the actual copy is
 * performed elsewhere in the enclosing copier class.
 */
DoubleToFloat4Copier(DoubleColumnVector inputVector, Float4Vector outputVector) {
  this.inputVector = inputVector;
  this.outputVector = outputVector;
}
 
Example #26
Source File: HiveORCCopiers.java    From dremio-oss with Apache License 2.0 4 votes vote down vote up
/**
 * Binds an ORC double column to an Arrow DecimalVector; the actual copy is
 * performed elsewhere in the enclosing copier class.
 */
DoubleToDecimalCopier(DoubleColumnVector inputVector, DecimalVector outputVector) {
  this.inputVector = inputVector;
  this.outputVector = outputVector;
}
 
Example #27
Source File: IDecimalPrimitiveSetter.java    From multiple-dimension-spread with Apache License 2.0 votes vote down vote up
/**
 * Writes the primitive value at {@code index} into the given
 * DoubleColumnVector. Concrete setters in this package translate extraction
 * failures into null cells rather than propagating them.
 */
void set( final PrimitiveObject[] primitiveObjectArray , final DoubleColumnVector columnVector , final int index ) throws IOException;