org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector Java Examples

The following examples show how to use org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector. Each example is taken from an open-source project; the source file, project, and license are noted above it.
Example #1
Source File: TestOrcReaderMemoryUsage.java    From presto with Apache License 2.0
/**
 * Write a file that contains a number of rows with a single VARCHAR column, none of whose values are null.
 */
private static TempFile createSingleColumnVarcharFile(int count, int length)
        throws Exception
{
    Serializer serde = new OrcSerde();
    TempFile tempFile = new TempFile();
    FileSinkOperator.RecordWriter writer = createOrcRecordWriter(tempFile.getFile(), ORC_12, CompressionKind.NONE, VARCHAR);
    SettableStructObjectInspector objectInspector = createSettableStructObjectInspector("test", VARCHAR);
    Object row = objectInspector.create();
    StructField field = objectInspector.getAllStructFieldRefs().get(0);

    for (int i = 0; i < count; i++) {
        objectInspector.setStructFieldData(row, field, Strings.repeat("0", length));
        Writable record = serde.serialize(row, objectInspector);
        writer.write(record);
    }

    writer.close(false);
    return tempFile;
}
 
Example #2
Source File: TestOrcReaderPositions.java    From spliceengine with GNU Affero General Public License v3.0
private static void createSequentialFile(File file, int count)
        throws IOException, ReflectiveOperationException, SerDeException
{
    FileSinkOperator.RecordWriter writer = createOrcRecordWriter(file, ORC_12, OrcTester.Compression.NONE, javaLongObjectInspector);

    @SuppressWarnings("deprecation") Serializer serde = new OrcSerde();
    SettableStructObjectInspector objectInspector = createSettableStructObjectInspector("test", javaLongObjectInspector);
    Object row = objectInspector.create();
    StructField field = objectInspector.getAllStructFieldRefs().get(0);

    for (int i = 0; i < count; i++) {
        objectInspector.setStructFieldData(row, field, (long) i);
        Writable record = serde.serialize(row, objectInspector);
        writer.write(record);
    }

    writer.close(false);
}
 
Example #3
Source File: TestOrcReaderPositions.java    From spliceengine with GNU Affero General Public License v3.0
private static void createMultiStripeFile(File file)
        throws IOException, ReflectiveOperationException, SerDeException
{
    FileSinkOperator.RecordWriter writer = createOrcRecordWriter(file, ORC_12, OrcTester.Compression.NONE, javaLongObjectInspector);

    @SuppressWarnings("deprecation") Serializer serde = new OrcSerde();
    SettableStructObjectInspector objectInspector = createSettableStructObjectInspector("test", javaLongObjectInspector);
    Object row = objectInspector.create();
    StructField field = objectInspector.getAllStructFieldRefs().get(0);

    for (int i = 0; i < 300; i += 3) {
        if ((i > 0) && (i % 60 == 0)) {
            flushWriter(writer);
        }

        objectInspector.setStructFieldData(row, field, (long) i);
        Writable record = serde.serialize(row, objectInspector);
        writer.write(record);
    }

    writer.close(false);
}
 
Example #4
Source File: CacheableObjectInspectorConverters.java    From transport with BSD 2-Clause "Simplified" License
public StructConverter(ObjectInspector inputOI, SettableStructObjectInspector outputOI) {
  if (inputOI instanceof StructObjectInspector) {
    this.inputOI = (StructObjectInspector) inputOI;
    this.outputOI = outputOI;
    inputFields = this.inputOI.getAllStructFieldRefs();
    outputFields = outputOI.getAllStructFieldRefs();

    // Build converters only for the fields present in both structs; any extra
    // output fields are never written and stay at their default null values.
    int minFields = Math.min(inputFields.size(), outputFields.size());
    fieldConverters = new ArrayList<Converter>(minFields);
    for (int f = 0; f < minFields; f++) {
      fieldConverters.add(getConverter(inputFields.get(f).getFieldObjectInspector(),
          outputFields.get(f).getFieldObjectInspector()));
    }
  } else if (!(inputOI instanceof VoidObjectInspector)) {
    throw new UnsupportedOperationException(
        "Hive internal error: conversion of " + inputOI.getTypeName() + " to " + outputOI.getTypeName()
            + "not supported yet.");
  }
}
 
Example #5
Source File: TestOrcReaderPositions.java    From presto with Apache License 2.0
private static void createSequentialFile(File file, int count)
        throws IOException, SerDeException
{
    FileSinkOperator.RecordWriter writer = createOrcRecordWriter(file, ORC_12, CompressionKind.NONE, BIGINT);

    Serializer serde = new OrcSerde();
    SettableStructObjectInspector objectInspector = createSettableStructObjectInspector("test", BIGINT);
    Object row = objectInspector.create();
    StructField field = objectInspector.getAllStructFieldRefs().get(0);

    for (int i = 0; i < count; i++) {
        objectInspector.setStructFieldData(row, field, (long) i);
        Writable record = serde.serialize(row, objectInspector);
        writer.write(record);
    }

    writer.close(false);
}
 
Example #6
Source File: TestOrcReaderMemoryUsage.java    From presto with Apache License 2.0
/**
 * Write a file that contains a number of rows with a single BIGINT column, where some rows hold null values.
 */
private static TempFile createSingleColumnFileWithNullValues(int rows)
        throws IOException, SerDeException
{
    Serializer serde = new OrcSerde();
    TempFile tempFile = new TempFile();
    FileSinkOperator.RecordWriter writer = createOrcRecordWriter(tempFile.getFile(), ORC_12, CompressionKind.NONE, BIGINT);
    SettableStructObjectInspector objectInspector = createSettableStructObjectInspector("test", BIGINT);
    Object row = objectInspector.create();
    StructField field = objectInspector.getAllStructFieldRefs().get(0);

    for (int i = 0; i < rows; i++) {
        if (i % 10 == 0) {
            objectInspector.setStructFieldData(row, field, null);
        }
        else {
            objectInspector.setStructFieldData(row, field, (long) i);
        }

        Writable record = serde.serialize(row, objectInspector);
        writer.write(record);
    }

    writer.close(false);
    return tempFile;
}
 
Example #7
Source File: TestOrcReaderPositions.java    From presto with Apache License 2.0
private static void createMultiStripeFile(File file)
        throws IOException, ReflectiveOperationException, SerDeException
{
    FileSinkOperator.RecordWriter writer = createOrcRecordWriter(file, ORC_12, CompressionKind.NONE, BIGINT);

    Serializer serde = new OrcSerde();
    SettableStructObjectInspector objectInspector = createSettableStructObjectInspector("test", BIGINT);
    Object row = objectInspector.create();
    StructField field = objectInspector.getAllStructFieldRefs().get(0);

    for (int i = 0; i < 300; i += 3) {
        if ((i > 0) && (i % 60 == 0)) {
            flushWriter(writer);
        }

        objectInspector.setStructFieldData(row, field, (long) i);
        Writable record = serde.serialize(row, objectInspector);
        writer.write(record);
    }

    writer.close(false);
}
 
Example #8
Source File: OrcTester.java    From presto with Apache License 2.0
public static DataSize writeOrcFileColumnHive(File outputFile, RecordWriter recordWriter, Type type, Iterator<?> values)
        throws Exception
{
    SettableStructObjectInspector objectInspector = createSettableStructObjectInspector("test", type);
    Object row = objectInspector.create();

    List<StructField> fields = ImmutableList.copyOf(objectInspector.getAllStructFieldRefs());
    Serializer serializer = new OrcSerde();

    while (values.hasNext()) {
        Object value = values.next();
        value = preprocessWriteValueHive(type, value);
        objectInspector.setStructFieldData(row, fields.get(0), value);

        Writable record = serializer.serialize(row, objectInspector);
        recordWriter.write(record);
    }

    recordWriter.close(false);
    return succinctBytes(outputFile.length());
}
 
Example #9
Source File: CacheableObjectInspectorConverters.java    From transport with BSD 2-Clause "Simplified" License
/**
 * Returns a converter that converts objects from one OI to another OI. The
 * returned (converted) object does not belong to the converter. Hence one converter can be used
 * multiple times within one eval invocation.
 */
public Converter getConverter(ObjectInspector inputOI, ObjectInspector outputOI) {
  // If the inputOI is the same as the outputOI, just return an
  // IdentityConverter.
  if (inputOI.equals(outputOI)) {
    return new ObjectInspectorConverters.IdentityConverter();
  }
  Converter c = getConverterFromCache(inputOI, outputOI);
  if (c != null) {
    return c;
  }
  switch (outputOI.getCategory()) {
    case PRIMITIVE:
      return getConverter((PrimitiveObjectInspector) inputOI, (PrimitiveObjectInspector) outputOI);
    case STRUCT:
      c = new StructConverter(inputOI, (SettableStructObjectInspector) outputOI);
      break;
    case LIST:
      c = new ListConverter(inputOI, (SettableListObjectInspector) outputOI);
      break;
    case MAP:
      c = new MapConverter(inputOI, (SettableMapObjectInspector) outputOI);
      break;
    default:
      throw new UnsupportedOperationException(
          "Hive internal error: conversion of " + inputOI.getTypeName() + " to " + outputOI.getTypeName()
              + " not supported yet.");
  }
  cacheConverter(inputOI, outputOI, c);
  return c;
}
 
Example #10
Source File: ParquetTester.java    From presto with Apache License 2.0
private static void writeParquetColumn(
        JobConf jobConf,
        File outputFile,
        CompressionCodecName compressionCodecName,
        Properties tableProperties,
        SettableStructObjectInspector objectInspector,
        Iterator<?>[] valuesByField,
        Optional<MessageType> parquetSchema,
        boolean singleLevelArray)
        throws Exception
{
    RecordWriter recordWriter = new TestMapredParquetOutputFormat(parquetSchema, singleLevelArray)
            .getHiveRecordWriter(
                    jobConf,
                    new Path(outputFile.toURI()),
                    Text.class,
                    compressionCodecName != UNCOMPRESSED,
                    tableProperties,
                    () -> {});
    Object row = objectInspector.create();
    List<StructField> fields = ImmutableList.copyOf(objectInspector.getAllStructFieldRefs());
    while (stream(valuesByField).allMatch(Iterator::hasNext)) {
        for (int field = 0; field < fields.size(); field++) {
            Object value = valuesByField[field].next();
            objectInspector.setStructFieldData(row, fields.get(field), value);
        }
        ParquetHiveSerDe serde = new ParquetHiveSerDe();
        serde.initialize(jobConf, tableProperties, null);
        Writable record = serde.serialize(row, objectInspector);
        recordWriter.write(record);
    }
    recordWriter.close(false);
}
 
Example #11
Source File: HiveStruct.java    From transport with BSD 2-Clause "Simplified" License
@Override
public void setField(int index, StdData value) {
  if (_structObjectInspector instanceof SettableStructObjectInspector) {
    StructField field = _structObjectInspector.getAllStructFieldRefs().get(index);
    ((SettableStructObjectInspector) _structObjectInspector).setStructFieldData(_object,
        field, ((HiveData) value).getUnderlyingDataForObjectInspector(field.getFieldObjectInspector())
    );
    _isObjectModified = true;
  } else {
    throw new RuntimeException("Attempt to modify an immutable Hive object of type: "
        + _structObjectInspector.getClass());
  }
}
 
Example #12
Source File: HiveStruct.java    From transport with BSD 2-Clause "Simplified" License
@Override
public void setField(String name, StdData value) {
  if (_structObjectInspector instanceof SettableStructObjectInspector) {
    StructField field = _structObjectInspector.getStructFieldRef(name);
    ((SettableStructObjectInspector) _structObjectInspector).setStructFieldData(_object,
        field, ((HiveData) value).getUnderlyingDataForObjectInspector(field.getFieldObjectInspector()));
    _isObjectModified = true;
  } else {
    throw new RuntimeException("Attempt to modify an immutable Hive object of type: "
        + _structObjectInspector.getClass());
  }
}
 
Example #13
Source File: NiFiOrcUtils.java    From localization_nifi with Apache License 2.0
/**
 * Create an OrcStruct for the given TypeInfo, populated with the given objects
 *
 * @param typeInfo The TypeInfo object representing the ORC record schema
 * @param objs     ORC objects/Writables
 * @return an OrcStruct containing the specified objects for the specified schema
 */
public static OrcStruct createOrcStruct(TypeInfo typeInfo, Object... objs) {
    SettableStructObjectInspector oi = (SettableStructObjectInspector) OrcStruct
            .createObjectInspector(typeInfo);
    List<StructField> fields = (List<StructField>) oi.getAllStructFieldRefs();
    OrcStruct result = (OrcStruct) oi.create();
    result.setNumFields(fields.size());
    for (int i = 0; i < fields.size(); i++) {
        oi.setStructFieldData(result, fields.get(i), objs[i]);
    }
    return result;
}
 
Example #14
Source File: NiFiOrcUtils.java    From nifi with Apache License 2.0
/**
 * Create an OrcStruct for the given TypeInfo, populated with the given objects
 *
 * @param typeInfo The TypeInfo object representing the ORC record schema
 * @param objs     ORC objects/Writables
 * @return an OrcStruct containing the specified objects for the specified schema
 */
@SuppressWarnings("unchecked")
public static OrcStruct createOrcStruct(TypeInfo typeInfo, Object... objs) {
    SettableStructObjectInspector oi = (SettableStructObjectInspector) OrcStruct
            .createObjectInspector(typeInfo);
    List<StructField> fields = (List<StructField>) oi.getAllStructFieldRefs();
    OrcStruct result = (OrcStruct) oi.create();
    result.setNumFields(fields.size());
    for (int i = 0; i < fields.size(); i++) {
        oi.setStructFieldData(result, fields.get(i), objs[i]);
    }
    return result;
}
 
Example #15
Source File: NiFiOrcUtils.java    From nifi with Apache License 2.0
/**
 * Create an OrcStruct for the given TypeInfo, populated with the given objects
 *
 * @param typeInfo The TypeInfo object representing the ORC record schema
 * @param objs     ORC objects/Writables
 * @return an OrcStruct containing the specified objects for the specified schema
 */
public static OrcStruct createOrcStruct(TypeInfo typeInfo, Object... objs) {
    SettableStructObjectInspector oi = (SettableStructObjectInspector) OrcStruct
            .createObjectInspector(typeInfo);
    List<StructField> fields = (List<StructField>) oi.getAllStructFieldRefs();
    OrcStruct result = (OrcStruct) oi.create();
    result.setNumFields(fields.size());
    for (int i = 0; i < fields.size(); i++) {
        oi.setStructFieldData(result, fields.get(i), objs[i]);
    }
    return result;
}
 
Example #16
Source File: TestOrcReaderMemoryUsage.java    From presto with Apache License 2.0
/**
 * Write a file with a single map column and the given number of rows, where each map has
 * 10 entries in total and some entries have null keys or values.
 */
private static TempFile createSingleColumnMapFileWithNullValues(Type mapType, int rows)
        throws IOException, SerDeException
{
    Serializer serde = new OrcSerde();
    TempFile tempFile = new TempFile();
    FileSinkOperator.RecordWriter writer = createOrcRecordWriter(tempFile.getFile(), ORC_12, CompressionKind.NONE, mapType);
    SettableStructObjectInspector objectInspector = createSettableStructObjectInspector("test", mapType);
    Object row = objectInspector.create();
    StructField field = objectInspector.getAllStructFieldRefs().get(0);

    for (int i = 1; i <= rows; i++) {
        HashMap<Long, Long> map = new HashMap<>();

        for (int j = 1; j <= 8; j++) {
            Long value = (long) j;
            map.put(value, value);
        }

        // Add null values so that the StreamReader nullVectors are not empty.
        map.put(null, 0L);
        map.put(0L, null);

        objectInspector.setStructFieldData(row, field, map);
        Writable record = serde.serialize(row, objectInspector);
        writer.write(record);
    }
    writer.close(false);
    return tempFile;
}
 
Example #17
Source File: RcFileTester.java    From presto with Apache License 2.0
private static DataSize writeRcFileColumnOld(File outputFile, Format format, Compression compression, Type type, Iterator<?> values)
        throws Exception
{
    ObjectInspector columnObjectInspector = getJavaObjectInspector(type);
    RecordWriter recordWriter = createRcFileWriterOld(outputFile, compression, columnObjectInspector);

    SettableStructObjectInspector objectInspector = createSettableStructObjectInspector("test", columnObjectInspector);
    Object row = objectInspector.create();

    List<StructField> fields = ImmutableList.copyOf(objectInspector.getAllStructFieldRefs());
    Serializer serializer = format.createSerializer();

    Properties tableProperties = new Properties();
    tableProperties.setProperty("columns", "test");
    tableProperties.setProperty("columns.types", objectInspector.getTypeName());
    serializer.initialize(new JobConf(false), tableProperties);

    while (values.hasNext()) {
        Object value = values.next();
        value = preprocessWriteValueOld(type, value);
        objectInspector.setStructFieldData(row, fields.get(0), value);

        Writable record = serializer.serialize(row, objectInspector);
        recordWriter.write(record);
    }

    recordWriter.close(false);
    return DataSize.ofBytes(outputFile.length()).succinct();
}
 
Example #18
Source File: TestOrcReaderPositions.java    From presto with Apache License 2.0
private static void createGrowingSequentialFile(File file, int count, int step, int initialLength)
        throws IOException, SerDeException
{
    FileSinkOperator.RecordWriter writer = createOrcRecordWriter(file, ORC_12, CompressionKind.NONE, VARCHAR);

    Serializer serde = new OrcSerde();
    SettableStructObjectInspector objectInspector = createSettableStructObjectInspector("test", VARCHAR);
    Object row = objectInspector.create();
    StructField field = objectInspector.getAllStructFieldRefs().get(0);

    StringBuilder builder = new StringBuilder();
    for (int i = 0; i < initialLength; i++) {
        builder.append("0");
    }
    String seedString = builder.toString();

    // gradually grow the length of a cell
    int previousLength = initialLength;
    for (int i = 0; i < count; i++) {
        if ((i / step + 1) * initialLength > previousLength) {
            previousLength = (i / step + 1) * initialLength;
            builder.append(seedString);
        }
        objectInspector.setStructFieldData(row, field, builder.toString());
        Writable record = serde.serialize(row, objectInspector);
        writer.write(record);
    }

    writer.close(false);
}
 
Example #19
Source File: HiveWriteUtils.java    From presto with Apache License 2.0
public DateFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
{
    super(rowInspector, row, field);
}
 
Example #20
Source File: AbstractTestHiveFileFormats.java    From presto with Apache License 2.0
public static FileSplit createTestFile(
        String filePath,
        HiveStorageFormat storageFormat,
        HiveCompressionCodec compressionCodec,
        List<TestColumn> testColumns,
        int numRows)
        throws Exception
{
    HiveOutputFormat<?, ?> outputFormat = newInstance(storageFormat.getOutputFormat(), HiveOutputFormat.class);
    Serializer serializer = newInstance(storageFormat.getSerDe(), Serializer.class);

    // filter out partition keys, which are not written to the file
    testColumns = testColumns.stream()
            .filter(column -> !column.isPartitionKey())
            .collect(toImmutableList());

    Properties tableProperties = new Properties();
    tableProperties.setProperty(
            "columns",
            testColumns.stream()
                    .map(TestColumn::getName)
                    .collect(Collectors.joining(",")));
    tableProperties.setProperty(
            "columns.types",
            testColumns.stream()
                    .map(TestColumn::getType)
                    .collect(Collectors.joining(",")));
    serializer.initialize(new Configuration(false), tableProperties);

    JobConf jobConf = new JobConf();
    configureCompression(jobConf, compressionCodec);

    RecordWriter recordWriter = outputFormat.getHiveRecordWriter(
            jobConf,
            new Path(filePath),
            Text.class,
            compressionCodec != HiveCompressionCodec.NONE,
            tableProperties,
            () -> {});

    try {
        serializer.initialize(new Configuration(false), tableProperties);

        SettableStructObjectInspector objectInspector = getStandardStructObjectInspector(
                testColumns.stream()
                        .map(TestColumn::getName)
                        .collect(toImmutableList()),
                testColumns.stream()
                        .map(TestColumn::getObjectInspector)
                        .collect(toImmutableList()));

        Object row = objectInspector.create();

        List<StructField> fields = ImmutableList.copyOf(objectInspector.getAllStructFieldRefs());

        for (int rowNumber = 0; rowNumber < numRows; rowNumber++) {
            for (int i = 0; i < testColumns.size(); i++) {
                Object writeValue = testColumns.get(i).getWriteValue();
                if (writeValue instanceof Slice) {
                    writeValue = ((Slice) writeValue).getBytes();
                }
                objectInspector.setStructFieldData(row, fields.get(i), writeValue);
            }

            Writable record = serializer.serialize(row, objectInspector);
            recordWriter.write(record);
        }
    }
    finally {
        recordWriter.close(false);
    }

    // todo to test with compression, the file must be renamed with the compression extension
    Path path = new Path(filePath);
    path.getFileSystem(new Configuration(false)).setVerifyChecksum(true);
    File file = new File(filePath);
    return new FileSplit(path, 0, file.length(), new String[0]);
}
 
Example #21
Source File: TestOrcPageSourceMemoryTracking.java    From presto with Apache License 2.0
public static FileSplit createTestFile(
        String filePath,
        Serializer serializer,
        String compressionCodec,
        List<TestColumn> testColumns,
        int numRows,
        int stripeRows)
        throws Exception
{
    // filter out partition keys, which are not written to the file
    testColumns = testColumns.stream()
            .filter(column -> !column.isPartitionKey())
            .collect(toImmutableList());

    Properties tableProperties = new Properties();
    tableProperties.setProperty(
            "columns",
            testColumns.stream()
                    .map(TestColumn::getName)
                    .collect(Collectors.joining(",")));

    tableProperties.setProperty(
            "columns.types",
            testColumns.stream()
                    .map(TestColumn::getType)
                    .collect(Collectors.joining(",")));

    serializer.initialize(CONFIGURATION, tableProperties);

    JobConf jobConf = new JobConf();
    if (compressionCodec != null) {
        CompressionCodec codec = new CompressionCodecFactory(CONFIGURATION).getCodecByName(compressionCodec);
        jobConf.set(COMPRESS_CODEC, codec.getClass().getName());
        jobConf.set(COMPRESS_TYPE, SequenceFile.CompressionType.BLOCK.toString());
    }

    RecordWriter recordWriter = createRecordWriter(new Path(filePath), CONFIGURATION);

    try {
        SettableStructObjectInspector objectInspector = getStandardStructObjectInspector(
                testColumns.stream()
                        .map(TestColumn::getName)
                        .collect(toImmutableList()),
                testColumns.stream()
                        .map(TestColumn::getObjectInspector)
                        .collect(toImmutableList()));

        Object row = objectInspector.create();

        List<StructField> fields = ImmutableList.copyOf(objectInspector.getAllStructFieldRefs());

        for (int rowNumber = 0; rowNumber < numRows; rowNumber++) {
            for (int i = 0; i < testColumns.size(); i++) {
                Object writeValue = testColumns.get(i).getWriteValue();
                if (writeValue instanceof Slice) {
                    writeValue = ((Slice) writeValue).getBytes();
                }
                objectInspector.setStructFieldData(row, fields.get(i), writeValue);
            }

            Writable record = serializer.serialize(row, objectInspector);
            recordWriter.write(record);
            if (rowNumber % stripeRows == stripeRows - 1) {
                flushStripe(recordWriter);
            }
        }
    }
    finally {
        recordWriter.close(false);
    }

    Path path = new Path(filePath);
    path.getFileSystem(CONFIGURATION).setVerifyChecksum(true);
    File file = new File(filePath);
    return new FileSplit(path, 0, file.length(), new String[0]);
}
 
Example #22
Source File: HiveWriteUtils.java    From presto with Apache License 2.0
public RowFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, List<Type> fieldTypes)
{
    super(rowInspector, row, field);
    this.fieldTypes = ImmutableList.copyOf(fieldTypes);
}
 
Example #23
Source File: HiveWriteUtils.java    From presto with Apache License 2.0
public MapFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, Type keyType, Type valueType)
{
    super(rowInspector, row, field);
    this.keyType = requireNonNull(keyType, "keyType is null");
    this.valueType = requireNonNull(valueType, "valueType is null");
}
 
Example #24
Source File: HiveWriteUtils.java    From presto with Apache License 2.0
public ArrayFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, Type elementType)
{
    super(rowInspector, row, field);
    this.elementType = requireNonNull(elementType, "elementType is null");
}
 
Example #25
Source File: HiveWriteUtils.java    From presto with Apache License 2.0
public DecimalFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, DecimalType decimalType)
{
    super(rowInspector, row, field);
    this.decimalType = decimalType;
}
 
Example #26
Source File: HiveWriteUtils.java    From presto with Apache License 2.0
public TimestampFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
{
    super(rowInspector, row, field);
}
 
Example #27
Source File: HiveWriteUtils.java    From presto with Apache License 2.0
public BinaryFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
{
    super(rowInspector, row, field);
}
 
Example #28
Source File: HiveWriteUtils.java    From presto with Apache License 2.0
public CharFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, Type type)
{
    super(rowInspector, row, field);
    this.type = type;
}
 
Example #29
Source File: HiveWriteUtils.java    From presto with Apache License 2.0
public VarcharFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field, Type type)
{
    super(rowInspector, row, field);
    this.type = type;
}
 
Example #30
Source File: HiveWriteUtils.java    From presto with Apache License 2.0
public FloatFieldSetter(SettableStructObjectInspector rowInspector, Object row, StructField field)
{
    super(rowInspector, row, field);
}