Java Code Examples for org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector

The following examples show how to use org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector. These examples are extracted from open-source projects. You can vote up the examples you like or vote down the ones you don't, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example 1
Source Project: presto   Source File: SerDeUtils.java    License: Apache License 2.0 6 votes vote down vote up
@VisibleForTesting
public static Block serializeObject(Type type, BlockBuilder builder, Object object, ObjectInspector inspector, boolean filterNullMapKeys)
{
    // Dispatch serialization by the inspector's category. Primitives are
    // written directly into the builder and yield no standalone block.
    switch (inspector.getCategory()) {
        case PRIMITIVE: {
            serializePrimitive(type, builder, object, (PrimitiveObjectInspector) inspector);
            return null;
        }
        case LIST:
            return serializeList(type, builder, object, (ListObjectInspector) inspector);
        case MAP:
            return serializeMap(type, builder, object, (MapObjectInspector) inspector, filterNullMapKeys);
        case STRUCT:
            return serializeStruct(type, builder, object, (StructObjectInspector) inspector);
        case UNION:
            return serializeUnion(type, builder, object, (UnionObjectInspector) inspector);
        default:
            throw new RuntimeException("Unknown object inspector category: " + inspector.getCategory());
    }
}
 
Example 2
Source Project: localization_nifi   Source File: OrcFlowFileWriter.java    License: Apache License 2.0 6 votes vote down vote up
UnionTreeWriter(int columnId,
                ObjectInspector inspector,
                StreamFactory writer,
                boolean nullable) throws IOException {
    super(columnId, inspector, writer, nullable);
    // Create one child writer per union alternative, in declaration order.
    List<ObjectInspector> alternatives =
            ((UnionObjectInspector) inspector).getObjectInspectors();
    childrenWriters = new TreeWriter[alternatives.size()];
    for (int index = 0; index < childrenWriters.length; index++) {
        childrenWriters[index] = createTreeWriter(alternatives.get(index), writer, true);
    }
    // The DATA stream carries the per-row tag selecting the active alternative.
    tags = new RunLengthByteWriter(
            writer.createStream(columnId, OrcProto.Stream.Kind.DATA));
    recordPosition(rowIndexPosition);
}
 
Example 3
Source Project: dremio-oss   Source File: HiveORCVectorizedReader.java    License: Apache License 2.0 6 votes vote down vote up
private ColumnVector getColumnVector(ObjectInspector oi) {
  // Build the vector implementation matching the inspector's type category.
  final Category category = oi.getCategory();
  switch (category) {
    case PRIMITIVE:
      return getPrimitiveColumnVector((PrimitiveObjectInspector) oi);
    case STRUCT:
      return getStructColumnVector((StructObjectInspector) oi);
    case LIST:
      return getListColumnVector((ListObjectInspector) oi);
    case MAP:
      return getMapColumnVector((MapObjectInspector) oi);
    case UNION:
      return getUnionColumnVector((UnionObjectInspector) oi);
    default:
      // Any other category has no vectorized representation here.
      throw UserException.unsupportedError()
        .message("Vectorized ORC reader is not supported for datatype: %s", category)
        .build(logger);
  }
}
 
Example 4
Source Project: dremio-oss   Source File: HiveORCVectorizedReader.java    License: Apache License 2.0 6 votes vote down vote up
private ColumnVector getColumnVector(ObjectInspector oi) {
  // Select the vector implementation for the inspector's type category.
  final Category category = oi.getCategory();
  switch (category) {
    case PRIMITIVE:
      return getPrimitiveColumnVector((PrimitiveObjectInspector) oi);
    case STRUCT:
      return getStructColumnVector((StructObjectInspector) oi);
    case LIST:
      return getListColumnVector((ListObjectInspector) oi);
    case MAP:
      return getMapColumnVector((MapObjectInspector) oi);
    case UNION:
      return getUnionColumnVector((UnionObjectInspector) oi);
    default:
      // No vectorized representation exists for any remaining category.
      throw UserException.unsupportedError()
        .message("Vectorized ORC reader is not supported for datatype: %s", category)
        .build(logger);
  }
}
 
Example 5
Source Project: nifi   Source File: OrcFlowFileWriter.java    License: Apache License 2.0 6 votes vote down vote up
UnionTreeWriter(int columnId,
                ObjectInspector inspector,
                StreamFactory writer,
                boolean nullable) throws IOException {
    super(columnId, inspector, writer, nullable);
    // One child tree writer per union alternative, in declaration order.
    List<ObjectInspector> alternatives =
            ((UnionObjectInspector) inspector).getObjectInspectors();
    childrenWriters = new TreeWriter[alternatives.size()];
    for (int index = 0; index < childrenWriters.length; index++) {
        childrenWriters[index] = createTreeWriter(alternatives.get(index), writer, true);
    }
    // DATA stream holds the per-row byte tag choosing the active alternative.
    tags = new RunLengthByteWriter(
            writer.createStream(columnId, OrcProto.Stream.Kind.DATA));
    recordPosition(rowIndexPosition);
}
 
Example 6
Source Project: hive-dwrf   Source File: WriterImpl.java    License: Apache License 2.0 6 votes vote down vote up
UnionTreeWriter(int columnId,
              ObjectInspector inspector,
              StreamFactory writer,
              boolean nullable, Configuration conf,
              boolean useVInts, boolean lowMemoryMode,
              MemoryEstimate memoryEstimate) throws IOException {
  super(columnId, inspector, writer, nullable, conf, useVInts, memoryEstimate);
  // Build one child writer per union alternative, in declaration order.
  List<ObjectInspector> alternatives =
      ((UnionObjectInspector) inspector).getObjectInspectors();
  childrenWriters = new TreeWriter[alternatives.size()];
  for (int index = 0; index < childrenWriters.length; index++) {
    childrenWriters[index] = createTreeWriter(alternatives.get(index), writer, true, conf,
        useVInts, lowMemoryMode, memoryEstimate);
  }
  // DATA stream records the per-row tag selecting the active alternative.
  tags = new RunLengthByteWriter(
      writer.createStream(columnId, OrcProto.Stream.Kind.DATA));
  recordPosition(rowIndexPosition);
}
 
Example 7
Source Project: presto   Source File: SerDeUtils.java    License: Apache License 2.0 5 votes vote down vote up
private static Block serializeUnion(Type type, BlockBuilder builder, Object object, UnionObjectInspector inspector)
{
    // A null union appends a null to the parent builder and yields no block.
    if (object == null) {
        requireNonNull(builder, "parent builder is null").appendNull();
        return null;
    }

    // When no parent builder is supplied, synthesize a single-entry builder
    // and return the finished block at the end.
    boolean builderSynthesized = (builder == null);
    BlockBuilder target = builderSynthesized ? type.createBlockBuilder(null, 1) : builder;

    BlockBuilder currentBuilder = target.beginBlockEntry();

    // The first field of the entry is the TINYINT tag of the live alternative.
    byte tag = inspector.getTag(object);
    TINYINT.writeLong(currentBuilder, tag);

    // Remaining fields: serialize only the tagged alternative; every other
    // alternative position is filled with null.
    List<Type> typeParameters = type.getTypeParameters();
    for (int i = 1; i < typeParameters.size(); i++) {
        if (i != tag + 1) {
            currentBuilder.appendNull();
        }
        else {
            serializeObject(typeParameters.get(i), currentBuilder, inspector.getField(object), inspector.getObjectInspectors().get(tag));
        }
    }

    target.closeEntry();
    return builderSynthesized ? (Block) type.getObject(target, 0) : null;
}
 
Example 8
Source Project: localization_nifi   Source File: OrcFlowFileWriter.java    License: Apache License 2.0 5 votes vote down vote up
@Override
void write(Object obj) throws IOException {
    super.write(obj);
    if (obj == null) {
        return; // nulls are handled entirely by the base class
    }
    UnionObjectInspector unionInspector = (UnionObjectInspector) inspector;
    // Record the tag first, then delegate the value to the tagged child writer.
    byte tag = unionInspector.getTag(obj);
    tags.write(tag);
    if (createBloomFilter) {
        bloomFilter.addLong(tag);
    }
    childrenWriters[tag].write(unionInspector.getField(obj));
}
 
Example 9
Source Project: pxf   Source File: HiveResolver.java    License: Apache License 2.0 5 votes vote down vote up
private List<OneField> traverseUnion(Object obj, UnionObjectInspector uoi)
        throws BadRecordException, IOException {
    // A union without member inspectors cannot be resolved.
    List<? extends ObjectInspector> memberInspectors = uoi.getObjectInspectors();
    if (memberInspectors == null) {
        throw new BadRecordException(
                "Illegal value NULL for Hive data type Union");
    }
    // Resolve only the member selected by the union's tag.
    List<OneField> unionRecord = new LinkedList<>();
    traverseTuple(uoi.getField(obj), memberInspectors.get(uoi.getTag(obj)),
            unionRecord, true);
    return unionRecord;
}
 
Example 10
Source Project: dremio-oss   Source File: HiveORCVectorizedReader.java    License: Apache License 2.0 5 votes vote down vote up
private ColumnVector getUnionColumnVector(UnionObjectInspector uoi) {
  // Build one child vector per union member, preserving member order.
  List<? extends ObjectInspector> members = uoi.getObjectInspectors();
  ColumnVector[] childVectors = new ColumnVector[members.size()];
  for (int i = 0; i < childVectors.length; i++) {
    childVectors[i] = getColumnVector(members.get(i));
  }
  return new UnionColumnVector(VectorizedRowBatch.DEFAULT_SIZE, childVectors);
}
 
Example 11
Source Project: dremio-oss   Source File: HiveORCVectorizedReader.java    License: Apache License 2.0 5 votes vote down vote up
private ColumnVector getUnionColumnVector(UnionObjectInspector uoi) {
  // One child vector per union member, in member order.
  List<? extends ObjectInspector> members = uoi.getObjectInspectors();
  ColumnVector[] childVectors = new ColumnVector[members.size()];
  for (int i = 0; i < childVectors.length; i++) {
    childVectors[i] = getColumnVector(members.get(i));
  }
  return new UnionColumnVector(VectorizedRowBatch.DEFAULT_SIZE, childVectors);
}
 
Example 12
Source Project: nifi   Source File: OrcFlowFileWriter.java    License: Apache License 2.0 5 votes vote down vote up
@Override
void write(Object obj) throws IOException {
    super.write(obj);
    if (obj == null) {
        return; // null rows are handled by the base class alone
    }
    UnionObjectInspector unionInspector = (UnionObjectInspector) inspector;
    // Write the selecting tag, then forward the value to that child writer.
    byte tag = unionInspector.getTag(obj);
    tags.write(tag);
    if (createBloomFilter) {
        bloomFilter.addLong(tag);
    }
    childrenWriters[tag].write(unionInspector.getField(obj));
}
 
Example 13
Source Project: hive-dwrf   Source File: WriterImpl.java    License: Apache License 2.0 5 votes vote down vote up
@Override
void write(Object obj) throws IOException {
  // Raw data size is the union tag (1 byte) plus the selected value's size;
  // it stays zero for null rows.
  long rawDataSize = 0;
  if (obj != null) {
    UnionObjectInspector unionInspector = (UnionObjectInspector) inspector;
    byte tag = unionInspector.getTag(obj);
    tags.write(tag);
    childrenWriters[tag].write(unionInspector.getField(obj));
    rawDataSize = RawDatasizeConst.UNION_TAG_SIZE
        + childrenWriters[tag].getRowRawDataSize();
  }
  super.write(obj, rawDataSize);
}
 
Example 14
Source Project: pxf   Source File: HiveResolver.java    License: Apache License 2.0 4 votes vote down vote up
private void traverseTuple(Object obj, ObjectInspector objInspector,
                           List<OneField> record, boolean toFlatten)
        throws IOException, BadRecordException {
    // Complex categories are rendered as a single TEXT field (null when the
    // value is null); primitives are resolved directly.
    ObjectInspector.Category category = objInspector.getCategory();
    switch (category) {
        case PRIMITIVE:
            resolvePrimitive(obj, (PrimitiveObjectInspector) objInspector,
                    record, toFlatten);
            break;
        case LIST: {
            String text = null;
            if (obj != null) {
                List<OneField> listRecord = traverseList(obj,
                        (ListObjectInspector) objInspector);
                text = String.format("[%s]",
                        HdfsUtilities.toString(listRecord, collectionDelim));
            }
            addOneFieldToRecord(record, DataType.TEXT, text);
            break;
        }
        case MAP: {
            String text = null;
            if (obj != null) {
                List<OneField> mapRecord = traverseMap(obj,
                        (MapObjectInspector) objInspector);
                text = String.format("{%s}",
                        HdfsUtilities.toString(mapRecord, collectionDelim));
            }
            addOneFieldToRecord(record, DataType.TEXT, text);
            break;
        }
        case STRUCT: {
            String text = null;
            if (obj != null) {
                List<OneField> structRecord = traverseStruct(obj,
                        (StructObjectInspector) objInspector, true);
                text = String.format("{%s}",
                        HdfsUtilities.toString(structRecord, collectionDelim));
            }
            addOneFieldToRecord(record, DataType.TEXT, text);
            break;
        }
        case UNION: {
            String text = null;
            if (obj != null) {
                List<OneField> unionRecord = traverseUnion(obj,
                        (UnionObjectInspector) objInspector);
                text = String.format("[%s]",
                        HdfsUtilities.toString(unionRecord, collectionDelim));
            }
            addOneFieldToRecord(record, DataType.TEXT, text);
            break;
        }
        default:
            throw new UnsupportedTypeException("Unknown category type: "
                    + objInspector.getCategory());
    }
}
 
Example 15
Source Project: incubator-hivemall   Source File: JsonSerdeUtils.java    License: Apache License 2.0 4 votes vote down vote up
private static void buildJSONString(@Nonnull final StringBuilder sb, @Nullable final Object obj,
        @Nonnull final ObjectInspector oi) throws SerDeException {
    // Append the JSON rendering of obj according to its inspector category.
    switch (oi.getCategory()) {
        case PRIMITIVE:
            serializePrimitive(sb, obj, (PrimitiveObjectInspector) oi);
            break;
        case LIST:
            serializeList(sb, obj, (ListObjectInspector) oi);
            break;
        case MAP:
            serializeMap(sb, obj, (MapObjectInspector) oi);
            break;
        case STRUCT:
            serializeStruct(sb, obj, (StructObjectInspector) oi, null);
            break;
        case UNION: {
            if (obj == null) {
                sb.append("null");
                break;
            }
            // Unions render as {tag:value}.
            final UnionObjectInspector uoi = (UnionObjectInspector) oi;
            sb.append(SerDeUtils.LBRACE);
            sb.append(uoi.getTag(obj));
            sb.append(SerDeUtils.COLON);
            buildJSONString(sb, uoi.getField(obj),
                uoi.getObjectInspectors().get(uoi.getTag(obj)));
            sb.append(SerDeUtils.RBRACE);
            break;
        }
        default:
            throw new SerDeException("Unknown type in ObjectInspector: " + oi.getCategory());
    }
}