Java Code Examples for org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter#convert()

The following examples show how to use org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter#convert(). Each example comes from an open source project; the source file and license are noted above the code.
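
A Converter is obtained for a pair of ObjectInspectors (source and target) and then applied to values with convert(). As a quick orientation before the project examples, here is a minimal sketch of that call pattern; the ConverterDemo wrapper class is hypothetical, while the factory fields and the getConverter()/convert() calls are standard Hive serde2 API:

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;

public class ConverterDemo {
  public static void main(String[] args) {
    // Build a converter that coerces Java String values into writable ints.
    Converter converter = ObjectInspectorConverters.getConverter(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector,
        PrimitiveObjectInspectorFactory.writableIntObjectInspector);

    // convert() returns the value in the target inspector's representation.
    IntWritable result = (IntWritable) converter.convert("42");
    System.out.println(result.get()); // prints 42
  }
}

Note that Hive's built-in primitive converters typically reuse a single output object across convert() calls, so a caller must copy the result before converting the next value; Example 2 below tests a converter implementation that instead returns a fresh object on every call.
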
Example 1
Source File: HiveData.java    From transport with BSD 2-Clause "Simplified" License
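This example converts HiveData's underlying object into the representation expected by a given ObjectInspector, caching the converted result per inspector so repeated requests avoid re-conversion.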
public Object getUnderlyingDataForObjectInspector(ObjectInspector oi) {
  if (oi.equals(getUnderlyingObjectInspector())) {
    return getUnderlyingData();
  }

  Object result = getObjectFromCache(oi);
  if (result == null) {
    Converter c = ((HiveFactory) _stdFactory).getConverter(getUnderlyingObjectInspector(), oi);
    result = c.convert(getUnderlyingData());
    _cachedObjectsForObjectInspectors.putIfAbsent(oi, result);
  }
  return result;
}
 
Example 2
Source File: TestCacheableObjectInspectorConverters.java    From transport with BSD 2-Clause "Simplified" License
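This test verifies that a converter obtained from CacheableObjectInspectorConverters returns a distinct output object on every convert() call.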
@Test
public void testReturnValue() {
  // Should create new return objects inside the converter
  CacheableObjectInspectorConverters cacheableObjectInspectorConverters = new CacheableObjectInspectorConverters();

  Converter c1 =
      cacheableObjectInspectorConverters.getConverter(javaStringObjectInspector, writableStringObjectInspector);
  String s1 = "Test_STR";
  Object o1 = c1.convert(s1);
  Object o2 = c1.convert(s1);
  Assert.assertNotSame(o1, o2);
}
 
Example 3
Source File: HiveTextReader.java    From dremio-oss with Apache License 2.0
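This reader fills a batch of value vectors from a text split: each row is deserialized with the partition SerDe and, when the partition and table schemas differ, passed through the partition-to-table converter before its fields are copied out. (dremio-oss also contains a near-identical copy of this method in which the partition SerDe is typed as AbstractSerDe rather than SerDe.)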
@Override
public int populateData() throws IOException, SerDeException {
  final SkipRecordsInspector skipRecordsInspector = this.skipRecordsInspector;
  final RecordReader<Object, Object> reader = this.reader;
  final Converter partTblObjectInspectorConverter = this.partTblObjectInspectorConverter;
  final Object key = this.key;

  final int numRowsPerBatch = (int) this.numRowsPerBatch;

  final StructField[] selectedStructFieldRefs = this.selectedStructFieldRefs;
  final SerDe partitionSerDe = this.partitionSerDe;
  final StructObjectInspector finalOI = this.finalOI;
  final ObjectInspector[] selectedColumnObjInspectors = this.selectedColumnObjInspectors;
  final HiveFieldConverter[] selectedColumnFieldConverters = this.selectedColumnFieldConverters;
  final ValueVector[] vectors = this.vectors;

  skipRecordsInspector.reset();
  Object value;

  int recordCount = 0;

  while (recordCount < numRowsPerBatch) {
    try (OperatorStats.WaitRecorder recorder = OperatorStats.getWaitRecorder(this.context.getStats())) {
      boolean hasNext = reader.next(key, value = skipRecordsInspector.getNextValue());
      if (!hasNext) {
        break;
      }
    }
    catch(FSError e) {
      throw HadoopFileSystemWrapper.propagateFSError(e);
    }
    if (skipRecordsInspector.doSkipHeader(recordCount++)) {
      continue;
    }
    Object bufferedValue = skipRecordsInspector.bufferAdd(value);
    if (bufferedValue != null) {
      Object deSerializedValue = partitionSerDe.deserialize((Writable) bufferedValue);
      // Partition files may use a different schema than the table; convert the
      // deserialized row to the table's ObjectInspector layout when needed.
      if (partTblObjectInspectorConverter != null) {
        deSerializedValue = partTblObjectInspectorConverter.convert(deSerializedValue);
      }

      for (int i = 0; i < selectedStructFieldRefs.length; i++) {
        Object hiveValue = finalOI.getStructFieldData(deSerializedValue, selectedStructFieldRefs[i]);
        if (hiveValue != null) {
          selectedColumnFieldConverters[i].setSafeValue(selectedColumnObjInspectors[i], hiveValue, vectors[i], skipRecordsInspector.getActualCount());
        }
      }
      skipRecordsInspector.incrementActualCount();
    }
    skipRecordsInspector.incrementTempCount();
  }
  for (int i = 0; i < selectedStructFieldRefs.length; i++) {
    vectors[i].setValueCount(skipRecordsInspector.getActualCount());
  }

  skipRecordsInspector.updateContinuance();
  return skipRecordsInspector.getActualCount();
}
 
Example 4
Source File: HiveJsonStructReader.java    From incubator-hivemall with Apache License 2.0
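This helper parses a JSON string value into the primitive type described by the given ObjectInspector; when writable primitives are requested, the parsing is delegated to a string-to-target Converter.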
private Object getObjectOfCorrespondingPrimitiveType(String s, PrimitiveObjectInspector oi)
        throws IOException {
    PrimitiveTypeInfo typeInfo = oi.getTypeInfo();
    // In writable mode, let a Converter map the raw string to the writable
    // counterpart of the target primitive ObjectInspector.
    if (writeablePrimitives) {
        Converter c = ObjectInspectorConverters.getConverter(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector, oi);
        return c.convert(s);
    }

    switch (typeInfo.getPrimitiveCategory()) {
        case INT:
            return Integer.valueOf(s);
        case BYTE:
            return Byte.valueOf(s);
        case SHORT:
            return Short.valueOf(s);
        case LONG:
            return Long.valueOf(s);
        case BOOLEAN:
            return (s.equalsIgnoreCase("true"));
        case FLOAT:
            return Float.valueOf(s);
        case DOUBLE:
            return Double.valueOf(s);
        case STRING:
            return s;
        case BINARY:
            try {
                String t = Text.decode(s.getBytes(), 0, s.getBytes().length);
                return t.getBytes();
            } catch (CharacterCodingException e) {
                LOG.warn("Error generating json binary type from object.", e);
                return null;
            }
        case DATE:
            return Date.valueOf(s);
        case TIMESTAMP:
            return Timestamp.valueOf(s);
        case DECIMAL:
            return HiveDecimal.create(s);
        case VARCHAR:
            return new HiveVarchar(s, ((BaseCharTypeInfo) typeInfo).getLength());
        case CHAR:
            return new HiveChar(s, ((BaseCharTypeInfo) typeInfo).getLength());
        default:
            throw new IOException(
                "Could not convert from string to " + typeInfo.getPrimitiveCategory());
    }
}
 
Example 5
Source File: ArrayRemoveUDF.java    From incubator-hivemall with Apache License 2.0
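This overload converts the list argument to the element representation of values, then removes every element it contains.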
private static void removeAll(@Nonnull final List<?> values, @Nonnull final Object target,
        @Nonnull final Converter converter, @Nonnull final ListObjectInspector valueListOI) {
    Object converted = converter.convert(target);
    List<?> convertedList = valueListOI.getList(converted);
    values.removeAll(convertedList);
}
 
Example 6
Source File: ArrayRemoveUDF.java    From incubator-hivemall with Apache License 2.0
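This overload converts a single target value and removes all of its occurrences from the values list.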
private static void removeAll(@Nonnull final List<?> values, @Nonnull final Object target,
        @Nonnull final Converter converter) {
    Object converted = converter.convert(target);
    values.removeAll(Collections.singleton(converted));
}