org.apache.hadoop.hive.serde2.objectinspector.StructField Java Examples
The following examples show how to use
org.apache.hadoop.hive.serde2.objectinspector.StructField.
Each example is taken from an open-source project; the project, author, source file, and license are noted above the code.
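Most of the examples share a common pattern: obtain a StructObjectInspector for the row, look up a StructField by name or position, and then read the field's value and its ObjectInspector through that reference. The sketch below illustrates that pattern in isolation; the class and helper method names are hypothetical and not part of the Hive API.

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

// Minimal sketch (hypothetical class): read one column of a deserialized row via StructField.
public class StructFieldUsageSketch {

    static Object readField(StructObjectInspector rowOI, Object row, String columnName) {
        StructField field = rowOI.getStructFieldRef(columnName);    // resolve the field by column name
        ObjectInspector fieldOI = field.getFieldObjectInspector();  // inspector describing the field's type
        System.out.println(field.getFieldName() + ": " + fieldOI.getTypeName());
        return rowOI.getStructFieldData(row, field);                // extract the field's raw value from the row
    }
}

Depending on the SerDe, the returned object is typically a Hadoop Writable (for example IntWritable) or a Java primitive wrapper, as the tests below show.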
Example #1
Source Project: spatial-framework-for-hadoop Author: Esri File: TestEsriJsonSerDe.java License: Apache License 2.0

@Test
public void TestIntParse() throws Exception {
    Configuration config = new Configuration();
    Text value = new Text();
    AbstractSerDe jserde = new EsriJsonSerDe();
    Properties proptab = new Properties();
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "num");
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "int");
    jserde.initialize(config, proptab);
    StructObjectInspector rowOI = (StructObjectInspector) jserde.getObjectInspector();

    //value.set("{\"attributes\":{\"num\":7},\"geometry\":null}");
    value.set("{\"attributes\":{\"num\":7}}");
    Object row = jserde.deserialize(value);
    StructField f0 = rowOI.getStructFieldRef("num");
    Object fieldData = rowOI.getStructFieldData(row, f0);
    Assert.assertEquals(7, ((IntWritable) fieldData).get());

    value.set("{\"attributes\":{\"num\":9}}");
    row = jserde.deserialize(value);
    f0 = rowOI.getStructFieldRef("num");
    fieldData = rowOI.getStructFieldData(row, f0);
    Assert.assertEquals(9, ((IntWritable) fieldData).get());
}
Example #2
Source Project: presto Author: prestosql File: TestOrcReaderMemoryUsage.java License: Apache License 2.0

/**
 * Write a file that contains a number of rows with 1 VARCHAR column, and all values are not null.
 */
private static TempFile createSingleColumnVarcharFile(int count, int length)
        throws Exception
{
    Serializer serde = new OrcSerde();
    TempFile tempFile = new TempFile();
    FileSinkOperator.RecordWriter writer = createOrcRecordWriter(tempFile.getFile(), ORC_12, CompressionKind.NONE, VARCHAR);

    SettableStructObjectInspector objectInspector = createSettableStructObjectInspector("test", VARCHAR);
    Object row = objectInspector.create();
    StructField field = objectInspector.getAllStructFieldRefs().get(0);

    for (int i = 0; i < count; i++) {
        objectInspector.setStructFieldData(row, field, Strings.repeat("0", length));
        Writable record = serde.serialize(row, objectInspector);
        writer.write(record);
    }

    writer.close(false);
    return tempFile;
}
Example #3
Source Project: presto Author: prestosql File: OrcTester.java License: Apache License 2.0

public static DataSize writeOrcFileColumnHive(File outputFile, RecordWriter recordWriter, Type type, Iterator<?> values)
        throws Exception
{
    SettableStructObjectInspector objectInspector = createSettableStructObjectInspector("test", type);
    Object row = objectInspector.create();
    List<StructField> fields = ImmutableList.copyOf(objectInspector.getAllStructFieldRefs());
    Serializer serializer = new OrcSerde();

    while (values.hasNext()) {
        Object value = values.next();
        value = preprocessWriteValueHive(type, value);
        objectInspector.setStructFieldData(row, fields.get(0), value);

        Writable record = serializer.serialize(row, objectInspector);
        recordWriter.write(record);
    }

    recordWriter.close(false);
    return succinctBytes(outputFile.length());
}
Example #4
Source Project: presto Author: prestosql File: TestOrcReaderPositions.java License: Apache License 2.0

private static void createSequentialFile(File file, int count)
        throws IOException, SerDeException
{
    FileSinkOperator.RecordWriter writer = createOrcRecordWriter(file, ORC_12, CompressionKind.NONE, BIGINT);

    Serializer serde = new OrcSerde();
    SettableStructObjectInspector objectInspector = createSettableStructObjectInspector("test", BIGINT);
    Object row = objectInspector.create();
    StructField field = objectInspector.getAllStructFieldRefs().get(0);

    for (int i = 0; i < count; i++) {
        objectInspector.setStructFieldData(row, field, (long) i);
        Writable record = serde.serialize(row, objectInspector);
        writer.write(record);
    }

    writer.close(false);
}
Example #5
Source Project: Hive-XML-SerDe Author: dvasilen File: ObjectInspectorTest.java License: Apache License 2.0

public void testSimpleXmlByte() throws SerDeException {
    XmlSerDe xmlSerDe = new XmlSerDe();
    Configuration configuration = new Configuration();
    Properties properties = new Properties();
    properties.put(LIST_COLUMNS, "test");
    properties.put(LIST_COLUMN_TYPES, "tinyint");
    properties.setProperty("column.xpath.test", "/test/text()");
    xmlSerDe.initialize(configuration, properties);
    Text text = new Text();
    text.set("<test>14</test>");
    Object o = xmlSerDe.deserialize(text);
    XmlStructObjectInspector structInspector = ((XmlStructObjectInspector) xmlSerDe.getObjectInspector());
    StructField structField = structInspector.getStructFieldRef("test");
    Object data = structInspector.getStructFieldData(o, structField);
    assertEquals((byte) 14, ((Byte) data).byteValue());
}
Example #6
Source Project: hive-dwrf Author: facebookarchive File: OrcLazyStructObjectInspector.java License: Apache License 2.0

@Override
public boolean equals(Object o) {
    if (o == null || o.getClass() != getClass()) {
        return false;
    } else if (o == this) {
        return true;
    } else {
        List<StructField> other = ((OrcLazyStructObjectInspector) o).fields;
        if (other.size() != fields.size()) {
            return false;
        }
        for (int i = 0; i < fields.size(); ++i) {
            StructField left = other.get(i);
            StructField right = fields.get(i);
            if (!(left.getFieldName().equals(right.getFieldName()) &&
                  left.getFieldObjectInspector().equals(right.getFieldObjectInspector()))) {
                return false;
            }
        }
        return true;
    }
}
Example #7
Source Project: Hive-XML-SerDe Author: dvasilen File: ObjectInspectorTest.java License: Apache License 2.0

@SuppressWarnings("rawtypes")
public void testSimpleXmlNotMap() throws SerDeException {
    XmlSerDe xmlSerDe = new XmlSerDe();
    Configuration configuration = new Configuration();
    Properties properties = new Properties();
    properties.put(LIST_COLUMNS, "test");
    properties.put(LIST_COLUMN_TYPES, "map<string,string>");
    properties.setProperty("column.xpath.test", "//*[contains(name(),'test')]/text()");
    xmlSerDe.initialize(configuration, properties);
    Text text = new Text();
    text.set("<root><test1>string1</test1><test2>string2</test2></root>");
    Object o = xmlSerDe.deserialize(text);
    XmlStructObjectInspector structInspector = ((XmlStructObjectInspector) xmlSerDe.getObjectInspector());
    StructField structField = structInspector.getStructFieldRef("test");
    Object data = structInspector.getStructFieldData(o, structField);
    XmlMapObjectInspector fieldInspector = (XmlMapObjectInspector) structField.getFieldObjectInspector();
    Map map = fieldInspector.getMap(data);
    assertEquals(0, map.size());
}
Example #8
Source Project: Hive-XML-SerDe Author: dvasilen File: XmlStructObjectInspector.java License: Apache License 2.0

/**
 * @see org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector#getStructFieldData(java.lang.Object,
 *      org.apache.hadoop.hive.serde2.objectinspector.StructField)
 */
@SuppressWarnings("unchecked")
@Override
public Object getStructFieldData(Object data, StructField structField) {
    if ((data instanceof List) && !(data instanceof SerDeArray)) {
        MyField f = (MyField) structField;
        int fieldID = f.getFieldID();
        return ((List<Object>) data).get(fieldID);
    } else {
        ObjectInspector fieldObjectInspector = structField.getFieldObjectInspector();
        Category category = fieldObjectInspector.getCategory();
        Object fieldData = this.xmlProcessor.getObjectValue(data, structField.getFieldName());
        switch (category) {
            case PRIMITIVE: {
                PrimitiveObjectInspector primitiveObjectInspector = (PrimitiveObjectInspector) fieldObjectInspector;
                PrimitiveCategory primitiveCategory = primitiveObjectInspector.getPrimitiveCategory();
                return this.xmlProcessor.getPrimitiveObjectValue(fieldData, primitiveCategory);
            }
            default:
                return fieldData;
        }
    }
}
Example #9
Source Project: spatial-framework-for-hadoop Author: Esri File: TestEsriJsonSerDe.java License: Apache License 2.0

@Test
public void TestEpochParse() throws Exception {
    Configuration config = new Configuration();
    Text value = new Text();
    AbstractSerDe jserde = new EsriJsonSerDe();
    Properties proptab = new Properties();
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "when");
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "date");
    jserde.initialize(config, proptab);
    StructObjectInspector rowOI = (StructObjectInspector) jserde.getObjectInspector();

    value.set("{\"attributes\":{\"when\":147147147147}}");
    Object row = jserde.deserialize(value);
    StructField f0 = rowOI.getStructFieldRef("when");
    Object fieldData = rowOI.getStructFieldData(row, f0);
    //Assert.assertEquals(147147147147L, ((DateWritable)fieldData).get().getTime());
    Assert.assertEquals(new java.sql.Date(147147147147L).toString(),
                        ((DateWritable) fieldData).get().toString());

    value.set("{\"attributes\":{\"when\":142857142857}}");
    row = jserde.deserialize(value);
    fieldData = rowOI.getStructFieldData(row, f0);
    //Assert.assertEquals(142857142857L, ((DateWritable)fieldData).get());
    Assert.assertEquals(new java.sql.Date(142857142857L).toString(),
                        ((DateWritable) fieldData).get().toString());
}
Example #10
Source Project: incubator-hivemall Author: apache File: MaxRowUDAF.java License: Apache License 2.0

private ObjectInspector initReduceSide(StructObjectInspector inputStructOI) throws HiveException {
    List<? extends StructField> fields = inputStructOI.getAllStructFieldRefs();
    int length = fields.size();
    this.inputStructOI = inputStructOI;
    this.inputOIs = new ObjectInspector[length];
    this.outputOIs = new ObjectInspector[length];
    for (int i = 0; i < length; i++) {
        StructField field = fields.get(i);
        ObjectInspector oi = field.getFieldObjectInspector();
        inputOIs[i] = oi;
        outputOIs[i] = ObjectInspectorUtils.getStandardObjectInspector(oi);
    }
    return ObjectInspectorUtils.getStandardObjectInspector(inputStructOI);
}
Example #11
Source Project: hive-dwrf Author: facebookarchive File: OrcStruct.java License: Apache License 2.0

@Override
public boolean equals(Object o) {
    if (o == null || o.getClass() != getClass()) {
        return false;
    } else if (o == this) {
        return true;
    } else {
        List<StructField> other = ((OrcStructInspector) o).fields;
        if (other.size() != fields.size()) {
            return false;
        }
        for (int i = 0; i < fields.size(); ++i) {
            StructField left = other.get(i);
            StructField right = fields.get(i);
            if (!(left.getFieldName().equals(right.getFieldName()) &&
                  left.getFieldObjectInspector().equals(right.getFieldObjectInspector()))) {
                return false;
            }
        }
        return true;
    }
}
Example #12
Source Project: hive-dwrf Author: facebookarchive File: OrcLazyStructObjectInspector.java License: Apache License 2.0

@Override
public String getTypeName() {
    StringBuilder buffer = new StringBuilder();
    buffer.append("struct<");
    for (int i = 0; i < fields.size(); ++i) {
        StructField field = fields.get(i);
        if (i != 0) {
            buffer.append(",");
        }
        buffer.append(field.getFieldName());
        buffer.append(":");
        buffer.append(field.getFieldObjectInspector().getTypeName());
    }
    buffer.append(">");
    return buffer.toString();
}
Example #13
Source Project: flink Author: flink-tpc-ds File: HiveTableInputFormat.java License: Apache License 2.0

@Override
public Row nextRecord(Row ignore) throws IOException {
    if (reachedEnd()) {
        return null;
    }
    Row row = new Row(rowArity);
    try {
        // Use HiveDeserializer to deserialize an object out of a Writable blob
        Object hiveRowStruct = deserializer.deserialize(value);
        int index = 0;
        for (; index < structFields.size(); index++) {
            StructField structField = structFields.get(index);
            Object object = HiveInspectors.toFlinkObject(structField.getFieldObjectInspector(),
                    structObjectInspector.getStructFieldData(hiveRowStruct, structField));
            row.setField(index, object);
        }
        for (String partition : partitionColNames) {
            row.setField(index++, hiveTablePartition.getPartitionSpec().get(partition));
        }
    } catch (Exception e) {
        logger.error("Error happens when converting hive data type to flink data type.");
        throw new FlinkHiveException(e);
    }
    this.fetched = false;
    return row;
}
Example #14
Source Project: bigdata-tutorial Author: micmiu File: JSONCDHSerDe.java License: Apache License 2.0

/**
 * Deparses struct data into a serializable JSON object.
 *
 * @param obj - Hive struct data
 * @param structOI - ObjectInspector for the struct
 * @param isRow - Whether or not this struct represents a top-level row
 * @return - A deparsed struct
 */
private Object deparseStruct(Object obj, StructObjectInspector structOI, boolean isRow) {
    Map<Object, Object> struct = new HashMap<Object, Object>();
    List<? extends StructField> fields = structOI.getAllStructFieldRefs();
    for (int i = 0; i < fields.size(); i++) {
        StructField field = fields.get(i);
        // The top-level row object is treated slightly differently from other
        // structs, because the field names for the row do not correctly reflect
        // the Hive column names. For lower-level structs, we can get the field
        // name from the associated StructField object.
        String fieldName = isRow ? colNames.get(i) : field.getFieldName();
        ObjectInspector fieldOI = field.getFieldObjectInspector();
        Object fieldObj = structOI.getStructFieldData(obj, field);
        struct.put(fieldName, deparseObject(fieldObj, fieldOI));
    }
    return struct;
}
Example #15
Source Project: incubator-datasketches-hive Author: apache File: GetFrequentItemsFromStringsSketchUDTFTest.java License: Apache License 2.0

private static void checkResultInspector(ObjectInspector resultInspector) {
    Assert.assertNotNull(resultInspector);
    Assert.assertEquals(resultInspector.getCategory(), ObjectInspector.Category.STRUCT);
    List<? extends StructField> fields = ((StructObjectInspector) resultInspector).getAllStructFieldRefs();
    Assert.assertEquals(fields.size(), 4);

    Assert.assertEquals(fields.get(0).getFieldObjectInspector().getCategory(), ObjectInspector.Category.PRIMITIVE);
    Assert.assertEquals(
        ((PrimitiveObjectInspector) fields.get(0).getFieldObjectInspector()).getPrimitiveCategory(),
        PrimitiveObjectInspector.PrimitiveCategory.STRING
    );

    Assert.assertEquals(fields.get(1).getFieldObjectInspector().getCategory(), ObjectInspector.Category.PRIMITIVE);
    Assert.assertEquals(
        ((PrimitiveObjectInspector) fields.get(1).getFieldObjectInspector()).getPrimitiveCategory(),
        PrimitiveObjectInspector.PrimitiveCategory.LONG
    );

    Assert.assertEquals(fields.get(2).getFieldObjectInspector().getCategory(), ObjectInspector.Category.PRIMITIVE);
    Assert.assertEquals(
        ((PrimitiveObjectInspector) fields.get(2).getFieldObjectInspector()).getPrimitiveCategory(),
        PrimitiveObjectInspector.PrimitiveCategory.LONG
    );

    Assert.assertEquals(fields.get(3).getFieldObjectInspector().getCategory(), ObjectInspector.Category.PRIMITIVE);
    Assert.assertEquals(
        ((PrimitiveObjectInspector) fields.get(3).getFieldObjectInspector()).getPrimitiveCategory(),
        PrimitiveObjectInspector.PrimitiveCategory.LONG
    );
}
Example #16
Source Project: Hive-XML-SerDe Author: dvasilen File: ObjectInspectorTest.java License: Apache License 2.0

public void testSimpleXmlLong() throws SerDeException {
    XmlSerDe xmlSerDe = new XmlSerDe();
    Configuration configuration = new Configuration();
    Properties properties = new Properties();
    properties.put(LIST_COLUMNS, "test");
    properties.put(LIST_COLUMN_TYPES, "bigint");
    properties.setProperty("column.xpath.test", "/test/text()");
    xmlSerDe.initialize(configuration, properties);
    Text text = new Text();
    text.set("<test>123456</test>");
    Object o = xmlSerDe.deserialize(text);
    XmlStructObjectInspector structInspector = ((XmlStructObjectInspector) xmlSerDe.getObjectInspector());
    StructField structField = structInspector.getStructFieldRef("test");
    Object data = structInspector.getStructFieldData(o, structField);
    assertEquals(123456L, data);
}
Example #17
Source Project: incubator-datasketches-hive Author: apache File: DataToDoubleSummaryWithModeSketchUDAFTest.java License: Apache License 2.0

static void checkIntermediateResultInspector(ObjectInspector resultInspector) {
    Assert.assertNotNull(resultInspector);
    Assert.assertEquals(resultInspector.getCategory(), ObjectInspector.Category.STRUCT);
    StructObjectInspector structResultInspector = (StructObjectInspector) resultInspector;
    List<?> fields = structResultInspector.getAllStructFieldRefs();
    Assert.assertEquals(fields.size(), 3);

    ObjectInspector inspector1 = ((StructField) fields.get(0)).getFieldObjectInspector();
    Assert.assertEquals(inspector1.getCategory(), ObjectInspector.Category.PRIMITIVE);
    PrimitiveObjectInspector primitiveInspector1 = (PrimitiveObjectInspector) inspector1;
    Assert.assertEquals(primitiveInspector1.getPrimitiveCategory(), PrimitiveCategory.INT);

    ObjectInspector inspector2 = ((StructField) fields.get(1)).getFieldObjectInspector();
    Assert.assertEquals(inspector2.getCategory(), ObjectInspector.Category.PRIMITIVE);
    PrimitiveObjectInspector primitiveInspector2 = (PrimitiveObjectInspector) inspector2;
    Assert.assertEquals(primitiveInspector2.getPrimitiveCategory(), PrimitiveCategory.STRING);

    ObjectInspector inspector3 = ((StructField) fields.get(2)).getFieldObjectInspector();
    Assert.assertEquals(inspector3.getCategory(), ObjectInspector.Category.PRIMITIVE);
    PrimitiveObjectInspector primitiveInspector3 = (PrimitiveObjectInspector) inspector3;
    Assert.assertEquals(primitiveInspector3.getPrimitiveCategory(), PrimitiveCategory.BINARY);
}
Example #18
Source Project: incubator-datasketches-hive Author: apache File: IntersectSketchUDAFTest.java License: Apache License 2.0

private static void checkIntermediateResultInspector(ObjectInspector resultInspector) {
    Assert.assertNotNull(resultInspector);
    Assert.assertEquals(resultInspector.getCategory(), ObjectInspector.Category.STRUCT);
    StructObjectInspector structResultInspector = (StructObjectInspector) resultInspector;
    List<?> fields = structResultInspector.getAllStructFieldRefs();
    Assert.assertEquals(fields.size(), 2);

    ObjectInspector inspector1 = ((StructField) fields.get(0)).getFieldObjectInspector();
    Assert.assertEquals(inspector1.getCategory(), ObjectInspector.Category.PRIMITIVE);
    PrimitiveObjectInspector primitiveInspector1 = (PrimitiveObjectInspector) inspector1;
    Assert.assertEquals(primitiveInspector1.getPrimitiveCategory(), PrimitiveCategory.LONG);

    ObjectInspector inspector2 = ((StructField) fields.get(1)).getFieldObjectInspector();
    Assert.assertEquals(inspector2.getCategory(), ObjectInspector.Category.PRIMITIVE);
    PrimitiveObjectInspector primitiveInspector2 = (PrimitiveObjectInspector) inspector2;
    Assert.assertEquals(primitiveInspector2.getPrimitiveCategory(), PrimitiveCategory.BINARY);
}
Example #19
Source Project: parquet-mr Author: apache File: ArrayWritableObjectInspector.java License: Apache License 2.0

@Override
public Object getStructFieldData(final Object data, final StructField fieldRef) {
    if (data == null) {
        return null;
    }
    if (data instanceof ArrayWritable) {
        final ArrayWritable arr = (ArrayWritable) data;
        return arr.get()[((StructFieldImpl) fieldRef).getIndex()];
    }
    // since setStructFieldData and create return a list, getStructFieldData should be able to
    // handle list data. This is required when table serde is ParquetHiveSerDe and partition serde
    // is something else.
    if (data instanceof List) {
        return ((List) data).get(((StructFieldImpl) fieldRef).getIndex());
    }
    throw new UnsupportedOperationException("Cannot inspect " + data.getClass().getCanonicalName());
}
Example #20
Source Project: flink Author: apache File: HiveMapredSplitReader.java License: Apache License 2.0

@Override
@SuppressWarnings("unchecked")
public RowData nextRecord(RowData reuse) throws IOException {
    if (reachedEnd()) {
        return null;
    }
    try {
        // Use HiveDeserializer to deserialize an object out of a Writable blob
        Object hiveRowStruct = deserializer.deserialize(value);
        for (int i = 0; i < selectedFields.length; i++) {
            // set non-partition columns
            if (selectedFields[i] < structFields.size()) {
                StructField structField = structFields.get(selectedFields[i]);
                Object object = HiveInspectors.toFlinkObject(structField.getFieldObjectInspector(),
                        structObjectInspector.getStructFieldData(hiveRowStruct, structField), hiveShim);
                row.setField(i, converters[i].toInternal(object));
            }
        }
    } catch (Exception e) {
        LOG.error("Error happens when converting hive data type to flink data type.");
        throw new FlinkHiveException(e);
    }
    this.fetched = false;
    return row;
}
Example #21
Source Project: dremio-oss Author: dremio File: HiveORCVectorizedReader.java License: Apache License 2.0

/**
 * Helper method that creates {@link VectorizedRowBatch}. For each selected column an input vector is created in the
 * batch. For unselected columns the vector entry is going to be null. The order of input vectors in batch should
 * match the order the columns in ORC file.
 *
 * @param rowOI Used to find the ordinal of the selected column.
 * @return
 */
private VectorizedRowBatch createVectorizedRowBatch(StructObjectInspector rowOI, boolean isOriginal) {
    final List<? extends StructField> fieldRefs = rowOI.getAllStructFieldRefs();
    final List<ColumnVector> vectors = getVectors(rowOI);
    final VectorizedRowBatch result = new VectorizedRowBatch(fieldRefs.size());
    ColumnVector[] vectorArray = vectors.toArray(new ColumnVector[0]);
    if (!isOriginal) {
        vectorArray = createTransactionalVectors(vectorArray);
    }
    result.cols = vectorArray;
    result.numCols = fieldRefs.size();
    result.reset();
    return result;
}
Example #22
Source Project: incubator-datasketches-hive Author: apache File: DataToSketchUDAFTest.java License: Apache License 2.0

static void checkIntermediateResultInspector(ObjectInspector resultInspector) {
    Assert.assertNotNull(resultInspector);
    Assert.assertEquals(resultInspector.getCategory(), ObjectInspector.Category.STRUCT);
    StructObjectInspector structResultInspector = (StructObjectInspector) resultInspector;
    List<?> fields = structResultInspector.getAllStructFieldRefs();
    Assert.assertEquals(fields.size(), 3);

    ObjectInspector inspector1 = ((StructField) fields.get(0)).getFieldObjectInspector();
    Assert.assertEquals(inspector1.getCategory(), ObjectInspector.Category.PRIMITIVE);
    PrimitiveObjectInspector primitiveInspector1 = (PrimitiveObjectInspector) inspector1;
    Assert.assertEquals(primitiveInspector1.getPrimitiveCategory(), PrimitiveCategory.INT);

    ObjectInspector inspector2 = ((StructField) fields.get(1)).getFieldObjectInspector();
    Assert.assertEquals(inspector2.getCategory(), ObjectInspector.Category.PRIMITIVE);
    PrimitiveObjectInspector primitiveInspector2 = (PrimitiveObjectInspector) inspector2;
    Assert.assertEquals(primitiveInspector2.getPrimitiveCategory(), PrimitiveCategory.STRING);

    ObjectInspector inspector3 = ((StructField) fields.get(2)).getFieldObjectInspector();
    Assert.assertEquals(inspector3.getCategory(), ObjectInspector.Category.PRIMITIVE);
    PrimitiveObjectInspector primitiveInspector3 = (PrimitiveObjectInspector) inspector3;
    Assert.assertEquals(primitiveInspector3.getPrimitiveCategory(), PrimitiveCategory.BINARY);
}
Example #23
Source Project: Hive-XML-SerDe Author: dvasilen File: ObjectInspectorTest.java License: Apache License 2.0

public void testSimpleXmlInt() throws SerDeException {
    XmlSerDe xmlSerDe = new XmlSerDe();
    Configuration configuration = new Configuration();
    Properties properties = new Properties();
    properties.put(LIST_COLUMNS, "test");
    properties.put(LIST_COLUMN_TYPES, "int");
    properties.setProperty("column.xpath.test", "/test/text()");
    xmlSerDe.initialize(configuration, properties);
    Text text = new Text();
    text.set("<test>23</test>");
    Object o = xmlSerDe.deserialize(text);
    XmlStructObjectInspector structInspector = ((XmlStructObjectInspector) xmlSerDe.getObjectInspector());
    StructField structField = structInspector.getStructFieldRef("test");
    Object data = structInspector.getStructFieldData(o, structField);
    assertEquals(23, data);
}
Example #24
Source Project: spatial-framework-for-hadoop Author: Esri File: TestGeoJsonSerDe.java License: Apache License 2.0

@Test
public void TestIntParse() throws Exception {
    Configuration config = new Configuration();
    Text value = new Text();
    AbstractSerDe jserde = new GeoJsonSerDe();
    Properties proptab = new Properties();
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "num");
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "int");
    jserde.initialize(config, proptab);
    StructObjectInspector rowOI = (StructObjectInspector) jserde.getObjectInspector();

    value.set("{\"properties\":{\"num\":7}}");
    Object row = jserde.deserialize(value);
    StructField f0 = rowOI.getStructFieldRef("num");
    Object fieldData = rowOI.getStructFieldData(row, f0);
    Assert.assertEquals(7, ((IntWritable) fieldData).get());

    value.set("{\"properties\":{\"num\":9}}");
    row = jserde.deserialize(value);
    f0 = rowOI.getStructFieldRef("num");
    fieldData = rowOI.getStructFieldData(row, f0);
    Assert.assertEquals(9, ((IntWritable) fieldData).get());
}
Example #25
Source Project: spork Author: sigmoidanalytics File: OrcUtils.java License: Apache License 2.0

@Override
public String getTypeName() {
    StringBuilder buffer = new StringBuilder();
    buffer.append("struct<");
    for (int i = 0; i < fields.size(); ++i) {
        StructField field = fields.get(i);
        if (i != 0) {
            buffer.append(",");
        }
        buffer.append(field.getFieldName());
        buffer.append(":");
        buffer.append(field.getFieldObjectInspector().getTypeName());
    }
    buffer.append(">");
    return buffer.toString();
}
Example #26
Source Project: Hive-XML-SerDe Author: dvasilen File: ObjectInspectorTest.java License: Apache License 2.0

public void testSimpleXmlFloat() throws SerDeException {
    XmlSerDe xmlSerDe = new XmlSerDe();
    Configuration configuration = new Configuration();
    Properties properties = new Properties();
    properties.put(LIST_COLUMNS, "test");
    properties.put(LIST_COLUMN_TYPES, "float");
    properties.setProperty("column.xpath.test", "/test/text()");
    xmlSerDe.initialize(configuration, properties);
    Text text = new Text();
    text.set("<test>123.456</test>");
    Object o = xmlSerDe.deserialize(text);
    XmlStructObjectInspector structInspector = ((XmlStructObjectInspector) xmlSerDe.getObjectInspector());
    StructField structField = structInspector.getStructFieldRef("test");
    Object data = structInspector.getStructFieldData(o, structField);
    assertEquals(123.456f, data);
}
Example #27
Source Project: hive-dwrf Author: facebookarchive File: OrcStruct.java License: Apache License 2.0

@Override
public String getTypeName() {
    StringBuilder buffer = new StringBuilder();
    buffer.append("struct<");
    for (int i = 0; i < fields.size(); ++i) {
        StructField field = fields.get(i);
        if (i != 0) {
            buffer.append(",");
        }
        buffer.append(field.getFieldName());
        buffer.append(":");
        buffer.append(field.getFieldObjectInspector().getTypeName());
    }
    buffer.append(">");
    return buffer.toString();
}
Example #28
Source Project: emr-dynamodb-connector Author: awslabs File: DynamoDBSerDe.java License: Apache License 2.0

private void validateData(List<? extends StructField> fields, List<Object> rowData) {
    if (rowData == null) {
        throw new RuntimeException("No data found in the row.");
    }
    if (fields == null) {
        throw new RuntimeException("Field information not available");
    }
    if (rowData.size() != fields.size()) {
        throw new RuntimeException("Number of data objects do not match number of columns. Data: " + rowData);
    }
}
Example #29
Source Project: presto Author: prestosql File: OrcTester.java License: Apache License 2.0

private static void assertFileContentsOrcHive(
        Type type,
        TempFile tempFile,
        Iterable<?> expectedValues)
        throws Exception
{
    JobConf configuration = new JobConf(new Configuration(false));
    configuration.set(READ_COLUMN_IDS_CONF_STR, "0");
    configuration.setBoolean(READ_ALL_COLUMNS, false);

    Reader reader = OrcFile.createReader(
            new Path(tempFile.getFile().getAbsolutePath()),
            new ReaderOptions(configuration));
    RecordReader recordReader = reader.rows();

    StructObjectInspector rowInspector = (StructObjectInspector) reader.getObjectInspector();
    StructField field = rowInspector.getStructFieldRef("test");

    Iterator<?> iterator = expectedValues.iterator();
    Object rowData = null;
    while (recordReader.hasNext()) {
        rowData = recordReader.next(rowData);
        Object expectedValue = iterator.next();

        Object actualValue = rowInspector.getStructFieldData(rowData, field);
        actualValue = decodeRecordReaderValue(type, actualValue);
        assertColumnValueEquals(type, actualValue, expectedValue);
    }
    assertFalse(iterator.hasNext());
}
Example #30
Source Project: spatial-framework-for-hadoop Author: Esri File: TestEsriJsonSerDe.java License: Apache License 2.0

@Test
public void TestTimeParse() throws Exception {
    Configuration config = new Configuration();
    Text value = new Text();
    AbstractSerDe jserde = new EsriJsonSerDe();
    Properties proptab = new Properties();
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMNS, "when");
    proptab.setProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES, "timestamp");
    jserde.initialize(config, proptab);
    StructObjectInspector rowOI = (StructObjectInspector) jserde.getObjectInspector();

    value.set("{\"attributes\":{\"when\":\"2020-02-20\"}}");
    Object row = jserde.deserialize(value);
    StructField f0 = rowOI.getStructFieldRef("when");
    Object fieldData = rowOI.getStructFieldData(row, f0);
    Assert.assertEquals(new java.text.SimpleDateFormat("yyyy-MM-dd").parse("2020-02-20").getTime(),
                        ((TimestampWritable) fieldData).getTimestamp().getTime());

    value.set("{\"attributes\":{\"when\":\"2017-05-05 05:05\"}}");
    row = jserde.deserialize(value);
    fieldData = rowOI.getStructFieldData(row, f0);
    Assert.assertEquals(new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm").parse("2017-05-05 05:05").getTime(),
                        ((TimestampWritable) fieldData).getTimestamp().getTime());

    value.set("{\"attributes\":{\"when\":\"2017-08-09 10:11:12\"}}");
    row = jserde.deserialize(value);
    fieldData = rowOI.getStructFieldData(row, f0);
    Assert.assertEquals(new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm:ss").parse("2017-08-09 10:11:12").getTime(),
                        ((TimestampWritable) fieldData).getTimestamp().getTime());

    value.set("{\"attributes\":{\"when\":\"2017-06-05 04:03:02.123456789\"}}");
    row = jserde.deserialize(value);
    fieldData = rowOI.getStructFieldData(row, f0);
    Assert.assertEquals(new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS").parse("2017-06-05 04:03:02.123").getTime(),
                        ((TimestampWritable) fieldData).getTimestamp().getTime());  // ns parsed but not checked
}