org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils Java Examples

The following examples show how to use org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils. Each example is drawn from an open-source project; the source file, project, and license are noted above the code.
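Before the examples, here is a minimal, self-contained sketch (an illustration, not taken from any of the projects below) of the round trip most snippets rely on: parsing a Hive type string into a TypeInfo and obtaining standard ObjectInspectors for it.

import java.util.List;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class TypeInfoUtilsDemo {
    public static void main(String[] args) {
        // Parse a single Hive type string into a TypeInfo tree.
        TypeInfo mapType = TypeInfoUtils.getTypeInfoFromTypeString("map<string,int>");

        // Parse several comma-separated type strings at once, as stored in table properties.
        List<TypeInfo> columnTypes =
                TypeInfoUtils.getTypeInfosFromTypeString("bigint,array<double>,struct<a:string,b:int>");

        // Obtain inspectors for either the Java object model or the Hadoop Writable model.
        ObjectInspector javaOI = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(mapType);
        ObjectInspector writableOI = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(mapType);

        System.out.println(mapType.getTypeName());    // map<string,int>
        System.out.println(columnTypes.size());       // 3
        System.out.println(javaOI.getTypeName());     // map<string,int>
        System.out.println(writableOI.getTypeName()); // map<string,int>
    }
}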
Example #1
Source File: HiveUtils.java    From elasticsearch-hadoop with Apache License 2.0
static StandardStructObjectInspector structObjectInspector(Properties tableProperties) {
    // extract column info - don't use Hive constants as they were renamed in 0.9, breaking compatibility
    // the column names are saved because the inspector passed to #serialize doesn't preserve them (maybe because it's an external table)
    // use the class since StructType requires it ...
    List<String> columnNames = StringUtils.tokenize(tableProperties.getProperty(HiveConstants.COLUMNS), ",");
    List<TypeInfo> colTypes = TypeInfoUtils.getTypeInfosFromTypeString(tableProperties.getProperty(HiveConstants.COLUMNS_TYPES));

    // create a standard writable Object Inspector - used later on by serialization/deserialization
    List<ObjectInspector> inspectors = new ArrayList<ObjectInspector>();

    for (TypeInfo typeInfo : colTypes) {
        inspectors.add(TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo));
    }

    return ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, inspectors);
}
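A hedged usage sketch for the method above: the property keys "columns" and "columns.types" are assumed to be the values behind HiveConstants.COLUMNS and HiveConstants.COLUMNS_TYPES (they mirror the standard Hive table properties), the schema is invented, and the call assumes package-level access since the method is package-private.

Properties tableProperties = new Properties();
tableProperties.setProperty("columns", "id,name");             // assumed value of HiveConstants.COLUMNS
tableProperties.setProperty("columns.types", "bigint,string"); // assumed value of HiveConstants.COLUMNS_TYPES

StandardStructObjectInspector inspector = HiveUtils.structObjectInspector(tableProperties);
for (StructField field : inspector.getAllStructFieldRefs()) {
    System.out.println(field.getFieldName() + " : " + field.getFieldObjectInspector().getTypeName());
}
// id : bigint
// name : string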
 
Example #2
Source File: JsonSerdeUtilsTest.java    From incubator-hivemall with Apache License 2.0
@Test
public void testMapValues() throws SerDeException {
    List<String> columnNames = Arrays.asList("a,b".split(","));
    List<TypeInfo> columnTypes =
            TypeInfoUtils.getTypeInfosFromTypeString("array<string>,map<string,int>");

    Text text1 = new Text("{ \"a\":[\"aaa\"],\"b\":{\"bbb\":1}} ");
    Text text2 = new Text("{\"a\":[\"yyy\"],\"b\":{\"zzz\":123}}");
    Text text3 = new Text("{\"a\":[\"a\"],\"b\":{\"x\":11, \"y\": 22, \"z\": null}}");

    List<Object> expected1 = Arrays.<Object>asList(Arrays.<String>asList("aaa"),
        createHashMapStringInteger("bbb", 1));
    List<Object> expected2 = Arrays.<Object>asList(Arrays.<String>asList("yyy"),
        createHashMapStringInteger("zzz", 123));
    List<Object> expected3 = Arrays.<Object>asList(Arrays.<String>asList("a"),
        createHashMapStringInteger("x", 11, "y", 22, "z", null));

    List<Object> result1 = JsonSerdeUtils.deserialize(text1, columnNames, columnTypes);
    List<Object> result2 = JsonSerdeUtils.deserialize(text2, columnNames, columnTypes);
    List<Object> result3 = JsonSerdeUtils.deserialize(text3, columnNames, columnTypes);

    Assert.assertEquals(expected1, result1);
    Assert.assertEquals(expected2, result2);
    Assert.assertEquals(expected3, result3);
}
 
Example #3
Source File: HiveTypeConverter.java    From metacat with Apache License 2.0
@Override
public Type toMetacatType(final String type) {
    // Hack to fix Presto "varchar" types arriving with no length, which Hive requires.
    final TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(
        "varchar".equals(type.toLowerCase()) ? serdeConstants.STRING_TYPE_NAME : type);
    ObjectInspector oi = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(typeInfo);
    // The standard struct object inspector forces field names to lower case. In Metacat we need to
    // preserve the original case of the struct fields, so we wrap the inspector with our own wrapper
    // that keeps the field names in their original case.
    if (typeInfo.getCategory().equals(ObjectInspector.Category.STRUCT)) {
        final StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
        final StandardStructObjectInspector objectInspector = (StandardStructObjectInspector) oi;
        oi = new HiveTypeConverter.SameCaseStandardStructObjectInspector(
            structTypeInfo.getAllStructFieldNames(), objectInspector);
    }
    return getCanonicalType(oi);
}
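A small sketch of the behavior that comment works around (hedged: this reflects the standard Hive inspectors, where the parsed StructTypeInfo keeps the original field-name case while StandardStructObjectInspector lower-cases it):

StructTypeInfo typeInfo = (StructTypeInfo)
        TypeInfoUtils.getTypeInfoFromTypeString("struct<UserId:bigint>");
System.out.println(typeInfo.getAllStructFieldNames());  // [UserId] - case preserved in the TypeInfo

StandardStructObjectInspector structOI = (StandardStructObjectInspector)
        TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(typeInfo);
System.out.println(structOI.getAllStructFieldRefs().get(0).getFieldName());  // userid - lower-cased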
 
Example #4
Source File: JsonSerdeUtilsTest.java    From incubator-hivemall with Apache License 2.0
@Test
public void testTopLevelArray() throws Exception {
    List<String> expected1 = Arrays.asList("Taro", "Tanaka");
    Text json1 = new Text("[\"Taro\",\"Tanaka\"]");
    TypeInfo type1 = TypeInfoUtils.getTypeInfoFromTypeString("array<string>");

    List<Object> deserialized1 = JsonSerdeUtils.deserialize(json1, type1);
    assertRecordEquals(expected1, deserialized1);
    Text serialized1 = JsonSerdeUtils.serialize(deserialized1,
        HCatRecordObjectInspectorFactory.getStandardObjectInspectorFromTypeInfo(type1));
    Assert.assertEquals(json1, serialized1);

    List<Double> expected2 = Arrays.asList(1.1d, 2.2d, 3.3d);
    Text json2 = new Text("[1.1,2.2,3.3]");
    TypeInfo type2 = TypeInfoUtils.getTypeInfoFromTypeString("array<double>");

    List<Object> deserialized2 = JsonSerdeUtils.deserialize(json2, type2);
    assertRecordEquals(expected2, deserialized2);
    Text serialized2 = JsonSerdeUtils.serialize(deserialized2,
        HCatRecordObjectInspectorFactory.getStandardObjectInspectorFromTypeInfo(type2));
    Assert.assertEquals(json2, serialized2);
}
 
Example #5
Source File: JsonSerdeUtilsTest.java    From incubator-hivemall with Apache License 2.0
@Test
public void testTopLevelPrimitive() throws Exception {
    Double expected1 = Double.valueOf(3.3);
    Text json1 = new Text("3.3");
    TypeInfo type1 = TypeInfoUtils.getTypeInfoFromTypeString("double");

    Object deserialized1 = JsonSerdeUtils.deserialize(json1, type1);
    Assert.assertEquals(expected1, deserialized1);
    Text serialized1 = JsonSerdeUtils.serialize(deserialized1,
        HCatRecordObjectInspectorFactory.getStandardObjectInspectorFromTypeInfo(type1));
    Assert.assertEquals(json1, serialized1);

    Boolean expected2 = Boolean.FALSE;
    Text json2 = new Text("false");

    Boolean deserialized2 = JsonSerdeUtils.deserialize(json2);
    Assert.assertEquals(expected2, deserialized2);
    Text serialized2 = JsonSerdeUtils.serialize(deserialized2,
        PrimitiveObjectInspectorFactory.javaBooleanObjectInspector);
    Assert.assertEquals(json2, serialized2);
}
 
Example #6
Source File: MDSSerde.java    From multiple-dimension-spread with Apache License 2.0
private StructTypeInfo getColumnProjectionTypeInfo( final String columnNameProperty , final String columnTypeProperty , final String projectionColumnNames ){
  Set<String> columnNameSet = new HashSet<String>();
  for( String columnName : projectionColumnNames.split(",") ){
    columnNameSet.add( columnName );
  }

  ArrayList<TypeInfo> fieldTypes = TypeInfoUtils.getTypeInfosFromTypeString( columnTypeProperty );
  String[] splitNames = columnNameProperty.split(",");

  ArrayList<String> projectionColumnNameList = new ArrayList<String>();
  ArrayList<TypeInfo> projectionFieldTypeList = new ArrayList<TypeInfo>();
  for( int i = 0 ; i < fieldTypes.size() ; i++ ){
    if( columnNameSet.contains( splitNames[i] ) ){
      projectionColumnNameList.add( splitNames[i] );
      projectionFieldTypeList.add( fieldTypes.get(i) );
    }
    filedIndexMap.put( splitNames[i] , i );
  }
  StructTypeInfo rootType = new StructTypeInfo();

  rootType.setAllStructFieldNames( projectionColumnNameList );
  rootType.setAllStructFieldTypeInfos( projectionFieldTypeList );

  return rootType;
}
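As a sketch of an equivalent design choice, the projected struct type could also come from TypeInfoFactory (the approach Example #7 below takes) instead of mutating a freshly constructed StructTypeInfo:

// Equivalent sketch using the factory, given the two projection lists built above:
StructTypeInfo rootType = (StructTypeInfo) TypeInfoFactory.getStructTypeInfo(
    projectionColumnNameList, projectionFieldTypeList);
return rootType;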
 
Example #7
Source File: JSONCDHSerDe.java    From bigdata-tutorial with Apache License 2.0
/**
 * An initialization function used to gather information about the table.
 * Typically, a SerDe implementation will be interested in the list of
 * column names and their types. That information will be used to help perform
 * actual serialization and deserialization of data.
 */
@Override
public void initialize(Configuration conf, Properties tbl)
		throws SerDeException {
	// Get a list of the table's column names.
	String colNamesStr = tbl.getProperty(serdeConstants.LIST_COLUMNS);
	colNames = Arrays.asList(colNamesStr.split(","));

	// Get a list of TypeInfos for the columns. This list lines up with
	// the list of column names.
	String colTypesStr = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
	List<TypeInfo> colTypes =
			TypeInfoUtils.getTypeInfosFromTypeString(colTypesStr);

	rowTypeInfo =
			(StructTypeInfo) TypeInfoFactory.getStructTypeInfo(colNames, colTypes);
	rowOI =
			TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(rowTypeInfo);
}
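A hedged sketch of how Hive drives such a SerDe: before calling initialize, it fills the table Properties with comma-separated column names under serdeConstants.LIST_COLUMNS and a colon-separated type string under serdeConstants.LIST_COLUMN_TYPES (the parser behind getTypeInfosFromTypeString accepts both ',' and ':' as top-level separators). The table layout here is invented:

Properties tbl = new Properties();
tbl.setProperty(serdeConstants.LIST_COLUMNS, "id,payload");
tbl.setProperty(serdeConstants.LIST_COLUMN_TYPES, "bigint:map<string,string>");

JSONCDHSerDe serde = new JSONCDHSerDe();
serde.initialize(new Configuration(), tbl);
// rowTypeInfo is now struct<id:bigint,payload:map<string,string>>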
 
Example #8
Source File: UDAFCollectAction.java    From 163-bigdate-note with GNU General Public License v3.0
@Override
public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
    // Check the number of arguments.
    if (parameters.length != 2) {
        throw new UDFArgumentTypeException(parameters.length - 1, "Two arguments are expected.");
    }

    ObjectInspector oi = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(parameters[0]);
//  ObjectInspector oi1 = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(parameters[1]);
    if (oi.getCategory() != ObjectInspector.Category.PRIMITIVE) {
        throw new UDFArgumentTypeException(0, "Argument must be PRIMITIVE, but "
            + oi.getCategory().name()
            + " was passed.");
    }

//  PrimitiveObjectInspector inputOI = (PrimitiveObjectInspector) oi;
//  if (inputOI.getPrimitiveCategory() != PrimitiveObjectInspector.PrimitiveCategory.STRING) {
//      throw new UDFArgumentTypeException(0, "Argument must be String, but "
//              + inputOI.getPrimitiveCategory().name()
//              + " was passed.");
//  }

    return new AllActionsOfThisPeople30MinBefore();
}
 
Example #9
Source File: HiveTableUtil.java    From flink with Apache License 2.0
/**
 * Create a Flink's TableSchema from Hive table's columns and partition keys.
 */
public static TableSchema createTableSchema(List<FieldSchema> cols, List<FieldSchema> partitionKeys,
		Set<String> notNullColumns, UniqueConstraint primaryKey) {
	List<FieldSchema> allCols = new ArrayList<>(cols);
	allCols.addAll(partitionKeys);

	String[] colNames = new String[allCols.size()];
	DataType[] colTypes = new DataType[allCols.size()];

	for (int i = 0; i < allCols.size(); i++) {
		FieldSchema fs = allCols.get(i);

		colNames[i] = fs.getName();
		colTypes[i] = HiveTypeUtil.toFlinkType(TypeInfoUtils.getTypeInfoFromTypeString(fs.getType()));
		if (notNullColumns.contains(colNames[i])) {
			colTypes[i] = colTypes[i].notNull();
		}
	}

	TableSchema.Builder builder = TableSchema.builder().fields(colNames, colTypes);
	if (primaryKey != null) {
		builder.primaryKey(primaryKey.getName(), primaryKey.getColumns().toArray(new String[0]));
	}
	return builder.build();
}
 
Example #10
Source File: TestNiFiOrcUtils.java    From localization_nifi with Apache License 2.0
@Test
public void test_getWritable() throws Exception {
    assertTrue(NiFiOrcUtils.convertToORCObject(null, 1) instanceof IntWritable);
    assertTrue(NiFiOrcUtils.convertToORCObject(null, 1L) instanceof LongWritable);
    assertTrue(NiFiOrcUtils.convertToORCObject(null, 1.0f) instanceof FloatWritable);
    assertTrue(NiFiOrcUtils.convertToORCObject(null, 1.0) instanceof DoubleWritable);
    assertTrue(NiFiOrcUtils.convertToORCObject(null, new int[]{1, 2, 3}) instanceof List);
    assertTrue(NiFiOrcUtils.convertToORCObject(null, Arrays.asList(1, 2, 3)) instanceof List);
    Map<String, Float> map = new HashMap<>();
    map.put("Hello", 1.0f);
    map.put("World", 2.0f);

    Object writable = NiFiOrcUtils.convertToORCObject(TypeInfoUtils.getTypeInfoFromTypeString("map<string,float>"), map);
    assertTrue(writable instanceof MapWritable);
    MapWritable mapWritable = (MapWritable) writable;
    mapWritable.forEach((key, value) -> {
        assertTrue(key instanceof Text);
        assertTrue(value instanceof FloatWritable);
    });
}
 
Example #11
Source File: HiveSerializationEventConverterTest.java    From elasticsearch-hadoop with Apache License 2.0
@Test
public void generateEventHiveRecordLimited() throws Exception {
    Map<Writable, Writable> map = new MapWritable();
    map.put(new Text("one"), new IntWritable(1));
    map.put(new Text("two"), new IntWritable(2));
    map.put(new Text("three"), new IntWritable(3));

    HiveType tuple = new HiveType(map, TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(
            TypeInfoFactory.getMapTypeInfo(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.intTypeInfo)));

    SerializationEventConverter eventConverter = new SerializationEventConverter();

    SerializationFailure iaeFailure = new SerializationFailure(new IllegalArgumentException("garbage"), tuple, new ArrayList<String>());

    String rawEvent = eventConverter.getRawEvent(iaeFailure);
    assertThat(rawEvent, startsWith("HiveType{object=org.apache.hadoop.io.MapWritable@"));
    String timestamp = eventConverter.getTimestamp(iaeFailure);
    assertTrue(StringUtils.hasText(timestamp));
    assertTrue(DateUtils.parseDate(timestamp).getTime().getTime() > 1L);
    String exceptionType = eventConverter.renderExceptionType(iaeFailure);
    assertEquals("illegal_argument_exception", exceptionType);
    String exceptionMessage = eventConverter.renderExceptionMessage(iaeFailure);
    assertEquals("garbage", exceptionMessage);
    String eventMessage = eventConverter.renderEventMessage(iaeFailure);
    assertEquals("Could not construct bulk entry from record", eventMessage);
}
 
Example #12
Source File: HiveStatsUtil.java    From flink with Apache License 2.0
private static ColumnStatistics createHiveColumnStatistics(
		Map<String, CatalogColumnStatisticsDataBase> colStats,
		StorageDescriptor sd,
		ColumnStatisticsDesc desc,
		String hiveVersion) {
	List<ColumnStatisticsObj> colStatsList = new ArrayList<>();

	for (FieldSchema field : sd.getCols()) {
		String hiveColName = field.getName();
		String hiveColType = field.getType();
		CatalogColumnStatisticsDataBase flinkColStat = colStats.get(field.getName());
		if (null != flinkColStat) {
			ColumnStatisticsData statsData = getColumnStatisticsData(
					HiveTypeUtil.toFlinkType(TypeInfoUtils.getTypeInfoFromTypeString(hiveColType)),
					flinkColStat,
					hiveVersion);
			ColumnStatisticsObj columnStatisticsObj = new ColumnStatisticsObj(hiveColName, hiveColType, statsData);
			colStatsList.add(columnStatisticsObj);
		}
	}

	return new ColumnStatistics(desc, colStatsList);
}
 
Example #13
Source File: HiveUtils.java    From incubator-hivemall with Apache License 2.0
@Nullable
public static String[] getConstStringArray(@Nonnull final ObjectInspector oi)
        throws UDFArgumentException {
    if (!ObjectInspectorUtils.isConstantObjectInspector(oi)) {
        throw new UDFArgumentException("argument must be a constant value: "
                + TypeInfoUtils.getTypeInfoFromObjectInspector(oi));
    }
    ConstantObjectInspector constOI = (ConstantObjectInspector) oi;
    if (constOI.getCategory() != Category.LIST) {
        throw new UDFArgumentException(
            "argument must be an array: " + TypeInfoUtils.getTypeInfoFromObjectInspector(oi));
    }
    final List<?> lst = (List<?>) constOI.getWritableConstantValue();
    if (lst == null) {
        return null;
    }
    final int size = lst.size();
    final String[] ary = new String[size];
    for (int i = 0; i < size; i++) {
        Object o = lst.get(i);
        if (o != null) {
            ary[i] = o.toString();
        }
    }
    return ary;
}
 
Example #14
Source File: HiveGenericUDF.java    From flink with Apache License 2.0
@Override
public DataType getHiveResultType(Object[] constantArguments, DataType[] argTypes) {
	LOG.info("Getting result type of HiveGenericUDF from {}", hiveFunctionWrapper.getClassName());

	try {
		ObjectInspector[] argumentInspectors = HiveInspectors.toInspectors(constantArguments, argTypes);

		ObjectInspector resultObjectInspector =
			hiveFunctionWrapper.createFunction().initializeAndFoldConstants(argumentInspectors);

		return HiveTypeUtil.toFlinkType(
			TypeInfoUtils.getTypeInfoFromObjectInspector(resultObjectInspector));
	} catch (UDFArgumentException e) {
		throw new FlinkHiveUDFException(e);
	}
}
 
Example #15
Source File: HiveGenericUDF.java    From flink with Apache License 2.0
@Override
public void openInternal() {

	LOG.info("Open HiveGenericUDF as {}", hiveFunctionWrapper.getClassName());

	function = hiveFunctionWrapper.createFunction();

	try {
		returnInspector = function.initializeAndFoldConstants(
			HiveInspectors.toInspectors(constantArguments, argTypes));
	} catch (UDFArgumentException e) {
		throw new FlinkHiveUDFException(e);
	}

	deferredObjects = new GenericUDF.DeferredObject[argTypes.length];

	for (int i = 0; i < deferredObjects.length; i++) {
		deferredObjects[i] = new DeferredObjectAdapter(
			TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(
				HiveTypeUtil.toHiveTypeInfo(argTypes[i])),
			argTypes[i].getLogicalType()
		);
	}
}
 
Example #16
Source File: HiveStatsUtil.java    From flink with Apache License 2.0
private static ColumnStatistics createHiveColumnStatistics(
		Map<String, CatalogColumnStatisticsDataBase> colStats,
		StorageDescriptor sd,
		ColumnStatisticsDesc desc) {
	List<ColumnStatisticsObj> colStatsList = new ArrayList<>();

	for (FieldSchema field : sd.getCols()) {
		String hiveColName = field.getName();
		String hiveColType = field.getType();
		CatalogColumnStatisticsDataBase flinkColStat = colStats.get(field.getName());
		if (null != flinkColStat) {
			ColumnStatisticsData statsData =
					getColumnStatisticsData(HiveTypeUtil.toFlinkType(TypeInfoUtils.getTypeInfoFromTypeString(hiveColType)), flinkColStat);
			ColumnStatisticsObj columnStatisticsObj = new ColumnStatisticsObj(hiveColName, hiveColType, statsData);
			colStatsList.add(columnStatisticsObj);
		}
	}

	return new ColumnStatistics(desc, colStatsList);
}
 
Example #17
Source File: HiveTableUtil.java    From flink with Apache License 2.0
/**
 * Create a Flink's TableSchema from Hive table's columns and partition keys.
 */
public static TableSchema createTableSchema(List<FieldSchema> cols, List<FieldSchema> partitionKeys) {
	List<FieldSchema> allCols = new ArrayList<>(cols);
	allCols.addAll(partitionKeys);

	String[] colNames = new String[allCols.size()];
	DataType[] colTypes = new DataType[allCols.size()];

	for (int i = 0; i < allCols.size(); i++) {
		FieldSchema fs = allCols.get(i);

		colNames[i] = fs.getName();
		colTypes[i] = HiveTypeUtil.toFlinkType(TypeInfoUtils.getTypeInfoFromTypeString(fs.getType()));
	}

	return TableSchema.builder()
			.fields(colNames, colTypes)
			.build();
}
 
Example #18
Source File: OrcTestTools.java    From incubator-gobblin with Apache License 2.0
/**
 * AvroRow version of writeAsOrcBinary
 */
private void writeAsOrcBinary(OrcRowIterator input, TypeInfo schema, Path outputPath) throws IOException {
  Configuration configuration = new Configuration();

  // Note that it doesn't support schema evolution at all.
  // If the schema in realRow is inconsistent with the given schema, the write to disk will fail.
  ObjectInspector oi = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(schema);
  OrcFile.WriterOptions options = OrcFile.writerOptions(configuration).inspector(oi);
  Writer writer = null;

  while (input.hasNext()) {
    AvroRow avroRow = (AvroRow) input.next();
    if (writer == null) {
      options.inspector(avroRow.getInspector());
      writer = OrcFile.createWriter(outputPath, options);
    }
    writer.addRow(avroRow.realRow);
  }
  if (writer != null) {
    writer.close();
  }
}
 
Example #19
Source File: CobolCopybook.java    From Cobol-to-Hive with Apache License 2.0
private void setFieldVars(CobolFieldDecl cfd, int occurance, int dependId) {
	cfd.fieldProperties.put("prev.col", prevColumn);
	cfd.fieldProperties.put("occurance", occurance);

	// get unique fieldName
	String fieldName = "";
	if (uniqueNames.get(cfd.getFieldName()) == null) {
		fieldName = cfd.getFieldName();
	} else {
		uniqueNames.put(cfd.getFieldName(),
				uniqueNames.get(cfd.getFieldName()) + 1);
		fieldName = cfd.getFieldName() + "_"
				+ uniqueNames.get(cfd.getFieldName());
	}

	uniqueNames.put(fieldName, 0);
	namePrevCol.put(fieldName, prevColumn);

	String fieldType = cfd.getFieldType();
	fieldNames.add(fieldName);
	fieldTypes.add(fieldType);
	fieldTypeInfos.add(TypeInfoUtils.getTypeInfoFromTypeString(fieldType));
	fieldOIs.add(TypeInfoUtils
			.getStandardJavaObjectInspectorFromTypeInfo(TypeInfoUtils
					.getTypeInfoFromTypeString(fieldType)));
	columnNos++;

	cfd.fieldProperties.put("id", columnNos);
	cfd.fieldProperties.put("depend.id", dependId);
	Map<String, Integer> prop = new HashMap<String, Integer>(
			cfd.fieldProperties);

	fieldProperties.add(prop);
	prevColumn = columnNos;
}
 
Example #20
Source File: HiveSimpleUDF.java    From flink with Apache License 2.0
@Override
public void openInternal() {
	LOG.info("Opening HiveSimpleUDF as '{}'", hiveFunctionWrapper.getClassName());

	function = hiveFunctionWrapper.createFunction();

	List<TypeInfo> typeInfos = new ArrayList<>();

	for (DataType arg : argTypes) {
		typeInfos.add(HiveTypeUtil.toHiveTypeInfo(arg, false));
	}

	try {
		method = function.getResolver().getEvalMethod(typeInfos);
		returnInspector = ObjectInspectorFactory.getReflectionObjectInspector(method.getGenericReturnType(),
			ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
		ObjectInspector[] argInspectors = new ObjectInspector[typeInfos.size()];

		for (int i = 0; i < argTypes.length; i++) {
			argInspectors[i] = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(typeInfos.get(i));
		}

		conversionHelper = new GenericUDFUtils.ConversionHelper(method, argInspectors);
		conversions = new HiveObjectConversion[argInspectors.length];
		for (int i = 0; i < argInspectors.length; i++) {
			conversions[i] = HiveInspectors.getConversion(argInspectors[i], argTypes[i].getLogicalType(), hiveShim);
		}

		allIdentityConverter = Arrays.stream(conversions)
			.allMatch(conv -> conv instanceof IdentityConversion);
	} catch (Exception e) {
		throw new FlinkHiveUDFException(
			String.format("Failed to open HiveSimpleUDF from %s", hiveFunctionWrapper.getClassName()), e);
	}
}
 
Example #21
Source File: MinByUDAF.java    From incubator-hivemall with Apache License 2.0
@Override
public GenericUDAFEvaluator getEvaluator(@Nonnull TypeInfo[] argTypes)
        throws SemanticException {
    if (argTypes.length != 2) {
        throw new UDFArgumentLengthException(
            "Exactly two arguments are expected: " + argTypes.length);
    }
    ObjectInspector yOI = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(argTypes[1]);
    if (!ObjectInspectorUtils.compareSupported(yOI)) {
        throw new UDFArgumentTypeException(1,
            "Cannot support comparison of map<> type or complex type containing map<>.");
    }
    return new Evaluator();
}
 
Example #22
Source File: IndexRSerde.java    From indexr with Apache License 2.0
@Override
public void initialize(Configuration conf, Properties tbl) throws SerDeException {
    String columnNameProperty = tbl.getProperty(IOConstants.COLUMNS);
    String columnTypeProperty = tbl.getProperty(IOConstants.COLUMNS_TYPES);

    if (Strings.isEmpty(columnNameProperty)) {
        columnNames = new ArrayList<String>();
    } else {
        columnNames = Arrays.asList(columnNameProperty.split(","));
    }
    if (Strings.isEmpty(columnTypeProperty)) {
        columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(StringUtils.repeat("string", ":", columnNames.size()));
    } else {
        columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty);
    }
    if (columnNames.size() != columnTypes.size()) {
        throw new IllegalArgumentException("IndexRHiveSerde initialization failed. Number of column " +
                "names and column types differ. columnNames = " + columnNames + ", columnTypes = " +
                columnTypes);
    }

    TypeInfo rowTypeInfo = TypeInfoFactory.getStructTypeInfo(columnNames, columnTypes);
    this.objInspector = new ArrayWritableObjectInspector((StructTypeInfo) rowTypeInfo);

    stats = new SerDeStats();
    serdeSize = 0;
}
 
Example #23
Source File: HiveUtils.java    From incubator-hivemall with Apache License 2.0
@Nonnull
public static ObjectInspector getObjectInspector(@Nonnull final String typeString,
        final boolean preferWritable) {
    TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeString);
    if (preferWritable) {
        return TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo);
    } else {
        return TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(typeInfo);
    }
}
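A quick sketch of what the preferWritable flag changes, using a string column (hedged: both inspectors come from the standard Hive factories shown above):

PrimitiveObjectInspector writableOI =
        (PrimitiveObjectInspector) HiveUtils.getObjectInspector("string", true);
PrimitiveObjectInspector javaOI =
        (PrimitiveObjectInspector) HiveUtils.getObjectInspector("string", false);

System.out.println(writableOI.getPrimitiveWritableClass()); // class org.apache.hadoop.io.Text
System.out.println(javaOI.getJavaPrimitiveClass());         // class java.lang.String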
 
Example #24
Source File: HiveUtils.java    From incubator-hivemall with Apache License 2.0
@Nonnull
public static ListObjectInspector asListOI(@Nonnull final ObjectInspector[] argOIs,
        final int argIndex) throws UDFArgumentException {
    final ObjectInspector oi = getObjectInspector(argOIs, argIndex);
    Category category = oi.getCategory();
    if (category != Category.LIST) {
        throw new UDFArgumentException("Expecting ListObjectInspector for argOIs[" + argIndex
                + "] but got " + TypeInfoUtils.getTypeInfoFromObjectInspector(oi));
    }
    return (ListObjectInspector) oi;
}
 
Example #25
Source File: HiveUtils.java    From incubator-hivemall with Apache License 2.0 5 votes vote down vote up
public static boolean getConstBoolean(@Nonnull final ObjectInspector[] argOIs,
        final int argIndex) throws UDFArgumentException {
    final ObjectInspector oi = getObjectInspector(argOIs, argIndex);
    if (!isBooleanOI(oi)) {
        throw new UDFArgumentTypeException(argIndex, "argument must be a Boolean value: "
                + TypeInfoUtils.getTypeInfoFromObjectInspector(oi));
    }
    BooleanWritable v = getConstValue(oi);
    return v.get();
}
 
Example #26
Source File: HiveUtils.java    From incubator-hivemall with Apache License 2.0 5 votes vote down vote up
@Nonnull
public static PrimitiveObjectInspector asPrimitiveObjectInspector(
        @Nonnull final ObjectInspector[] argOIs, final int argIndex)
        throws UDFArgumentException {
    final ObjectInspector oi = getObjectInspector(argOIs, argIndex);
    if (oi.getCategory() != Category.PRIMITIVE) {
        throw new UDFArgumentException("Expecting PrimitiveObjectInspector for argOIs["
                + argIndex + "] but got " + TypeInfoUtils.getTypeInfoFromObjectInspector(oi));
    }
    return (PrimitiveObjectInspector) oi;
}
 
Example #27
Source File: HiveUtils.java    From incubator-hivemall with Apache License 2.0 5 votes vote down vote up
@Nonnull
public static PrimitiveObjectInspector asPrimitiveObjectInspector(
        @Nonnull final ObjectInspector oi) throws UDFArgumentException {
    if (oi.getCategory() != Category.PRIMITIVE) {
        throw new UDFArgumentException("Expecting PrimitiveObjectInspector: "
                + TypeInfoUtils.getTypeInfoFromObjectInspector(oi));
    }
    return (PrimitiveObjectInspector) oi;
}
 
Example #28
Source File: HiveUtils.java    From incubator-hivemall with Apache License 2.0 5 votes vote down vote up
@Nonnull
public static ConstantObjectInspector asConstantObjectInspector(
        @Nonnull final ObjectInspector oi) throws UDFArgumentException {
    if (!ObjectInspectorUtils.isConstantObjectInspector(oi)) {
        throw new UDFArgumentException("argument must be a constant value: "
                + TypeInfoUtils.getTypeInfoFromObjectInspector(oi));
    }
    return (ConstantObjectInspector) oi;
}
 
Example #29
Source File: HiveUtils.java    From incubator-hivemall with Apache License 2.0 5 votes vote down vote up
public static long getConstLong(@Nonnull final ObjectInspector oi) throws UDFArgumentException {
    if (!isBigIntOI(oi)) {
        throw new UDFArgumentException("argument must be a BigInt value: "
                + TypeInfoUtils.getTypeInfoFromObjectInspector(oi));
    }
    LongWritable v = getConstValue(oi);
    return v.get();
}
 
Example #30
Source File: HiveUtils.java    From incubator-hivemall with Apache License 2.0 5 votes vote down vote up
public static int getConstInt(@Nonnull final ObjectInspector oi) throws UDFArgumentException {
    if (!isIntOI(oi)) {
        throw new UDFArgumentException("argument must be an Int value: "
                + TypeInfoUtils.getTypeInfoFromObjectInspector(oi));
    }
    IntWritable v = getConstValue(oi);
    return v.get();
}