org.apache.hadoop.hive.ql.exec.UDAF Java Examples

The following examples show how to use org.apache.hadoop.hive.ql.exec.UDAF. Each example is taken from an open-source project; the project and license are noted above the code.
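All of the examples below consume classes written against Hive's legacy UDAF API: a class that extends org.apache.hadoop.hive.ql.exec.UDAF and contains a nested org.apache.hadoop.hive.ql.exec.UDAFEvaluator. As a minimal sketch of such a class (the class, method bodies, and names here are hypothetical and not taken from any of the projects below):

import org.apache.hadoop.hive.ql.exec.UDAF;
import org.apache.hadoop.hive.ql.exec.UDAFEvaluator;

// Hypothetical legacy-style aggregate that sums nullable doubles.
public final class SimpleSumUDAF extends UDAF {

    public static class SumEvaluator implements UDAFEvaluator {
        private double sum;
        private boolean empty;

        @Override
        public void init() {
            sum = 0.0;
            empty = true;
        }

        // Called once per input row of the group.
        public boolean iterate(Double value) {
            if (value != null) {
                sum += value;
                empty = false;
            }
            return true;
        }

        // Emits the partial state computed by this task.
        public Double terminatePartial() {
            return empty ? null : sum;
        }

        // Merges a partial state produced by another task.
        public boolean merge(Double other) {
            if (other != null) {
                sum += other;
                empty = false;
            }
            return true;
        }

        // Emits the final aggregate for the group.
        public Double terminate() {
            return empty ? null : sum;
        }
    }
}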
Example #1
Source File: HiveTableFactory.java    From flink with Apache License 2.0
@Override
public FunctionDefinition createFunctionDefinition(String name, CatalogFunction catalogFunction) {
	String functionClassName = catalogFunction.getClassName();

	if (Boolean.valueOf(catalogFunction.getProperties().get(CatalogConfig.IS_GENERIC))) {
		throw new TableException(
			String.format("HiveFunctionDefinitionFactory does not support generic functions %s yet", name));
	}

	Class clazz;
	try {
		clazz = Thread.currentThread().getContextClassLoader().loadClass(functionClassName);

		LOG.info("Successfully loaded Hive udf '{}' with class '{}'", name, functionClassName);
	} catch (ClassNotFoundException e) {
		throw new TableException(
			String.format("Failed to initiate an instance of class %s.", functionClassName), e);
	}

	if (UDF.class.isAssignableFrom(clazz)) {
		LOG.info("Transforming Hive function '{}' into a HiveSimpleUDF", name);

		return new ScalarFunctionDefinition(
			name,
			new HiveSimpleUDF(new HiveFunctionWrapper<>(functionClassName))
		);
	} else if (GenericUDF.class.isAssignableFrom(clazz)) {
		LOG.info("Transforming Hive function '{}' into a HiveGenericUDF", name);

		return new ScalarFunctionDefinition(
			name,
			new HiveGenericUDF(new HiveFunctionWrapper<>(functionClassName))
		);
	} else if (GenericUDTF.class.isAssignableFrom(clazz)) {
		LOG.info("Transforming Hive function '{}' into a HiveGenericUDTF", name);

		HiveGenericUDTF udtf = new HiveGenericUDTF(new HiveFunctionWrapper<>(functionClassName));

		return new TableFunctionDefinition(
			name,
			udtf,
			GenericTypeInfo.of(Row.class)
		);
	} else if (GenericUDAFResolver2.class.isAssignableFrom(clazz) || UDAF.class.isAssignableFrom(clazz)) {
		HiveGenericUDAF udaf;

		if (GenericUDAFResolver2.class.isAssignableFrom(clazz)) {
			LOG.info(
				"Transforming Hive function '{}' into a HiveGenericUDAF with no UDAF bridging and Hive version %s",
				name, hiveVersion);

			udaf = new HiveGenericUDAF(new HiveFunctionWrapper<>(functionClassName), false, hiveVersion);
		} else {
			LOG.info(
				"Transforming Hive function '{}' into a HiveGenericUDAF with UDAF bridging and Hive version %s",
				name, hiveVersion);

			udaf = new HiveGenericUDAF(new HiveFunctionWrapper<>(functionClassName), true, hiveVersion);
		}

		return new AggregateFunctionDefinition(
			name,
			udaf,
			GenericTypeInfo.of(Object.class),
			GenericTypeInfo.of(GenericUDAFEvaluator.AggregationBuffer.class)
		);
	} else {
		throw new IllegalArgumentException(
			String.format("HiveFunctionDefinitionFactory cannot initiate FunctionDefinition for class %s", functionClassName));
	}
}
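For a legacy UDAF like the SimpleSumUDAF sketch above, the branch chosen by createFunctionDefinition can be reproduced in isolation. A rough sketch (com.example.SimpleSumUDAF is a hypothetical class name, assumed to be on the context classloader):

import org.apache.hadoop.hive.ql.exec.UDAF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFResolver2;

// Sketch: branch selection for a hypothetical legacy UDAF class.
public final class BranchSelectionDemo {
    public static void main(String[] args) throws ClassNotFoundException {
        Class<?> clazz = Thread.currentThread()
            .getContextClassLoader()
            .loadClass("com.example.SimpleSumUDAF"); // hypothetical FQCN

        // false: legacy UDAFs do not implement GenericUDAFResolver2 ...
        System.out.println(GenericUDAFResolver2.class.isAssignableFrom(clazz));
        // true: ... so they fall through to the UDAF branch, and HiveGenericUDAF
        // is constructed with bridging enabled (the `true` flag above).
        System.out.println(UDAF.class.isAssignableFrom(clazz));
    }
}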
 
Example #2
Source File: HiveSqlOperatorTable.java    From marble with Apache License 2.0
private void registerUDAF(Reflections reflections) {
  Set<Class> udafClasses = Sets.union(
      reflections.getSubTypesOf(GenericUDAFResolver2.class),
      reflections.getSubTypesOf(UDAF.class));
  for (Class clazz : udafClasses) {
    boolean isWindowFunc = false;
    Description desc = (Description) clazz.getAnnotation(Description.class);
    WindowFunctionDescription windowFunctionDescription = null;
    // Fall back to the description nested in @WindowFunctionDescription when
    // the class carries no @Description of its own.
    if (desc == null) {
      windowFunctionDescription =
          (WindowFunctionDescription) clazz
              .getAnnotation(WindowFunctionDescription.class);
      if (windowFunctionDescription != null
          && windowFunctionDescription.description() != null) {
        desc = windowFunctionDescription.description();
        isWindowFunc = true;
      }
    }
    // Skip classes without a usable description, window functions, and
    // explicitly excluded operators.
    if (desc == null || isWindowFunc || EXCLUDED_HIVE_UDAF_LIST.contains(
        clazz)) {
      continue;
    }
    String[] names = desc.name().split(",");
    for (int i = 0; i < names.length; i++) {
      String upName = names[i].toUpperCase();
      methodsUDAF.put(upName, clazz);
      SqlAggFunction aggOperatorInSqlStdOperatorTable =
          (SqlAggFunction) getOperatorInSqlStdOperatorTable(
              upName, SqlSyntax.FUNCTION, true);
      HiveSqlAggFunction sqlAggFunction;
      if (aggOperatorInSqlStdOperatorTable == null) {
        sqlAggFunction = new HiveSqlAggFunction(upName, false,
            false, HiveSqlUDAFReturnTypeInference.INSTANCE);

      } else {
        sqlAggFunction = new HiveSqlAggFunction(upName,
            aggOperatorInSqlStdOperatorTable.getNameAsId(),
            aggOperatorInSqlStdOperatorTable.getKind(),
            aggOperatorInSqlStdOperatorTable.getFunctionType(),
            false, false, HiveSqlUDAFReturnTypeInference.INSTANCE);
      }
      register(sqlAggFunction);
    }

  }
}
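The scan above only registers UDAF or GenericUDAFResolver2 subtypes that carry Hive's @Description annotation, are not window functions, and are not in the exclusion list. A hypothetical class that would pass the filter and be registered under two upper-cased names:

import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDAF;

// Hypothetical class: only the annotation matters to registerUDAF above.
// desc.name() is split on commas and upper-cased, so this class is put into
// methodsUDAF under both SIMPLE_SUM and SSUM. The evaluator is omitted here;
// it would look like the one in the SimpleSumUDAF sketch earlier on this page.
@Description(name = "simple_sum,ssum",
    value = "_FUNC_(x) - returns the sum of its double arguments")
public final class AnnotatedSumUDAF extends UDAF {
}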
 
Example #3
Source File: HiveFunctionDefinitionFactory.java    From flink with Apache License 2.0
/**
 * Create a FunctionDefinition from a Hive function's class name.
 * Called directly by {@link org.apache.flink.table.module.hive.HiveModule}.
 */
public FunctionDefinition createFunctionDefinitionFromHiveFunction(String name, String functionClassName) {
	Class clazz;
	try {
		clazz = Thread.currentThread().getContextClassLoader().loadClass(functionClassName);

		LOG.info("Successfully loaded Hive udf '{}' with class '{}'", name, functionClassName);
	} catch (ClassNotFoundException e) {
		throw new TableException(
			String.format("Failed to initiate an instance of class %s.", functionClassName), e);
	}

	if (UDF.class.isAssignableFrom(clazz)) {
		LOG.info("Transforming Hive function '{}' into a HiveSimpleUDF", name);

		return new ScalarFunctionDefinition(
			name,
			new HiveSimpleUDF(new HiveFunctionWrapper<>(functionClassName), hiveShim)
		);
	} else if (GenericUDF.class.isAssignableFrom(clazz)) {
		LOG.info("Transforming Hive function '{}' into a HiveGenericUDF", name);

		return new ScalarFunctionDefinition(
			name,
			new HiveGenericUDF(new HiveFunctionWrapper<>(functionClassName), hiveShim)
		);
	} else if (GenericUDTF.class.isAssignableFrom(clazz)) {
		LOG.info("Transforming Hive function '{}' into a HiveGenericUDTF", name);

		HiveGenericUDTF udtf = new HiveGenericUDTF(new HiveFunctionWrapper<>(functionClassName), hiveShim);

		return new TableFunctionDefinition(
			name,
			udtf,
			GenericTypeInfo.of(Row.class)
		);
	} else if (GenericUDAFResolver2.class.isAssignableFrom(clazz) || UDAF.class.isAssignableFrom(clazz)) {
		HiveGenericUDAF udaf;

		if (GenericUDAFResolver2.class.isAssignableFrom(clazz)) {
			LOG.info(
				"Transforming Hive function '{}' into a HiveGenericUDAF without UDAF bridging", name);

			udaf = new HiveGenericUDAF(new HiveFunctionWrapper<>(functionClassName), false, hiveShim);
		} else {
			LOG.info(
				"Transforming Hive function '{}' into a HiveGenericUDAF with UDAF bridging", name);

			udaf = new HiveGenericUDAF(new HiveFunctionWrapper<>(functionClassName), true, hiveShim);
		}

		return new AggregateFunctionDefinition(
			name,
			udaf,
			GenericTypeInfo.of(Object.class),
			GenericTypeInfo.of(GenericUDAFEvaluator.AggregationBuffer.class)
		);
	} else {
		throw new IllegalArgumentException(
			String.format("HiveFunctionDefinitionFactory cannot initiate FunctionDefinition for class %s", functionClassName));
	}
}
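 
As a rough usage sketch (not part of the Flink sources): assuming factory is an already-constructed HiveFunctionDefinitionFactory and com.example.SimpleSumUDAF is the hypothetical legacy UDAF sketched at the top of this page, registering it would look like this:

// Sketch only: `factory` is assumed to be a HiveFunctionDefinitionFactory
// instance; its construction (which requires a Hive shim) is not shown.
FunctionDefinition definition = factory.createFunctionDefinitionFromHiveFunction(
    "simple_sum",                  // name exposed to Flink SQL
    "com.example.SimpleSumUDAF");  // hypothetical legacy UDAF class

// Legacy UDAF classes take the bridging branch above, so the result is an
// AggregateFunctionDefinition wrapping a HiveGenericUDAF with bridging enabled.
AggregateFunctionDefinition aggDefinition = (AggregateFunctionDefinition) definition;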