org.apache.hadoop.hive.ql.exec.UDF Java Examples

The following examples show how to use org.apache.hadoop.hive.ql.exec.UDF. They are drawn from open-source projects; the source file, project, and license are noted above each example.
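All of the project code below relies on the same contract: a simple Hive UDF extends org.apache.hadoop.hive.ql.exec.UDF and exposes one or more evaluate methods that Hive resolves by reflection. As a quick orientation, here is a minimal sketch of such a class; the class name, function name, and method body are illustrative and are not taken from any of the projects cited below.

import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.Text;

// Minimal illustrative simple UDF; the name "my_upper" and this class are hypothetical.
@Description(name = "my_upper", value = "_FUNC_(str) - returns str in upper case")
public class MyUpperUDF extends UDF {
  // Hive discovers evaluate(...) overloads by reflection; there is no abstract method to override.
  public Text evaluate(Text input) {
    if (input == null) {
      return null;
    }
    return new Text(input.toString().toUpperCase());
  }
}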
Example #1
Source File: HiveFunctionWrapper.java    From flink with Apache License 2.0
/**
 * Instantiate a Hive function instance.
 *
 * @return a Hive function instance
 */
public UDFType createFunction() {
	if (instance != null) {
		return instance;
	} else {
		UDFType func = null;
		try {
			func = (UDFType) Thread.currentThread().getContextClassLoader().loadClass(className).newInstance();
		} catch (InstantiationException | IllegalAccessException | ClassNotFoundException e) {
			throw new FlinkHiveUDFException(
				String.format("Failed to create function from %s", className), e);
		}

		if (!(func instanceof UDF)) {
		// We cache the function only if it is not a simple UDF,
		// because a new instance always has to be created for simple UDFs.
			instance = func;
		}

		return func;
	}
}
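A hedged usage sketch for the wrapper above; the constructor taking a class name appears in the HiveTableFactory and HiveFunctionDefinitionFactory examples below, and the UDFLike class name is only an illustrative argument.

// Hypothetical call site: wrap a Hive simple UDF by class name and instantiate it.
// Simple UDFs are deliberately not cached, so each call returns a fresh instance.
HiveFunctionWrapper<UDF> wrapper =
	new HiveFunctionWrapper<>("org.apache.hadoop.hive.ql.udf.UDFLike");
UDF udf = wrapper.createFunction();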
 
Example #2
Source File: HiveFunctionRegistry.java    From dremio-oss with Apache License 2.0
private HiveFuncHolder matchAndCreateUDFHolder(String udfName,
                                               Class<? extends UDF> udfClazz,
                                               CompleteType[] argTypes,
                                               ObjectInspector[] argOIs) {
  try {
    GenericUDF udfInstance = new GenericUDFBridge(udfName, false/* is operator */, udfClazz.getName());
    ObjectInspector returnOI = udfInstance.initialize(argOIs);

    return new HiveFuncHolder(
      udfName,
      udfClazz,
      argTypes,
      returnOI,
      CompleteType.fromMinorType(ObjectInspectorHelper.getMinorType(returnOI)),
      nonDeterministicUDFs.contains(udfClazz));
  } catch (Exception e) { /* no matching Hive UDF for these argument types; fall through and return null */ }

  return null;
}
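For orientation, a hedged standalone sketch of the bridge-and-initialize step used above. UDFLike (which also appears in the HiveExprFactory example below) and the Java string inspectors are illustrative choices, not part of the Dremio source.

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.udf.UDFLike;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

// Wrap a simple UDF in GenericUDFBridge and resolve its return ObjectInspector
// from the argument inspectors, mirroring matchAndCreateUDFHolder above.
static ObjectInspector resolveLikeReturnOI() throws UDFArgumentException {
  GenericUDF bridge = new GenericUDFBridge("like", false /* is operator */, UDFLike.class.getName());
  ObjectInspector[] argOIs = {
      PrimitiveObjectInspectorFactory.javaStringObjectInspector,
      PrimitiveObjectInspectorFactory.javaStringObjectInspector
  };
  return bridge.initialize(argOIs);
}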
 
Example #3
Source File: TestHiveFunctionLoader.java    From tajo with Apache License 2.0
@Test
public void testAnalyzeUDFclass() {
  Set<Class<? extends UDF>> funcSet = new HashSet<>();
  funcSet.add(HiveUDFtest.class);
  List<FunctionDesc> funcList = new LinkedList<>();

  HiveFunctionLoader.buildFunctionsFromUDF(funcSet, funcList, null);

  assertEquals(funcList.size(), 1);

  FunctionDesc desc = funcList.get(0);

  assertEquals("multiplestr", desc.getFunctionName());
  assertEquals(false, desc.isDeterministic());
  assertEquals(TajoDataTypes.Type.TEXT, desc.getReturnType().getType());
  assertEquals(TajoDataTypes.Type.TEXT, desc.getParamTypes()[0].getType());
  assertEquals(TajoDataTypes.Type.INT4, desc.getParamTypes()[1].getType());
}
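The assertions above imply the shape of HiveUDFtest. A hedged reconstruction follows; only the annotations and the evaluate signature are derived from the test, while the description text and the method body are invented for illustration.

import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.udf.UDFType;
import org.apache.hadoop.io.Text;

// Matches the assertions: name "multiplestr", non-deterministic,
// TEXT return type, and (TEXT, INT4) parameters via evaluate(Text, int).
@Description(name = "multiplestr", value = "_FUNC_(str, n) - repeats str n times")
@UDFType(deterministic = false)
public class HiveUDFtest extends UDF {
  public Text evaluate(Text str, int count) {
    StringBuilder buffer = new StringBuilder();
    for (int i = 0; i < count; i++) {
      buffer.append(str.toString());
    }
    return new Text(buffer.toString());
  }
}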
 
Example #4
Source File: HiveUDFImplementor.java    From marble with Apache License 2.0
private MemberDeclaration generateUdfInstanceDeclaration(String opName,
    SqlSyntax syntax, String fieldName) {
  try {
    if (opName.equals("NOT RLIKE")) {
      // We use a RexImpTable.NotImplementor to wrap a HiveUDFImplementor,
      // so `NOT RLIKE` and `RLIKE` are treated the same here.
      opName = "RLIKE";
    }
    if (opName.equals("NOT REGEXP")) {
      opName = "REGEXP";
    }
    Class hiveUDFClazz = HiveSqlOperatorTable.instance()
        .getHiveUDFClass(opName, syntax);
    Expression newUdfExpr;
    if (GenericUDF.class.isAssignableFrom(hiveUDFClazz)) {
      newUdfExpr = Expressions.new_(hiveUDFClazz.getConstructor());
    } else if (UDF.class.isAssignableFrom(hiveUDFClazz)) {
      newUdfExpr = Expressions.new_(GENERIC_UDF_BRIDGE_CONSTRUCTOR
          , new ConstantExpression(String.class, opName)
          , new ConstantExpression(boolean.class, false)
          , new ConstantExpression(String.class, hiveUDFClazz.getName()));
    } else {
      throw new IllegalArgumentException("unknown hive udf class for opName="
          + opName
          + ",and syntax="
          + syntax);
    }
    MemberDeclaration udfMemberDeclaration = Expressions.fieldDecl(
        Modifier.PUBLIC,
        Expressions.parameter(GenericUDF.class, fieldName),
        newUdfExpr);
    return udfMemberDeclaration;
  } catch (NoSuchMethodException e) {
    throw new RuntimeException("fail to new instance for op name " + opName, e);
  }
}
 
Example #5
Source File: HiveUDFImplementor.java    From marble with Apache License 2.0
public static GenericUDF newGenericUDF(String opName,
    SqlSyntax syntax) {
  if (opName.equals("NOT RLIKE")) {
    // We use a RexImpTable.NotImplementor to wrap a HiveUDFImplementor,
    // so `NOT RLIKE` and `RLIKE` are treated the same here.
    opName = "RLIKE";
  }
  if (opName.equals("NOT REGEXP")) {
    opName = "REGEXP";
  }
  Class hiveUDFClazz = HiveSqlOperatorTable.instance()
      .getHiveUDFClass(opName, syntax);
  if (GenericUDF.class.isAssignableFrom(hiveUDFClazz)) {
    try {
      return (GenericUDF) hiveUDFClazz.newInstance();
    } catch (InstantiationException | IllegalAccessException e) {
      throw new RuntimeException(
          "fail to new instance for class " + hiveUDFClazz, e);
    }
  } else if (UDF.class.isAssignableFrom(hiveUDFClazz)) {
    return new GenericUDFBridge(opName, false, hiveUDFClazz.getName());
  } else {
    throw new IllegalArgumentException("unknown hive udf class for opName="
        + opName
        + ",and syntax="
        + syntax);
  }
}
 
Example #6
Source File: CreateFunctions.java    From emodb with Apache License 2.0
@Override
protected void generateScript(Namespace namespace, PrintStream out) {
    for (Map.Entry<String, Class<? extends UDF>> entry : EmoFunctions.ALL_FUNCTIONS.entrySet()) {
        String fcnName = entry.getKey();
        Class<? extends UDF> fcnClass = entry.getValue();

        out.println(format(CREATE_FUNCTION_COMMAND, fcnName, fcnClass.getName()));
        out.println();
    }
}
 
Example #7
Source File: HiveSimpleUDF.java    From flink with Apache License 2.0
public HiveSimpleUDF(HiveFunctionWrapper<UDF> hiveFunctionWrapper) {
	super(hiveFunctionWrapper);
	LOG.info("Creating HiveSimpleUDF from '{}'", this.hiveFunctionWrapper.getClassName());
}
 
Example #8
Source File: HiveTableFactory.java    From flink with Apache License 2.0
@Override
public FunctionDefinition createFunctionDefinition(String name, CatalogFunction catalogFunction) {
	String functionClassName = catalogFunction.getClassName();

	if (Boolean.valueOf(catalogFunction.getProperties().get(CatalogConfig.IS_GENERIC))) {
		throw new TableException(
			String.format("HiveFunctionDefinitionFactory does not support generic functions %s yet", name));
	}

	Class clazz;
	try {
		clazz = Thread.currentThread().getContextClassLoader().loadClass(functionClassName);

		LOG.info("Successfully loaded Hive udf '{}' with class '{}'", name, functionClassName);
	} catch (ClassNotFoundException e) {
		throw new TableException(
			String.format("Failed to initiate an instance of class %s.", functionClassName), e);
	}

	if (UDF.class.isAssignableFrom(clazz)) {
		LOG.info("Transforming Hive function '{}' into a HiveSimpleUDF", name);

		return new ScalarFunctionDefinition(
			name,
			new HiveSimpleUDF(new HiveFunctionWrapper<>(functionClassName))
		);
	} else if (GenericUDF.class.isAssignableFrom(clazz)) {
		LOG.info("Transforming Hive function '{}' into a HiveGenericUDF", name);

		return new ScalarFunctionDefinition(
			name,
			new HiveGenericUDF(new HiveFunctionWrapper<>(functionClassName))
		);
	} else if (GenericUDTF.class.isAssignableFrom(clazz)) {
		LOG.info("Transforming Hive function '{}' into a HiveGenericUDTF", name);

		HiveGenericUDTF udtf = new HiveGenericUDTF(new HiveFunctionWrapper<>(functionClassName));

		return new TableFunctionDefinition(
			name,
			udtf,
			GenericTypeInfo.of(Row.class)
		);
	} else if (GenericUDAFResolver2.class.isAssignableFrom(clazz) || UDAF.class.isAssignableFrom(clazz)) {
		HiveGenericUDAF udaf;

		if (GenericUDAFResolver2.class.isAssignableFrom(clazz)) {
			LOG.info(
				"Transforming Hive function '{}' into a HiveGenericUDAF with no UDAF bridging and Hive version %s",
				name, hiveVersion);

			udaf = new HiveGenericUDAF(new HiveFunctionWrapper<>(functionClassName), false, hiveVersion);
		} else {
			LOG.info(
				"Transforming Hive function '{}' into a HiveGenericUDAF with UDAF bridging and Hive version %s",
				name, hiveVersion);

			udaf = new HiveGenericUDAF(new HiveFunctionWrapper<>(functionClassName), true, hiveVersion);
		}

		return new AggregateFunctionDefinition(
			name,
			udaf,
			GenericTypeInfo.of(Object.class),
			GenericTypeInfo.of(GenericUDAFEvaluator.AggregationBuffer.class)
		);
	} else {
		throw new IllegalArgumentException(
			String.format("HiveFunctionDefinitionFactory cannot initiate FunctionDefinition for class %s", functionClassName));
	}
}
 
Example #9
Source File: HiveExprFactory.java    From multiple-dimension-spread with Apache License 2.0
public static IHiveExprNode getFromUdfClassName( final Class<? extends UDF> udf , final List<ExprNodeDesc> childNodeDesc ){
  // Compare class names with equals(); reference comparison of Strings is unreliable.
  if( UDFLike.class.getName().equals( udf.getName() ) ){
    return new RegexpHiveExpr( childNodeDesc );
  }
  return new UnsupportHiveExpr();
}
 
Example #10
Source File: HiveFunctionLoader.java    From tajo with Apache License 2.0
static void buildFunctionsFromUDF(Set<Class<? extends UDF>> classes, List<FunctionDesc> list, String jarurl) {
  for (Class<? extends UDF> clazz: classes) {
    String [] names;
    String value = null, extended = null;

    Description desc = clazz.getAnnotation(Description.class);

    // Check @Description annotation (if exists)
    if (desc != null) {
      names = desc.name().split(",");
      for (int i=0; i<names.length; i++) {
        names[i] = names[i].trim();
      }

      value = desc.value();
      extended = desc.extended();
    }
    else {
      names = new String [] {clazz.getName().replace('.','_')};
    }

    // actual function descriptor building
    FunctionDescBuilder builder = new FunctionDescBuilder();

    UDFType type = clazz.getDeclaredAnnotation(UDFType.class);
    if (type != null) {
      builder.setDeterministic(type.deterministic());
    }

    builder.setFunctionType(CatalogProtos.FunctionType.UDF);

    if (value != null) {
      builder.setDescription(value);
    }

    if (extended != null) {
      builder.setExample(extended);
    }

    UDFInvocationDesc udfInvocation = new UDFInvocationDesc(CatalogProtos.UDFtype.HIVE, clazz.getName(), jarurl, true);

    // verify 'evaluate' method and extract return type and parameter types
    for (Method method: clazz.getMethods()) {
      if (method.getName().equals("evaluate")) {
        registerMethod(method, names, udfInvocation, builder, list);
      }
    }
  }
}
 
Example #11
Source File: HiveFunctionDefinitionFactory.java    From flink with Apache License 2.0
/**
 * Create a FunctionDefinition from a Hive function's class name.
 * Called directly by {@link org.apache.flink.table.module.hive.HiveModule}.
 */
public FunctionDefinition createFunctionDefinitionFromHiveFunction(String name, String functionClassName) {
	Class clazz;
	try {
		clazz = Thread.currentThread().getContextClassLoader().loadClass(functionClassName);

		LOG.info("Successfully loaded Hive udf '{}' with class '{}'", name, functionClassName);
	} catch (ClassNotFoundException e) {
		throw new TableException(
			String.format("Failed to initiate an instance of class %s.", functionClassName), e);
	}

	if (UDF.class.isAssignableFrom(clazz)) {
		LOG.info("Transforming Hive function '{}' into a HiveSimpleUDF", name);

		return new ScalarFunctionDefinition(
			name,
			new HiveSimpleUDF(new HiveFunctionWrapper<>(functionClassName), hiveShim)
		);
	} else if (GenericUDF.class.isAssignableFrom(clazz)) {
		LOG.info("Transforming Hive function '{}' into a HiveGenericUDF", name);

		return new ScalarFunctionDefinition(
			name,
			new HiveGenericUDF(new HiveFunctionWrapper<>(functionClassName), hiveShim)
		);
	} else if (GenericUDTF.class.isAssignableFrom(clazz)) {
		LOG.info("Transforming Hive function '{}' into a HiveGenericUDTF", name);

		HiveGenericUDTF udtf = new HiveGenericUDTF(new HiveFunctionWrapper<>(functionClassName), hiveShim);

		return new TableFunctionDefinition(
			name,
			udtf,
			GenericTypeInfo.of(Row.class)
		);
	} else if (GenericUDAFResolver2.class.isAssignableFrom(clazz) || UDAF.class.isAssignableFrom(clazz)) {
		HiveGenericUDAF udaf;

		if (GenericUDAFResolver2.class.isAssignableFrom(clazz)) {
			LOG.info(
				"Transforming Hive function '{}' into a HiveGenericUDAF without UDAF bridging", name);

			udaf = new HiveGenericUDAF(new HiveFunctionWrapper<>(functionClassName), false, hiveShim);
		} else {
			LOG.info(
				"Transforming Hive function '{}' into a HiveGenericUDAF with UDAF bridging", name);

			udaf = new HiveGenericUDAF(new HiveFunctionWrapper<>(functionClassName), true, hiveShim);
		}

		return new AggregateFunctionDefinition(
			name,
			udaf,
			GenericTypeInfo.of(Object.class),
			GenericTypeInfo.of(GenericUDAFEvaluator.AggregationBuffer.class)
		);
	} else {
		throw new IllegalArgumentException(
			String.format("HiveFunctionDefinitionFactory cannot initiate FunctionDefinition for class %s", functionClassName));
	}
}
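A hedged invocation sketch for the factory method above; the factory variable, the function name, and the GenericUDFUpper class are illustrative and not taken from the Flink source.

// Hypothetical call site: resolve a Hive UDF class into a Flink FunctionDefinition.
FunctionDefinition definition = factory.createFunctionDefinitionFromHiveFunction(
	"my_upper",
	"org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper");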
 
Example #12
Source File: HiveSimpleUDF.java    From flink with Apache License 2.0
public HiveSimpleUDF(HiveFunctionWrapper<UDF> hiveFunctionWrapper, HiveShim hiveShim) {
	super(hiveFunctionWrapper);
	this.hiveShim = hiveShim;
	LOG.info("Creating HiveSimpleUDF from '{}'", this.hiveFunctionWrapper.getClassName());
}
 
Example #13
Source File: HiveFuncHolder.java    From dremio-oss with Apache License 2.0
/**
 * Create holder for UDF
 * @param udfName name of the UDF
 * @param udfClazz UDF implementation class
 * @param argTypes argument types
 * @param returnOI ObjectInspector for the return value
 * @param returnType return type
 * @param isRandom whether the UDF is non-deterministic
 */
public HiveFuncHolder(String udfName, Class< ? extends UDF> udfClazz, CompleteType[] argTypes,
                      ObjectInspector returnOI, CompleteType returnType, boolean isRandom) {
  this(GenericUDFBridge.class, argTypes, returnOI, returnType, isRandom);
  this.isGenericUDF = false;
  this.udfClazz = udfClazz;
  this.udfName = udfName;
}