org.apache.hadoop.hive.ql.exec.FunctionRegistry Java Examples

The following examples show how to use org.apache.hadoop.hive.ql.exec.FunctionRegistry. Each example lists its source file, the project it comes from, and that project's license.
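Before the project examples, here is a minimal, self-contained sketch of the two FunctionRegistry calls the examples rely on: listing the registered function names and resolving a built-in UDF by name. The class name FunctionRegistrySketch and the lookup of the built-in "upper" function are illustrative assumptions, and getFunctionInfo declares a SemanticException in some Hive versions, hence the broad throws clause; treat this as a sketch rather than a canonical usage.

import java.util.Set;

import org.apache.hadoop.hive.ql.exec.FunctionInfo;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;

public class FunctionRegistrySketch {

  public static void main(String[] args) throws Exception {
    // All function names currently known to the registry (built-ins plus any
    // temporary or permanent functions registered in this process).
    Set<String> names = FunctionRegistry.getFunctionNames();
    System.out.println("registered functions: " + names.size());

    // Resolve a built-in UDF by name and obtain its GenericUDF implementation.
    // "upper" is used here purely as an example of a built-in function.
    FunctionInfo info = FunctionRegistry.getFunctionInfo("upper");
    if (info != null) {
      GenericUDF udf = info.getGenericUDF();
      System.out.println("upper resolves to " + udf.getClass().getName());
    }
  }
}
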
Example #1
Source File: HiveSimpleUDF.java    From flink with Apache License 2.0
@Override
public Object evalInternal(Object[] args) {
	checkArgument(args.length == conversions.length);

	// Convert the Flink arguments to Hive objects unless every converter is an identity conversion.
	if (!allIdentityConverter) {
		for (int i = 0; i < args.length; i++) {
			args[i] = conversions[i].toHiveObject(args[i]);
		}
	}

	try {
		// Invoke the wrapped Hive UDF through FunctionRegistry, then convert the result back to a Flink object.
		Object result = FunctionRegistry.invoke(method, function, conversionHelper.convertIfNecessary(args));
		return HiveInspectors.toFlinkObject(returnInspector, result);
	} catch (HiveException e) {
		throw new FlinkHiveUDFException(e);
	}
}
 
Example #2
Source File: HiveSimpleUDF.java    From flink with Apache License 2.0
@Override
public Object evalInternal(Object[] args) {
	checkArgument(args.length == conversions.length);

	if (!allIdentityConverter) {
		for (int i = 0; i < args.length; i++) {
			args[i] = conversions[i].toHiveObject(args[i]);
		}
	}

	try {
		// Same as Example #1, except the result conversion also receives the hiveShim, keeping it Hive-version-aware.
		Object result = FunctionRegistry.invoke(method, function, conversionHelper.convertIfNecessary(args));
		return HiveInspectors.toFlinkObject(returnInspector, result, hiveShim);
	} catch (HiveException e) {
		throw new FlinkHiveUDFException(e);
	}
}
 
Example #3
Source File: HiveAuthzBindingHook.java    From incubator-sentry with Apache License 2.0
public HiveAuthzBindingHook() throws Exception {
  SessionState session = SessionState.get();
  if(session == null) {
    throw new IllegalStateException("Session has not been started");
  }
  // HACK: set a random classname to force the Auth V2 in Hive
  SessionState.get().setAuthorizer(null);

  HiveConf hiveConf = session.getConf();
  if(hiveConf == null) {
    throw new IllegalStateException("Session HiveConf is null");
  }
  authzConf = loadAuthzConf(hiveConf);
  hiveAuthzBinding = new HiveAuthzBinding(hiveConf, authzConf);

  String serdeWhiteLists = authzConf.get(HiveAuthzConf.HIVE_SENTRY_SERDE_WHITELIST,
      HiveAuthzConf.HIVE_SENTRY_SERDE_WHITELIST_DEFAULT);
  serdeWhiteList = Arrays.asList(serdeWhiteLists.split(","));
  serdeURIPrivilegesEnabled = authzConf.getBoolean(HiveAuthzConf.HIVE_SENTRY_SERDE_URI_PRIVILIEGES_ENABLED,
      HiveAuthzConf.HIVE_SENTRY_SERDE_URI_PRIVILIEGES_ENABLED_DEFAULT);

  FunctionRegistry.setupPermissionsForBuiltinUDFs("", HiveAuthzConf.HIVE_UDF_BLACK_LIST);
}
 
Example #4
Source File: ExprBuilder.java    From aws-glue-data-catalog-client-for-apache-hive-metastore with Apache License 2.0
private ExprBuilder fn(String name, TypeInfo ti, int args) throws Exception {
  List<ExprNodeDesc> children = new ArrayList<>();
  for (int i = 0; i < args; ++i) {
    children.add(stack.pop());
  }
  // Resolve the UDF by name through the FunctionRegistry and push the resulting function-call expression node.
  stack.push(new ExprNodeGenericFuncDesc(ti, FunctionRegistry.getFunctionInfo(name).getGenericUDF(), children));
  return this;
}
 
Example #5
Source File: HiveShimV120.java    From flink with Apache License 2.0
@Override
public Set<String> listBuiltInFunctions() {
	try {
		Method method = FunctionRegistry.class.getMethod("getFunctionNames");
		// getFunctionNames is a static method, so it is invoked with a null receiver
		Set<String> names = (Set<String>) method.invoke(null);

		return names.stream()
			.filter(n -> getBuiltInFunctionInfo(n).isPresent())
			.collect(Collectors.toSet());
	} catch (Exception ex) {
		throw new CatalogException("Failed to invoke FunctionRegistry.getFunctionNames()", ex);
	}
}
 
Example #6
Source File: PredicateHandlerTest.java    From accumulo-hive-storage-manager with Apache License 2.0
private void setup() {
    // Touch FunctionRegistry so its static initializer registers Hive's built-in UDFs before the test runs.
    FunctionRegistry.getFunctionNames();
    conf = new JobConf();
    conf.set(serdeConstants.LIST_COLUMNS, "field1,rid");
    conf.set(serdeConstants.LIST_COLUMN_TYPES, "string,string");
    conf.set(AccumuloSerde.COLUMN_MAPPINGS, "cf|f1,rowID");
}