org.apache.hadoop.hive.metastore.api.FunctionType Java Examples

The following examples show how to use org.apache.hadoop.hive.metastore.api.FunctionType. Each example is taken from an open-source project; the originating project, source file, and license are noted above the code.
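Before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of the typical FunctionType usage pattern: build a Function with FunctionType.JAVA (the source comments in the Flink examples below note that JAVA is currently the only constant), register it with HiveMetaStoreClient.createFunction, and read the stored type back via getFunction. The metastore URI, database, UDF class, and jar path are placeholders.

import java.util.Collections;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Function;
import org.apache.hadoop.hive.metastore.api.FunctionType;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.ResourceType;
import org.apache.hadoop.hive.metastore.api.ResourceUri;

public class FunctionTypeExample {
  public static void main(String[] args) throws Exception {
    HiveConf conf = new HiveConf();
    conf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:9083"); // placeholder metastore URI
    HiveMetaStoreClient client = new HiveMetaStoreClient(conf);

    // Build a metastore Function backed by a Java UDF class shipped in a jar.
    Function fn = new Function(
        "my_udf",                                   // function name (placeholder)
        "default",                                  // database name
        "com.example.MyUdf",                        // UDF class name (placeholder)
        "hadoop",                                   // owner name
        PrincipalType.USER,                         // owner type
        (int) (System.currentTimeMillis() / 1000),  // create time, seconds since epoch
        FunctionType.JAVA,                          // the function type this page is about
        Collections.singletonList(
            new ResourceUri(ResourceType.JAR, "hdfs:///path/to/my-udf.jar")));

    client.createFunction(fn);

    // Read it back; the persisted type is exposed via getFunctionType().
    Function stored = client.getFunction("default", "my_udf");
    System.out.println(stored.getFunctionType()); // JAVA
    client.close();
  }
}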
Example #1
Source File: CatalogToHiveConverter.java    From aws-glue-data-catalog-client-for-apache-hive-metastore with Apache License 2.0
public static Function convertFunction(final String dbName,
                                       final com.amazonaws.services.glue.model.UserDefinedFunction catalogFunction) {
  if (catalogFunction == null) {
    return null;
  }
  Function hiveFunction = new Function();
  hiveFunction.setClassName(catalogFunction.getClassName());
  hiveFunction.setCreateTime((int)(catalogFunction.getCreateTime().getTime() / 1000));
  hiveFunction.setDbName(dbName);
  hiveFunction.setFunctionName(catalogFunction.getFunctionName());
  hiveFunction.setFunctionType(FunctionType.JAVA);
  hiveFunction.setOwnerName(catalogFunction.getOwnerName());
  hiveFunction.setOwnerType(convertPrincipalType(com.amazonaws.services.glue.model.PrincipalType.fromValue(catalogFunction.getOwnerType())));
  hiveFunction.setResourceUris(convertResourceUriList(catalogFunction.getResourceUris()));
  return hiveFunction;
}
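A hypothetical caller of the converter above might look like the following sketch. It is not part of the original project: the Glue UserDefinedFunction is built with the AWS SDK's fluent setters, and every field value (function name, class, owner, jar path) is a placeholder.

// Build a Glue catalog function by hand, then convert it to a Hive metastore Function.
com.amazonaws.services.glue.model.UserDefinedFunction glueFunction =
    new com.amazonaws.services.glue.model.UserDefinedFunction()
        .withFunctionName("my_udf")                    // placeholder name
        .withClassName("com.example.MyUdf")            // placeholder UDF class
        .withOwnerName("hadoop")
        .withOwnerType("USER")                         // mapped to a Hive PrincipalType by the converter
        .withCreateTime(new java.util.Date())
        .withResourceUris(new com.amazonaws.services.glue.model.ResourceUri()
            .withResourceType("JAR")
            .withUri("hdfs:///path/to/my-udf.jar"));

Function hiveFunction = CatalogToHiveConverter.convertFunction("default", glueFunction);
// The converter always reports the result as FunctionType.JAVA.
assert hiveFunction.getFunctionType() == FunctionType.JAVA;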
 
Example #2
Source File: HiveCatalog.java    From flink with Apache License 2.0
private static Function instantiateHiveFunction(ObjectPath functionPath, CatalogFunction function) {
  boolean isGeneric = Boolean.valueOf(function.getProperties().get(CatalogConfig.IS_GENERIC));

  // Hive Function does not have properties map
  // thus, use a prefix in class name to distinguish Flink and Hive functions
  String functionClassName = isGeneric ?
      FLINK_FUNCTION_PREFIX + function.getClassName() :
      function.getClassName();

  return new Function(
      // due to https://issues.apache.org/jira/browse/HIVE-22053, we have to normalize function name ourselves
      HiveStringUtils.normalizeIdentifier(functionPath.getObjectName()),
      functionPath.getDatabaseName(),
      functionClassName,
      null,                                       // Owner name
      PrincipalType.GROUP,                        // Temporarily set to GROUP type because it's required by Hive. May change later
      (int) (System.currentTimeMillis() / 1000),
      FunctionType.JAVA,                          // FunctionType only has JAVA now
      new ArrayList<>()                           // Resource URIs
  );
}
 
Example #3
Source File: WaggleDanceIntegrationTest.java    From waggle-dance with Apache License 2.0
@Test
public void typicalGetAllFunctions() throws Exception {
  runner = WaggleDanceRunner
      .builder(configLocation)
      .databaseResolution(DatabaseResolution.PREFIXED)
      .primary("primary", localServer.getThriftConnectionUri(), READ_ONLY)
      .withPrimaryPrefix("primary_")
      .federate(SECONDARY_METASTORE_NAME, remoteServer.getThriftConnectionUri(), REMOTE_DATABASE)
      .build();

  runWaggleDance(runner);
  HiveMetaStoreClient proxy = getWaggleDanceClient();
  List<ResourceUri> resourceUris = Lists
      .newArrayList(new ResourceUri(ResourceType.JAR, "hdfs://path/to/my/jar/my.jar"));
  Function localFunction = new Function("fn1", LOCAL_DATABASE, "com.hotels.hive.FN1", "hadoop", PrincipalType.USER, 0,
      FunctionType.JAVA, resourceUris);
  localServer.client().createFunction(localFunction);
  Function remoteFunction = new Function("fn2", REMOTE_DATABASE, "com.hotels.hive.FN1", "hadoop", PrincipalType.USER,
      0, FunctionType.JAVA, resourceUris);
  remoteServer.client().createFunction(remoteFunction);

  GetAllFunctionsResponse allFunctions = proxy.getAllFunctions();
  List<Function> functions = allFunctions.getFunctions();
  assertThat(functions.size(), is(3));
  assertThat(functions.get(0).getFunctionName(), is("fn1"));
  assertThat(functions.get(0).getDbName(), is("primary_" + LOCAL_DATABASE));
  assertThat(functions.get(1).getFunctionName(), is("fn1"));
  assertThat(functions.get(1).getDbName(), is(LOCAL_DATABASE));
  assertThat(functions.get(2).getFunctionName(), is("fn2"));
  assertThat(functions.get(2).getDbName(), is(PREFIXED_REMOTE_DATABASE));
}
 
Example #4
Source File: HiveStubs.java    From waggle-dance with Apache License 2.0
public static Function newFunction(String databaseName, String functionName) {
  List<ResourceUri> resourceUris = Lists
      .newArrayList(new ResourceUri(ResourceType.JAR, "hdfs://path/to/my/jar/my.jar"));
  Function function = new Function(functionName, databaseName, "com.hotels.hive.FN", "hadoop", PrincipalType.USER, 0,
      FunctionType.JAVA, resourceUris);
  return function;
}
 
Example #5
Source File: HiveCatalog.java    From flink with Apache License 2.0
private static Function instantiateHiveFunction(ObjectPath functionPath, CatalogFunction function) {
  boolean isGeneric = function.isGeneric();

  // Hive Function does not have properties map
  // thus, use a prefix in class name to distinguish Flink and Hive functions
  String functionClassName;
  if (function.getFunctionLanguage().equals(FunctionLanguage.JAVA)) {
    functionClassName = isGeneric ?
        FLINK_FUNCTION_PREFIX + function.getClassName() :
        function.getClassName();
  } else if (function.getFunctionLanguage().equals(FunctionLanguage.PYTHON)) {
    functionClassName = FLINK_PYTHON_FUNCTION_PREFIX + function.getClassName();
  } else {
    throw new UnsupportedOperationException("HiveCatalog supports only creating" +
        " JAVA or PYTHON based function for now");
  }

  return new Function(
      // due to https://issues.apache.org/jira/browse/HIVE-22053, we have to normalize function name ourselves
      functionPath.getObjectName().trim().toLowerCase(),
      functionPath.getDatabaseName(),
      functionClassName,
      null,                                       // Owner name
      PrincipalType.GROUP,                        // Temporarily set to GROUP type because it's required by Hive. May change later
      (int) (System.currentTimeMillis() / 1000),
      FunctionType.JAVA,                          // FunctionType only has JAVA now
      new ArrayList<>()                           // Resource URIs
  );
}