Java Code Examples for org.apache.flink.table.catalog.hive.client.HiveShimLoader#getHiveVersion()

The following examples show how to use org.apache.flink.table.catalog.hive.client.HiveShimLoader#getHiveVersion(). All examples are taken from the Apache Flink project (Apache License 2.0); the source file for each example is noted above its code.
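HiveShimLoader#getHiveVersion() determines the Hive version present on the classpath, so version-dependent components such as HiveModule or HiveCatalog can be constructed without hard-coding a version (several examples below also use it as a fallback when no explicit version is supplied). A minimal sketch of that pattern, modeled on Examples 1 and 7 below; the version string in the comment is purely illustrative:

String hiveVersion = HiveShimLoader.getHiveVersion();   // e.g. "2.3.4", depending on the Hive jars on the classpath
HiveModule hiveModule = new HiveModule(hiveVersion);    // expose Hive built-in functions matching that version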
Example 1
Source File: HiveModuleTest.java    From flink with Apache License 2.0
@Test
public void testNumberOfBuiltinFunctions() {
	// The expected number of built-in functions depends on which Hive version is on the classpath.
	String hiveVersion = HiveShimLoader.getHiveVersion();
	HiveModule hiveModule = new HiveModule(hiveVersion);

	switch (hiveVersion) {
		case HIVE_VERSION_V1_2_0:
			assertEquals(231, hiveModule.listFunctions().size());
			break;
		case HIVE_VERSION_V2_0_0:
			assertEquals(235, hiveModule.listFunctions().size());
			break;
		case HIVE_VERSION_V2_1_1:
			assertEquals(245, hiveModule.listFunctions().size());
			break;
		case HIVE_VERSION_V2_2_0:
			assertEquals(261, hiveModule.listFunctions().size());
			break;
		case HIVE_VERSION_V2_3_4:
			assertEquals(279, hiveModule.listFunctions().size());
			break;
		case HIVE_VERSION_V3_1_1:
			assertEquals(298, hiveModule.listFunctions().size());
			break;
	}
}
 
Example 2
Source File: HiveRunnerShimLoader.java    From flink with Apache License 2.0
public static HiveRunnerShim load() {
	String hiveVersion = HiveShimLoader.getHiveVersion();
	return hiveRunnerShims.computeIfAbsent(hiveVersion, v -> {
		switch (v) {
			case HiveShimLoader.HIVE_V1_VERSION_NAME:
				return new HiveRunnerShimV3();
			case HiveShimLoader.HIVE_V2_VERSION_NAME:
				return new HiveRunnerShimV4();
			default:
				throw new RuntimeException("Unsupported Hive version " + v);
		}
	});
}
 
Example 3
Source File: HiveRunnerShimLoader.java    From flink with Apache License 2.0
public static HiveRunnerShim load() {
	// Map each supported Hive version to a HiveRunner shim (v3 for Hive 1.x, v4 for Hive 2.x/3.x),
	// caching one shim instance per version.
	String hiveVersion = HiveShimLoader.getHiveVersion();
	return hiveRunnerShims.computeIfAbsent(hiveVersion, v -> {
		switch (v) {
			case HiveShimLoader.HIVE_VERSION_V1_0_0:
			case HiveShimLoader.HIVE_VERSION_V1_0_1:
			case HiveShimLoader.HIVE_VERSION_V1_1_0:
			case HiveShimLoader.HIVE_VERSION_V1_1_1:
			case HiveShimLoader.HIVE_VERSION_V1_2_0:
			case HiveShimLoader.HIVE_VERSION_V1_2_1:
			case HiveShimLoader.HIVE_VERSION_V1_2_2:
				return new HiveRunnerShimV3();
			case HiveShimLoader.HIVE_VERSION_V2_0_0:
			case HiveShimLoader.HIVE_VERSION_V2_0_1:
			case HiveShimLoader.HIVE_VERSION_V2_1_0:
			case HiveShimLoader.HIVE_VERSION_V2_1_1:
			case HiveShimLoader.HIVE_VERSION_V2_2_0:
			case HiveShimLoader.HIVE_VERSION_V2_3_0:
			case HiveShimLoader.HIVE_VERSION_V2_3_1:
			case HiveShimLoader.HIVE_VERSION_V2_3_2:
			case HiveShimLoader.HIVE_VERSION_V2_3_3:
			case HiveShimLoader.HIVE_VERSION_V2_3_4:
			case HiveShimLoader.HIVE_VERSION_V2_3_5:
			case HiveShimLoader.HIVE_VERSION_V2_3_6:
			case HiveShimLoader.HIVE_VERSION_V3_1_0:
			case HiveShimLoader.HIVE_VERSION_V3_1_1:
			case HiveShimLoader.HIVE_VERSION_V3_1_2:
				return new HiveRunnerShimV4();
			default:
				throw new RuntimeException("Unsupported Hive version " + v);
		}
	});
}
 
Example 4
Source File: HiveTestUtils.java    From flink with Apache License 2.0
public static HiveCatalog createHiveCatalog(String name, String hiveVersion) {
	return new HiveCatalog(name, null, createHiveConf(),
			StringUtils.isNullOrWhitespaceOnly(hiveVersion) ? HiveShimLoader.getHiveVersion() : hiveVersion);
}
 
Example 5
Source File: HiveTestUtils.java    From flink with Apache License 2.0
public static HiveCatalog createHiveCatalog(HiveConf hiveConf) {
	return new HiveCatalog(CatalogTest.TEST_CATALOG_NAME, null, hiveConf, HiveShimLoader.getHiveVersion());
}
 
Example 6
Source File: HiveCatalog.java    From flink with Apache License 2.0
public HiveCatalog(String catalogName, @Nullable String defaultDatabase, @Nullable String hiveConfDir) {
	this(catalogName, defaultDatabase, hiveConfDir, HiveShimLoader.getHiveVersion());
}
 
Example 7
Source File: HiveModule.java    From flink with Apache License 2.0
public HiveModule() {
	this(HiveShimLoader.getHiveVersion());
}
 
Example 8
Source File: HiveTestUtils.java    From flink with Apache License 2.0
public static HiveCatalog createHiveCatalog(String name, String hiveVersion) {
	return new HiveCatalog(name, null, createHiveConf(),
			StringUtils.isNullOrWhitespaceOnly(hiveVersion) ? HiveShimLoader.getHiveVersion() : hiveVersion, true);
}
 
Example 9
Source File: HiveTestUtils.java    From flink with Apache License 2.0
public static HiveCatalog createHiveCatalog(HiveConf hiveConf) {
	return new HiveCatalog(CatalogTest.TEST_CATALOG_NAME, null, hiveConf, HiveShimLoader.getHiveVersion(), true);
}
 
Example 10
Source File: TableEnvHiveConnectorITCase.java    From flink with Apache License 2.0
@Test
public void testUDTF() throws Exception {
	// W/o https://issues.apache.org/jira/browse/HIVE-11878 Hive registers the App classloader as the classloader
	// for the UDTF and closes the App classloader when we tear down the session. This causes problems for JUnit code
	// and shutdown hooks that have to run after the test finishes, because App classloader can no longer load new
	// classes. And will crash the forked JVM, thus failing the test phase.
	// Therefore disable such tests for older Hive versions.
	String hiveVersion = HiveShimLoader.getHiveVersion();
	Assume.assumeTrue(hiveVersion.compareTo("2.0.0") >= 0 || hiveVersion.compareTo("1.3.0") >= 0);
	TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
	tableEnv.executeSql("create database db1");
	try {
		tableEnv.executeSql("create table db1.simple (i int,a array<int>)");
		tableEnv.executeSql("create table db1.nested (a array<map<int, string>>)");
		tableEnv.executeSql("create function hiveudtf as 'org.apache.hadoop.hive.ql.udf.generic.GenericUDTFExplode'");
		hiveShell.insertInto("db1", "simple").addRow(3, Arrays.asList(1, 2, 3)).commit();
		Map<Integer, String> map1 = new HashMap<>();
		map1.put(1, "a");
		map1.put(2, "b");
		Map<Integer, String> map2 = new HashMap<>();
		map2.put(3, "c");
		hiveShell.insertInto("db1", "nested").addRow(Arrays.asList(map1, map2)).commit();

		List<Row> results = Lists.newArrayList(
				tableEnv.sqlQuery("select x from db1.simple, lateral table(hiveudtf(a)) as T(x)").execute().collect());
		assertEquals("[1, 2, 3]", results.toString());
		results = Lists.newArrayList(
				tableEnv.sqlQuery("select x from db1.nested, lateral table(hiveudtf(a)) as T(x)").execute().collect());
		assertEquals("[{1=a, 2=b}, {3=c}]", results.toString());

		tableEnv.executeSql("create table db1.ts (a array<timestamp>)");
		HiveTestUtils.createTextTableInserter(hiveShell, "db1", "ts").addRow(new Object[]{
				new Object[]{Timestamp.valueOf("2015-04-28 15:23:00"), Timestamp.valueOf("2016-06-03 17:05:52")}})
				.commit();
		results = Lists.newArrayList(
				tableEnv.sqlQuery("select x from db1.ts, lateral table(hiveudtf(a)) as T(x)").execute().collect());
		assertEquals("[2015-04-28T15:23, 2016-06-03T17:05:52]", results.toString());
	} finally {
		tableEnv.executeSql("drop database db1 cascade");
		tableEnv.executeSql("drop function hiveudtf");
	}
}
 
Example 11
Source File: HiveGenericUDAFTest.java    From flink with Apache License 2.0
private static HiveGenericUDAF init(Class hiveUdfClass, Object[] constantArgs, DataType[] argTypes) throws Exception {
	HiveFunctionWrapper<GenericUDAFResolver2> wrapper = new HiveFunctionWrapper(hiveUdfClass.getName());

	HiveGenericUDAF udf = new HiveGenericUDAF(wrapper, HiveShimLoader.getHiveVersion());

	udf.setArgumentTypesAndConstants(constantArgs, argTypes);
	udf.getHiveResultType(constantArgs, argTypes);

	udf.open(null);

	return udf;
}