org.apache.flink.table.catalog.hive.client.HiveShimLoader Java Examples

The following examples show how to use org.apache.flink.table.catalog.hive.client.HiveShimLoader. They are taken from open-source projects (Apache Flink and Alink); the originating source file and license are noted in the header above each example.
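Before looking at the individual examples, here is a minimal, self-contained sketch of the pattern they all share: resolve a Hive version string, either auto-detected from the classpath via HiveShimLoader.getHiveVersion() or supplied explicitly, and then obtain the matching HiveShim through HiveShimLoader.loadHiveShim(String). The class name HiveShimLoaderDemo is hypothetical and not part of any of the projects quoted below.

import org.apache.flink.table.catalog.hive.client.HiveShim;
import org.apache.flink.table.catalog.hive.client.HiveShimLoader;

public class HiveShimLoaderDemo {

	public static void main(String[] args) {
		// Detect the Hive version from the Hive libraries on the classpath.
		String hiveVersion = HiveShimLoader.getHiveVersion();

		// Alternatively, pin one of the supported versions exposed as constants,
		// e.g. HiveShimLoader.HIVE_VERSION_V2_3_4.

		// Load the shim that adapts Flink's Hive integration to that version;
		// an unsupported version string causes loadHiveShim to throw.
		HiveShim hiveShim = HiveShimLoader.loadHiveShim(hiveVersion);

		System.out.println("Using shim " + hiveShim.getClass().getSimpleName()
				+ " for Hive " + hiveVersion);
	}
}

Production classes such as HiveCatalog, HiveTableSource, and HiveModule hold the loaded shim in a field and reuse it, as the examples below show.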
Example #1
Source File: HiveOutputFormatFactoryTest.java    From flink with Apache License 2.0
@Test
public void testCreateOutputFormat() {
	TableSchema schema = TableSchema.builder().field("x", DataTypes.INT()).build();
	SerDeInfo serDeInfo = new SerDeInfo("name", LazySimpleSerDe.class.getName(), Collections.emptyMap());
	HiveWriterFactory writerFactory = new HiveWriterFactory(
			new JobConf(),
			VerifyURIOutputFormat.class,
			serDeInfo, schema,
			new String[0],
			new Properties(),
			HiveShimLoader.loadHiveShim(HiveShimLoader.getHiveVersion()),
			false);
	HiveOutputFormatFactory factory = new HiveOutputFormatFactory(writerFactory);
	org.apache.flink.core.fs.Path path = new org.apache.flink.core.fs.Path(TEST_URI_SCHEME, TEST_URI_AUTHORITY, "/foo/path");
	factory.createOutputFormat(path);
}
 
Example #2
Source File: HiveModuleTest.java    From flink with Apache License 2.0
@Test
public void testNumberOfBuiltinFunctions() {
	String hiveVersion = HiveShimLoader.getHiveVersion();
	HiveModule hiveModule = new HiveModule(hiveVersion);

	switch (hiveVersion) {
		case HIVE_VERSION_V1_2_0:
			assertEquals(231, hiveModule.listFunctions().size());
			break;
		case HIVE_VERSION_V2_0_0:
			assertEquals(235, hiveModule.listFunctions().size());
			break;
		case HIVE_VERSION_V2_1_1:
			assertEquals(245, hiveModule.listFunctions().size());
			break;
		case HIVE_VERSION_V2_2_0:
			assertEquals(261, hiveModule.listFunctions().size());
			break;
		case HIVE_VERSION_V2_3_4:
			assertEquals(279, hiveModule.listFunctions().size());
			break;
		case HIVE_VERSION_V3_1_1:
			assertEquals(298, hiveModule.listFunctions().size());
			break;
	}
}
 
Example #3
Source File: HiveBatchSource.java    From Alink with Apache License 2.0
private HiveBatchSource(JobConf jobConf, ObjectPath tablePath, CatalogTable catalogTable,
                        List<Map<String, String>> remainingPartitions,
                        String hiveVersion,
                        boolean partitionPruned,
                        int[] projectedFields,
                        boolean isLimitPushDown,
                        long limit) {
    this.jobConf = Preconditions.checkNotNull(jobConf);
    this.tablePath = Preconditions.checkNotNull(tablePath);
    this.catalogTable = Preconditions.checkNotNull(catalogTable);
    this.remainingPartitions = remainingPartitions;
    this.hiveVersion = hiveVersion;
    hiveShim = HiveShimLoader.loadHiveShim(hiveVersion);
    this.partitionPruned = partitionPruned;
    this.projectedFields = projectedFields;
    this.isLimitPushDown = isLimitPushDown;
    this.limit = limit;
}
 
Example #4
Source File: HiveTableSource.java    From flink with Apache License 2.0
private HiveTableSource(
		JobConf jobConf,
		ReadableConfig flinkConf,
		ObjectPath tablePath,
		CatalogTable catalogTable,
		List<Map<String, String>> remainingPartitions,
		String hiveVersion,
		boolean partitionPruned,
		int[] projectedFields,
		boolean isLimitPushDown,
		long limit) {
	this.jobConf = Preconditions.checkNotNull(jobConf);
	this.flinkConf = Preconditions.checkNotNull(flinkConf);
	this.tablePath = Preconditions.checkNotNull(tablePath);
	this.catalogTable = Preconditions.checkNotNull(catalogTable);
	this.remainingPartitions = remainingPartitions;
	this.hiveVersion = hiveVersion;
	hiveShim = HiveShimLoader.loadHiveShim(hiveVersion);
	this.partitionPruned = partitionPruned;
	this.projectedFields = projectedFields;
	this.isLimitPushDown = isLimitPushDown;
	this.limit = limit;
}
 
Example #5
Source File: HiveCatalog.java    From flink with Apache License 2.0
@VisibleForTesting
protected HiveCatalog(String catalogName, String defaultDatabase, @Nullable HiveConf hiveConf, String hiveVersion,
		boolean allowEmbedded) {
	super(catalogName, defaultDatabase == null ? DEFAULT_DB : defaultDatabase);

	this.hiveConf = hiveConf == null ? createHiveConf(null) : hiveConf;
	if (!allowEmbedded) {
		checkArgument(!StringUtils.isNullOrWhitespaceOnly(this.hiveConf.getVar(HiveConf.ConfVars.METASTOREURIS)),
				"Embedded metastore is not allowed. Make sure you have set a valid value for " +
						HiveConf.ConfVars.METASTOREURIS.toString());
	}
	checkArgument(!StringUtils.isNullOrWhitespaceOnly(hiveVersion), "hiveVersion cannot be null or empty");
	this.hiveVersion = hiveVersion;
	hiveShim = HiveShimLoader.loadHiveShim(hiveVersion);
	// add this to hiveConf to make sure table factory and source/sink see the same Hive version as HiveCatalog
	this.hiveConf.set(HiveCatalogValidator.CATALOG_HIVE_VERSION, hiveVersion);

	LOG.info("Created HiveCatalog '{}'", catalogName);
}
 
Example #6
Source File: HiveTableInputFormat.java    From flink with Apache License 2.0
@Override
public void open(HiveTableInputSplit split) throws IOException {
	HiveTablePartition partition = split.getHiveTablePartition();
	if (!useMapRedReader && useOrcVectorizedRead(partition)) {
		this.reader = new HiveVectorizedOrcSplitReader(
				hiveVersion, jobConf, fieldNames, fieldTypes, selectedFields, split);
	} else if (!useMapRedReader && useParquetVectorizedRead(partition)) {
		this.reader = new HiveVectorizedParquetSplitReader(
				hiveVersion, jobConf, fieldNames, fieldTypes, selectedFields, split);
	} else {
		JobConf clonedConf = new JobConf(jobConf);
		addSchemaToConf(clonedConf);
		this.reader = new HiveMapredSplitReader(clonedConf, partitionKeys, fieldTypes, selectedFields, split,
				HiveShimLoader.loadHiveShim(hiveVersion));
	}
	currentReadCount = 0L;
}
 
Example #7
Source File: HiveModuleFactory.java    From flink with Apache License 2.0
@Override
public Module createModule(Map<String, String> properties) {
	final DescriptorProperties descProperties = getValidatedProperties(properties);

	final String hiveVersion = descProperties.getOptionalString(MODULE_HIVE_VERSION)
		.orElse(HiveShimLoader.getHiveVersion());

	return new HiveModule(hiveVersion);
}
 
Example #8
Source File: HiveRunnerShimLoader.java    From flink with Apache License 2.0
public static HiveRunnerShim load() {
	String hiveVersion = HiveShimLoader.getHiveVersion();
	return hiveRunnerShims.computeIfAbsent(hiveVersion, v -> {
		switch (v) {
			case HiveShimLoader.HIVE_VERSION_V1_0_0:
			case HiveShimLoader.HIVE_VERSION_V1_0_1:
			case HiveShimLoader.HIVE_VERSION_V1_1_0:
			case HiveShimLoader.HIVE_VERSION_V1_1_1:
			case HiveShimLoader.HIVE_VERSION_V1_2_0:
			case HiveShimLoader.HIVE_VERSION_V1_2_1:
			case HiveShimLoader.HIVE_VERSION_V1_2_2:
				return new HiveRunnerShimV3();
			case HiveShimLoader.HIVE_VERSION_V2_0_0:
			case HiveShimLoader.HIVE_VERSION_V2_0_1:
			case HiveShimLoader.HIVE_VERSION_V2_1_0:
			case HiveShimLoader.HIVE_VERSION_V2_1_1:
			case HiveShimLoader.HIVE_VERSION_V2_2_0:
			case HiveShimLoader.HIVE_VERSION_V2_3_0:
			case HiveShimLoader.HIVE_VERSION_V2_3_1:
			case HiveShimLoader.HIVE_VERSION_V2_3_2:
			case HiveShimLoader.HIVE_VERSION_V2_3_3:
			case HiveShimLoader.HIVE_VERSION_V2_3_4:
			case HiveShimLoader.HIVE_VERSION_V2_3_5:
			case HiveShimLoader.HIVE_VERSION_V2_3_6:
			case HiveShimLoader.HIVE_VERSION_V3_1_0:
			case HiveShimLoader.HIVE_VERSION_V3_1_1:
			case HiveShimLoader.HIVE_VERSION_V3_1_2:
				return new HiveRunnerShimV4();
			default:
				throw new RuntimeException("Unsupported Hive version " + v);
		}
	});
}
 
Example #9
Source File: TableEnvHiveConnectorITCase.java    From flink with Apache License 2.0
@Test
public void testDifferentFormats() throws Exception {
	String[] formats = new String[]{"orc", "parquet", "sequencefile", "csv", "avro"};
	for (String format : formats) {
		if (format.equals("orc") && HiveShimLoader.getHiveVersion().startsWith("2.0")) {
			// Ignore orc test for Hive version 2.0.x for now due to FLINK-13998
			continue;
		} else if (format.equals("avro") && !HiveVersionTestUtil.HIVE_110_OR_LATER) {
			// timestamp is not supported for avro tables before 1.1.0
			continue;
		}
		readWriteFormat(format);
	}
}
 
Example #10
Source File: TableEnvHiveConnectorITCase.java    From flink with Apache License 2.0
@BeforeClass
public static void setup() {
	HiveConf hiveConf = hiveShell.getHiveConf();
	hiveCatalog = HiveTestUtils.createHiveCatalog(hiveConf);
	hiveCatalog.open();
	hmsClient = HiveMetastoreClientFactory.create(hiveConf, HiveShimLoader.getHiveVersion());
}
 
Example #11
Source File: HiveCatalogHiveMetadataTest.java    From flink with Apache License 2.0
@Test
public void testAlterTableColumnStatistics() throws Exception {
	String hiveVersion = ((HiveCatalog) catalog).getHiveVersion();
	boolean supportDateStats = hiveVersion.compareTo(HiveShimLoader.HIVE_VERSION_V1_2_0) >= 0;
	catalog.createDatabase(db1, createDb(), false);
	TableSchema.Builder builder = TableSchema.builder()
			.field("first", DataTypes.STRING())
			.field("second", DataTypes.INT())
			.field("third", DataTypes.BOOLEAN())
			.field("fourth", DataTypes.DOUBLE())
			.field("fifth", DataTypes.BIGINT())
			.field("sixth", DataTypes.BYTES())
			.field("seventh", DataTypes.DECIMAL(10, 3))
			.field("eighth", DataTypes.DECIMAL(30, 3));
	if (supportDateStats) {
		builder.field("ninth", DataTypes.DATE());
	}
	TableSchema tableSchema = builder.build();
	CatalogTable catalogTable = new CatalogTableImpl(tableSchema, getBatchTableProperties(), TEST_COMMENT);
	catalog.createTable(path1, catalogTable, false);
	Map<String, CatalogColumnStatisticsDataBase> columnStatisticsDataBaseMap = new HashMap<>();
	columnStatisticsDataBaseMap.put("first", new CatalogColumnStatisticsDataString(10L, 5.2, 3L, 100L));
	columnStatisticsDataBaseMap.put("second", new CatalogColumnStatisticsDataLong(0L, 1000L, 3L, 0L));
	columnStatisticsDataBaseMap.put("third", new CatalogColumnStatisticsDataBoolean(15L, 20L, 3L));
	columnStatisticsDataBaseMap.put("fourth", new CatalogColumnStatisticsDataDouble(15.02, 20.01, 3L, 10L));
	columnStatisticsDataBaseMap.put("fifth", new CatalogColumnStatisticsDataLong(0L, 20L, 3L, 2L));
	columnStatisticsDataBaseMap.put("sixth", new CatalogColumnStatisticsDataBinary(150L, 20D, 3L));
	columnStatisticsDataBaseMap.put("seventh", new CatalogColumnStatisticsDataDouble(1.23, 99.456, 100L, 0L));
	columnStatisticsDataBaseMap.put("eighth", new CatalogColumnStatisticsDataDouble(0.123, 123456.789, 5723L, 19L));
	if (supportDateStats) {
		columnStatisticsDataBaseMap.put("ninth", new CatalogColumnStatisticsDataDate(
				new Date(71L), new Date(17923L), 132L, 0L));
	}
	CatalogColumnStatistics catalogColumnStatistics = new CatalogColumnStatistics(columnStatisticsDataBaseMap);
	catalog.alterTableColumnStatistics(path1, catalogColumnStatistics, false);

	checkEquals(catalogColumnStatistics, catalog.getTableColumnStatistics(path1));
}
 
Example #12
Source File: HiveTableSource.java    From flink with Apache License 2.0
public HiveTableSource(
		JobConf jobConf, ReadableConfig flinkConf, ObjectPath tablePath, CatalogTable catalogTable) {
	this.jobConf = Preconditions.checkNotNull(jobConf);
	this.flinkConf = Preconditions.checkNotNull(flinkConf);
	this.tablePath = Preconditions.checkNotNull(tablePath);
	this.catalogTable = Preconditions.checkNotNull(catalogTable);
	this.hiveVersion = Preconditions.checkNotNull(jobConf.get(HiveCatalogValidator.CATALOG_HIVE_VERSION),
			"Hive version is not defined");
	hiveShim = HiveShimLoader.loadHiveShim(hiveVersion);
	partitionPruned = false;
}
 
Example #13
Source File: HiveTableSink.java    From flink with Apache License 2.0
public HiveTableSink(
		boolean userMrWriter, boolean isBounded, JobConf jobConf, ObjectIdentifier identifier, CatalogTable table) {
	this.userMrWriter = userMrWriter;
	this.isBounded = isBounded;
	this.jobConf = jobConf;
	this.identifier = identifier;
	this.catalogTable = table;
	hiveVersion = Preconditions.checkNotNull(jobConf.get(HiveCatalogValidator.CATALOG_HIVE_VERSION),
			"Hive version is not defined");
	hiveShim = HiveShimLoader.loadHiveShim(hiveVersion);
	tableSchema = TableSchemaUtils.getPhysicalSchema(table.getSchema());
}
 
Example #14
Source File: HiveCatalogFactory.java    From flink with Apache License 2.0
@Override
public Catalog createCatalog(String name, Map<String, String> properties) {
	final DescriptorProperties descriptorProperties = getValidatedProperties(properties);

	final String defaultDatabase =
		descriptorProperties.getOptionalString(CATALOG_DEFAULT_DATABASE)
			.orElse(HiveCatalog.DEFAULT_DB);

	final Optional<String> hiveConfDir = descriptorProperties.getOptionalString(CATALOG_HIVE_CONF_DIR);

	final String version = descriptorProperties.getOptionalString(CATALOG_HIVE_VERSION).orElse(HiveShimLoader.getHiveVersion());

	return new HiveCatalog(name, defaultDatabase, hiveConfDir.orElse(null), version);
}
 
Example #15
Source File: HiveModule.java    From flink with Apache License 2.0
public HiveModule(String hiveVersion) {
	checkArgument(!StringUtils.isNullOrWhitespaceOnly(hiveVersion), "hiveVersion cannot be null");

	this.hiveVersion = hiveVersion;
	this.hiveShim = HiveShimLoader.loadHiveShim(hiveVersion);
	this.factory = new HiveFunctionDefinitionFactory(hiveShim);
}
 
Example #16
Source File: HiveCatalogFactory.java    From flink with Apache License 2.0
@Override
public Catalog createCatalog(String name, Map<String, String> properties) {
	final DescriptorProperties descriptorProperties = getValidatedProperties(properties);

	final String defaultDatabase =
		descriptorProperties.getOptionalString(CATALOG_DEFAULT_DATABASE)
			.orElse(HiveCatalog.DEFAULT_DB);

	final Optional<String> hiveConfDir = descriptorProperties.getOptionalString(CATALOG_HIVE_CONF_DIR);

	final String version = descriptorProperties.getOptionalString(CATALOG_HIVE_VERSION).orElse(HiveShimLoader.getHiveVersion());

	return new HiveCatalog(name, defaultDatabase, hiveConfDir.orElse(null), version);
}
 
Example #17
Source File: HiveBatchSource.java    From Alink with Apache License 2.0
public HiveBatchSource(JobConf jobConf, ObjectPath tablePath, CatalogTable catalogTable) {
    this.jobConf = Preconditions.checkNotNull(jobConf);
    this.tablePath = Preconditions.checkNotNull(tablePath);
    this.catalogTable = Preconditions.checkNotNull(catalogTable);
    this.hiveVersion = Preconditions.checkNotNull(jobConf.get(HiveCatalogValidator.CATALOG_HIVE_VERSION),
        "Hive version is not defined");
    hiveShim = HiveShimLoader.loadHiveShim(hiveVersion);
    partitionPruned = false;
}
 
Example #18
Source File: HiveRunnerShimLoader.java    From flink with Apache License 2.0
public static HiveRunnerShim load() {
	String hiveVersion = HiveShimLoader.getHiveVersion();
	return hiveRunnerShims.computeIfAbsent(hiveVersion, v -> {
		switch (v) {
			case HiveShimLoader.HIVE_V1_VERSION_NAME:
				return new HiveRunnerShimV3();
			case HiveShimLoader.HIVE_V2_VERSION_NAME:
				return new HiveRunnerShimV4();
			default:
				throw new RuntimeException("Unsupported Hive version " + v);
		}
	});
}
 
Example #19
Source File: TableEnvHiveConnectorTest.java    From flink with Apache License 2.0
@BeforeClass
public static void setup() {
	HiveConf hiveConf = hiveShell.getHiveConf();
	hiveCatalog = HiveTestUtils.createHiveCatalog(hiveConf);
	hiveCatalog.open();
	hmsClient = HiveMetastoreClientFactory.create(hiveConf, HiveShimLoader.getHiveVersion());
}
 
Example #20
Source File: HiveModule.java    From flink with Apache License 2.0
public HiveModule() {
	this(HiveShimLoader.getHiveVersion());
}
 
Example #21
Source File: HiveStatsUtil.java    From flink with Apache License 2.0
/**
 * Create Flink ColumnStats from Hive ColumnStatisticsData.
 */
private static CatalogColumnStatisticsDataBase createTableColumnStats(DataType colType, ColumnStatisticsData stats, String hiveVersion) {
	HiveShim hiveShim = HiveShimLoader.loadHiveShim(hiveVersion);
	if (stats.isSetBinaryStats()) {
		BinaryColumnStatsData binaryStats = stats.getBinaryStats();
		return new CatalogColumnStatisticsDataBinary(
				binaryStats.isSetMaxColLen() ? binaryStats.getMaxColLen() : null,
				binaryStats.isSetAvgColLen() ? binaryStats.getAvgColLen() : null,
				binaryStats.isSetNumNulls() ? binaryStats.getNumNulls() : null);
	} else if (stats.isSetBooleanStats()) {
		BooleanColumnStatsData booleanStats = stats.getBooleanStats();
		return new CatalogColumnStatisticsDataBoolean(
				booleanStats.isSetNumTrues() ? booleanStats.getNumTrues() : null,
				booleanStats.isSetNumFalses() ? booleanStats.getNumFalses() : null,
				booleanStats.isSetNumNulls() ? booleanStats.getNumNulls() : null);
	} else if (hiveShim.isDateStats(stats)) {
		return hiveShim.toFlinkDateColStats(stats);
	} else if (stats.isSetDoubleStats()) {
		DoubleColumnStatsData doubleStats = stats.getDoubleStats();
		return new CatalogColumnStatisticsDataDouble(
				doubleStats.isSetLowValue() ? doubleStats.getLowValue() : null,
				doubleStats.isSetHighValue() ? doubleStats.getHighValue() : null,
				doubleStats.isSetNumDVs() ? doubleStats.getNumDVs() : null,
				doubleStats.isSetNumNulls() ? doubleStats.getNumNulls() : null);
	} else if (stats.isSetLongStats()) {
		LongColumnStatsData longColStats = stats.getLongStats();
		return new CatalogColumnStatisticsDataLong(
				longColStats.isSetLowValue() ? longColStats.getLowValue() : null,
				longColStats.isSetHighValue() ? longColStats.getHighValue() : null,
				longColStats.isSetNumDVs() ? longColStats.getNumDVs() : null,
				longColStats.isSetNumNulls() ? longColStats.getNumNulls() : null);
	} else if (stats.isSetStringStats()) {
		StringColumnStatsData stringStats = stats.getStringStats();
		return new CatalogColumnStatisticsDataString(
				stringStats.isSetMaxColLen() ? stringStats.getMaxColLen() : null,
				stringStats.isSetAvgColLen() ? stringStats.getAvgColLen() : null,
				stringStats.isSetNumDVs() ? stringStats.getNumDVs() : null,
				stringStats.isSetNumNulls() ? stringStats.getNumNulls() : null);
	} else if (stats.isSetDecimalStats()) {
		DecimalColumnStatsData decimalStats = stats.getDecimalStats();
		// for now, just return CatalogColumnStatisticsDataDouble for decimal columns
		Double max = null;
		if (decimalStats.isSetHighValue()) {
			max = toHiveDecimal(decimalStats.getHighValue()).doubleValue();
		}
		Double min = null;
		if (decimalStats.isSetLowValue()) {
			min = toHiveDecimal(decimalStats.getLowValue()).doubleValue();
		}
		Long ndv = decimalStats.isSetNumDVs() ? decimalStats.getNumDVs() : null;
		Long nullCount = decimalStats.isSetNumNulls() ? decimalStats.getNumNulls() : null;
		return new CatalogColumnStatisticsDataDouble(min, max, ndv, nullCount);
	} else {
		LOG.warn("Flink does not support converting ColumnStatisticsData '{}' for Hive column type '{}' yet.", stats, colType);
		return null;
	}
}
 
Example #22
Source File: HiveCatalog.java    From flink with Apache License 2.0
public HiveCatalog(String catalogName, @Nullable String defaultDatabase, @Nullable String hiveConfDir) {
	this(catalogName, defaultDatabase, hiveConfDir, HiveShimLoader.getHiveVersion());
}
 
Example #23
Source File: HiveTestUtils.java    From flink with Apache License 2.0
public static HiveCatalog createHiveCatalog(String name, String hiveVersion) {
	return new HiveCatalog(name, null, createHiveConf(),
			StringUtils.isNullOrWhitespaceOnly(hiveVersion) ? HiveShimLoader.getHiveVersion() : hiveVersion, true);
}
 
Example #24
Source File: HiveTestUtils.java    From flink with Apache License 2.0
public static HiveCatalog createHiveCatalog(HiveConf hiveConf) {
	return new HiveCatalog(CatalogTest.TEST_CATALOG_NAME, null, hiveConf, HiveShimLoader.getHiveVersion(), true);
}
 
Example #25
Source File: TableEnvHiveConnectorITCase.java    From flink with Apache License 2.0
@Test
public void testUDTF() throws Exception {
	// W/o https://issues.apache.org/jira/browse/HIVE-11878 Hive registers the App classloader as the classloader
	// for the UDTF and closes the App classloader when we tear down the session. This causes problems for JUnit code
	// and shutdown hooks that have to run after the test finishes, because App classloader can no longer load new
	// classes. And will crash the forked JVM, thus failing the test phase.
	// Therefore disable such tests for older Hive versions.
	String hiveVersion = HiveShimLoader.getHiveVersion();
	Assume.assumeTrue(hiveVersion.compareTo("2.0.0") >= 0 || hiveVersion.compareTo("1.3.0") >= 0);
	TableEnvironment tableEnv = getTableEnvWithHiveCatalog();
	tableEnv.executeSql("create database db1");
	try {
		tableEnv.executeSql("create table db1.simple (i int,a array<int>)");
		tableEnv.executeSql("create table db1.nested (a array<map<int, string>>)");
		tableEnv.executeSql("create function hiveudtf as 'org.apache.hadoop.hive.ql.udf.generic.GenericUDTFExplode'");
		hiveShell.insertInto("db1", "simple").addRow(3, Arrays.asList(1, 2, 3)).commit();
		Map<Integer, String> map1 = new HashMap<>();
		map1.put(1, "a");
		map1.put(2, "b");
		Map<Integer, String> map2 = new HashMap<>();
		map2.put(3, "c");
		hiveShell.insertInto("db1", "nested").addRow(Arrays.asList(map1, map2)).commit();

		List<Row> results = Lists.newArrayList(
				tableEnv.sqlQuery("select x from db1.simple, lateral table(hiveudtf(a)) as T(x)").execute().collect());
		assertEquals("[1, 2, 3]", results.toString());
		results = Lists.newArrayList(
				tableEnv.sqlQuery("select x from db1.nested, lateral table(hiveudtf(a)) as T(x)").execute().collect());
		assertEquals("[{1=a, 2=b}, {3=c}]", results.toString());

		tableEnv.executeSql("create table db1.ts (a array<timestamp>)");
		HiveTestUtils.createTextTableInserter(hiveShell, "db1", "ts").addRow(new Object[]{
				new Object[]{Timestamp.valueOf("2015-04-28 15:23:00"), Timestamp.valueOf("2016-06-03 17:05:52")}})
				.commit();
		results = Lists.newArrayList(
				tableEnv.sqlQuery("select x from db1.ts, lateral table(hiveudtf(a)) as T(x)").execute().collect());
		assertEquals("[2015-04-28T15:23, 2016-06-03T17:05:52]", results.toString());
	} finally {
		tableEnv.executeSql("drop database db1 cascade");
		tableEnv.executeSql("drop function hiveudtf");
	}
}
 
Example #26
Source File: HiveTestUtils.java    From flink with Apache License 2.0
public static HiveCatalog createHiveCatalog(HiveConf hiveConf) {
	return new HiveCatalog(CatalogTest.TEST_CATALOG_NAME, null, hiveConf, HiveShimLoader.getHiveVersion());
}
 
Example #27
Source File: HiveTestUtils.java    From flink with Apache License 2.0
public static HiveCatalog createHiveCatalog(String name, String hiveVersion) {
	return new HiveCatalog(name, null, createHiveConf(),
			StringUtils.isNullOrWhitespaceOnly(hiveVersion) ? HiveShimLoader.getHiveVersion() : hiveVersion);
}
 
Example #28
Source File: HiveGenericUDAFTest.java    From flink with Apache License 2.0
private static HiveGenericUDAF init(Class hiveUdfClass, Object[] constantArgs, DataType[] argTypes) throws Exception {
	HiveFunctionWrapper<GenericUDAFResolver2> wrapper = new HiveFunctionWrapper(hiveUdfClass.getName());

	HiveGenericUDAF udf = new HiveGenericUDAF(wrapper, HiveShimLoader.loadHiveShim(HiveShimLoader.getHiveVersion()));

	udf.setArgumentTypesAndConstants(constantArgs, argTypes);
	udf.getHiveResultType(constantArgs, argTypes);

	udf.open(null);

	return udf;
}
 
Example #29
Source File: HiveGenericUDAFTest.java    From flink with Apache License 2.0
private static HiveGenericUDAF init(Class hiveUdfClass, Object[] constantArgs, DataType[] argTypes) throws Exception {
	HiveFunctionWrapper<GenericUDAFResolver2> wrapper = new HiveFunctionWrapper(hiveUdfClass.getName());

	HiveGenericUDAF udf = new HiveGenericUDAF(wrapper, HiveShimLoader.getHiveVersion());

	udf.setArgumentTypesAndConstants(constantArgs, argTypes);
	udf.getHiveResultType(constantArgs, argTypes);

	udf.open(null);

	return udf;
}