Java Code Examples for org.apache.flink.table.catalog.CatalogPartitionSpec

The following examples show how to use org.apache.flink.table.catalog.CatalogPartitionSpec. These examples are extracted from the open source Apache Flink project; each one lists its source file and license.
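Before the examples, a quick orientation: a CatalogPartitionSpec is essentially a wrapper around an ordered map from partition-column names to string values. The minimal sketch below (the table layout and partition values are invented for illustration) builds a spec and reads it back, using only the constructor and the getPartitionSpec() accessor that appear throughout the examples.

import java.util.LinkedHashMap;

import org.apache.flink.table.catalog.CatalogPartitionSpec;

public class CatalogPartitionSpecSketch {

	public static void main(String[] args) {
		// partition values for a hypothetical table partitioned by (dt, country)
		LinkedHashMap<String, String> spec = new LinkedHashMap<>();
		spec.put("dt", "2020-04-30");
		spec.put("country", "china");

		// wrap the map in a CatalogPartitionSpec, as the examples below do
		CatalogPartitionSpec partitionSpec = new CatalogPartitionSpec(spec);

		// read the key-value spec back out, e.g. {dt=2020-04-30, country=china}
		System.out.println(partitionSpec.getPartitionSpec());
	}
}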
Example 1
Source Project: flink   Source File: HiveCatalog.java    License: Apache License 2.0
@Override
public List<CatalogPartitionSpec> listPartitions(ObjectPath tablePath)
		throws TableNotExistException, TableNotPartitionedException, CatalogException {
	checkNotNull(tablePath, "Table path cannot be null");

	Table hiveTable = getHiveTable(tablePath);

	ensurePartitionedTable(tablePath, hiveTable);

	try {
		// pass -1 as max_parts to fetch all partitions
		return client.listPartitionNames(tablePath.getDatabaseName(), tablePath.getObjectName(), (short) -1).stream()
			.map(HiveCatalog::createPartitionSpec).collect(Collectors.toList());
	} catch (TException e) {
		throw new CatalogException(
			String.format("Failed to list partitions of table %s", tablePath), e);
	}
}
 
Example 2
Source Project: flink   Source File: HiveCatalog.java    License: Apache License 2.0
@Override
public List<CatalogPartitionSpec> listPartitions(ObjectPath tablePath, CatalogPartitionSpec partitionSpec)
		throws TableNotExistException, TableNotPartitionedException, CatalogException {
	checkNotNull(tablePath, "Table path cannot be null");
	checkNotNull(partitionSpec, "CatalogPartitionSpec cannot be null");

	Table hiveTable = getHiveTable(tablePath);

	ensurePartitionedTable(tablePath, hiveTable);

	try {
		// partition spec can be partial
		List<String> partialVals = MetaStoreUtils.getPvals(hiveTable.getPartitionKeys(), partitionSpec.getPartitionSpec());
		return client.listPartitionNames(tablePath.getDatabaseName(), tablePath.getObjectName(), partialVals,
			(short) -1).stream().map(HiveCatalog::createPartitionSpec).collect(Collectors.toList());
	} catch (TException e) {
		throw new CatalogException(
			String.format("Failed to list partitions of table %s", tablePath), e);
	}
}
 
Example 3
Source Project: flink   Source File: HiveCatalog.java    License: Apache License 2.0
private Partition instantiateHivePartition(Table hiveTable, CatalogPartitionSpec partitionSpec, CatalogPartition catalogPartition)
		throws PartitionSpecInvalidException {
	List<String> partCols = getFieldNames(hiveTable.getPartitionKeys());
	List<String> partValues = getOrderedFullPartitionValues(
		partitionSpec, partCols, new ObjectPath(hiveTable.getDbName(), hiveTable.getTableName()));
	// validate partition values
	for (int i = 0; i < partCols.size(); i++) {
		if (StringUtils.isNullOrWhitespaceOnly(partValues.get(i))) {
			throw new PartitionSpecInvalidException(getName(), partCols,
				new ObjectPath(hiveTable.getDbName(), hiveTable.getTableName()), partitionSpec);
		}
	}
	// TODO: handle GenericCatalogPartition
	StorageDescriptor sd = hiveTable.getSd().deepCopy();
	sd.setLocation(catalogPartition.getProperties().remove(HiveCatalogConfig.PARTITION_LOCATION));

	Map<String, String> properties = new HashMap<>(catalogPartition.getProperties());
	properties.put(HiveCatalogConfig.COMMENT, catalogPartition.getComment());

	return HiveTableUtil.createHivePartition(
			hiveTable.getDbName(),
			hiveTable.getTableName(),
			partValues,
			sd,
			properties);
}
 
Example 4
Source Project: flink   Source File: HiveCatalog.java    License: Apache License 2.0
/**
 * Get a list of ordered partition values by re-arranging them based on the given list of partition keys.
 *
 * @param partitionSpec a partition spec.
 * @param partitionKeys a list of partition keys.
 * @param tablePath path of the table to which the partition belongs.
 * @return A list of partition values ordered according to partitionKeys.
 * @throws PartitionSpecInvalidException thrown if partitionSpec and partitionKeys have different sizes,
 *                                       or any key in partitionKeys doesn't exist in partitionSpec.
 */
private List<String> getOrderedFullPartitionValues(CatalogPartitionSpec partitionSpec, List<String> partitionKeys, ObjectPath tablePath)
		throws PartitionSpecInvalidException {
	Map<String, String> spec = partitionSpec.getPartitionSpec();
	if (spec.size() != partitionKeys.size()) {
		throw new PartitionSpecInvalidException(getName(), partitionKeys, tablePath, partitionSpec);
	}

	List<String> values = new ArrayList<>(spec.size());
	for (String key : partitionKeys) {
		if (!spec.containsKey(key)) {
			throw new PartitionSpecInvalidException(getName(), partitionKeys, tablePath, partitionSpec);
		} else {
			values.add(spec.get(key));
		}
	}

	return values;
}
 
Example 5
Source Project: flink   Source File: CatalogStatisticsTest.java    License: Apache License 2.0
private void createPartitionColumnStats(String part1, int part2, boolean unknown) throws Exception {
	ObjectPath path = ObjectPath.fromString("default_database.PartT");
	LinkedHashMap<String, String> partSpecMap = new LinkedHashMap<>();
	partSpecMap.put("part1", part1);
	partSpecMap.put("part2", String.valueOf(part2));
	CatalogPartitionSpec partSpec = new CatalogPartitionSpec(partSpecMap);

	CatalogColumnStatisticsDataLong longColStats = new CatalogColumnStatisticsDataLong(
			-123L, 763322L, 23L, 77L);
	CatalogColumnStatisticsDataString stringColStats = new CatalogColumnStatisticsDataString(
			152L, 43.5D, 20L, 0L);
	Map<String, CatalogColumnStatisticsDataBase> colStatsMap = new HashMap<>();
	colStatsMap.put("id", unknown ?
			new CatalogColumnStatisticsDataLong(null, null, null, null) :
			longColStats);
	colStatsMap.put("name", unknown ?
			new CatalogColumnStatisticsDataString(null, null, null, null) :
			stringColStats);
	catalog.alterPartitionColumnStatistics(
			path,
			partSpec,
			new CatalogColumnStatistics(colStatsMap),
			true);
}
 
Example 6
Source Project: flink   Source File: HiveTableSinkTest.java    License: Apache License 2.0
@Test
public void testInsertIntoDynamicPartition() throws Exception {
	String dbName = "default";
	String tblName = "dest";
	RowTypeInfo rowTypeInfo = createDestTable(dbName, tblName, 1);
	ObjectPath tablePath = new ObjectPath(dbName, tblName);

	TableEnvironment tableEnv = HiveTestUtils.createTableEnv();

	List<Row> toWrite = generateRecords(5);
	Table src = tableEnv.fromTableSource(new CollectionTableSource(toWrite, rowTypeInfo));
	tableEnv.registerTable("src", src);

	tableEnv.registerCatalog("hive", hiveCatalog);
	tableEnv.sqlQuery("select * from src").insertInto("hive", "default", "dest");
	tableEnv.execute("mytest");

	List<CatalogPartitionSpec> partitionSpecs = hiveCatalog.listPartitions(tablePath);
	assertEquals(toWrite.size(), partitionSpecs.size());

	verifyWrittenData(toWrite, hiveShell.executeQuery("select * from " + tblName));

	hiveCatalog.dropTable(tablePath, false);
}
 
Example 7
Source Project: flink   Source File: SqlToOperationConverter.java    License: Apache License 2.0
private Operation convertAlterTableProperties(ObjectIdentifier tableIdentifier, CatalogTable oldTable,
		SqlAlterTableProperties alterTableProperties) {
	LinkedHashMap<String, String> partitionKVs = alterTableProperties.getPartitionKVs();
	// it's altering partitions
	if (partitionKVs != null) {
		CatalogPartitionSpec partitionSpec = new CatalogPartitionSpec(partitionKVs);
		CatalogPartition catalogPartition = catalogManager.getPartition(tableIdentifier, partitionSpec)
				.orElseThrow(() -> new ValidationException(String.format("Partition %s of table %s doesn't exist",
						partitionSpec.getPartitionSpec(), tableIdentifier)));
		Map<String, String> newProps = new HashMap<>(catalogPartition.getProperties());
		newProps.putAll(OperationConverterUtils.extractProperties(alterTableProperties.getPropertyList()));
		return new AlterPartitionPropertiesOperation(
				tableIdentifier,
				partitionSpec,
				new CatalogPartitionImpl(newProps, catalogPartition.getComment()));
	} else {
		// it's altering a table
		Map<String, String> newProperties = new HashMap<>(oldTable.getOptions());
		newProperties.putAll(OperationConverterUtils.extractProperties(alterTableProperties.getPropertyList()));
		return new AlterTablePropertiesOperation(tableIdentifier, oldTable.copy(newProperties));
	}
}
 
Example 8
Source Project: flink   Source File: HiveCatalog.java    License: Apache License 2.0
@Override
public List<CatalogPartitionSpec> listPartitions(ObjectPath tablePath, CatalogPartitionSpec partitionSpec)
		throws TableNotExistException, TableNotPartitionedException, CatalogException {
	checkNotNull(tablePath, "Table path cannot be null");
	checkNotNull(partitionSpec, "CatalogPartitionSpec cannot be null");

	Table hiveTable = getHiveTable(tablePath);

	ensurePartitionedTable(tablePath, hiveTable);

	try {
		// partition spec can be partial
		List<String> partialVals = HiveReflectionUtils.getPvals(hiveShim, hiveTable.getPartitionKeys(),
			partitionSpec.getPartitionSpec());
		return client.listPartitionNames(tablePath.getDatabaseName(), tablePath.getObjectName(), partialVals,
			(short) -1).stream().map(HiveCatalog::createPartitionSpec).collect(Collectors.toList());
	} catch (TException e) {
		throw new CatalogException(
			String.format("Failed to list partitions of table %s", tablePath), e);
	}
}
 
Example 9
Source Project: flink   Source File: HiveDialectITCase.java    License: Apache License 2.0
@Test
public void testAddDropPartitions() throws Exception {
	tableEnv.executeSql("create table tbl (x int,y binary) partitioned by (dt date,country string)");
	tableEnv.executeSql("alter table tbl add partition (dt='2020-04-30',country='china') partition (dt='2020-04-30',country='us')");

	ObjectPath tablePath = new ObjectPath("default", "tbl");
	assertEquals(2, hiveCatalog.listPartitions(tablePath).size());

	String partLocation = warehouse + "/part3_location";
	tableEnv.executeSql(String.format(
			"alter table tbl add partition (dt='2020-05-01',country='belgium') location '%s'", partLocation));
	Table hiveTable = hiveCatalog.getHiveTable(tablePath);
	CatalogPartitionSpec spec = new CatalogPartitionSpec(new LinkedHashMap<String, String>() {{
		put("dt", "2020-05-01");
		put("country", "belgium");
	}});
	Partition hivePartition = hiveCatalog.getHivePartition(hiveTable, spec);
	assertEquals(partLocation, locationPath(hivePartition.getSd().getLocation()));

	tableEnv.executeSql("alter table tbl drop partition (dt='2020-04-30',country='china'),partition (dt='2020-05-01',country='belgium')");
	assertEquals(1, hiveCatalog.listPartitions(tablePath).size());
}
 
Example 10
Source Project: flink   Source File: CatalogStatisticsTest.java    License: Apache License 2.0
private void createPartitionStats(
		String part1, int part2, long rowCount) throws Exception {
	ObjectPath path = ObjectPath.fromString("default_database.PartT");

	LinkedHashMap<String, String> partSpecMap = new LinkedHashMap<>();
	partSpecMap.put("part1", part1);
	partSpecMap.put("part2", String.valueOf(part2));
	CatalogPartitionSpec partSpec = new CatalogPartitionSpec(partSpecMap);
	catalog.createPartition(
			path,
			partSpec,
			new CatalogPartitionImpl(new HashMap<>(), ""),
			true);
	catalog.alterPartitionStatistics(
			path,
			partSpec,
			new CatalogTableStatistics(rowCount, 10, 1000L, 2000L),
			true);
}
 
Example 11
Source Project: flink   Source File: HiveCatalog.java    License: Apache License 2.0
/**
 * Creates a {@link CatalogPartitionSpec} from a Hive partition name string.
 * Example of Hive partition name string - "name=bob/year=2019"
 */
private static CatalogPartitionSpec createPartitionSpec(String hivePartitionName) {
	String[] partKeyVals = hivePartitionName.split("/");
	Map<String, String> spec = new HashMap<>(partKeyVals.length);
	for (String keyVal : partKeyVals) {
		String[] kv = keyVal.split("=");
		spec.put(kv[0], kv[1]);
	}
	return new CatalogPartitionSpec(spec);
}
 
Example 12
Source Project: flink   Source File: HiveCatalog.java    License: Apache License 2.0
private String getPartitionName(ObjectPath tablePath, CatalogPartitionSpec partitionSpec, Table hiveTable) throws PartitionSpecInvalidException {
	List<String> partitionCols = getFieldNames(hiveTable.getPartitionKeys());
	List<String> partitionVals = getOrderedFullPartitionValues(partitionSpec, partitionCols, tablePath);
	List<String> partKVs = new ArrayList<>();
	for (int i = 0; i < partitionCols.size(); i++) {
		partKVs.add(partitionCols.get(i) + "=" + partitionVals.get(i));
	}
	return String.join("/", partKVs);
}
 
Example 13
Source Project: flink   Source File: HiveCatalogHiveMetadataTest.java    License: Apache License 2.0
@Test
public void testAlterPartitionColumnStatistics() throws Exception {
	catalog.createDatabase(db1, createDb(), false);
	CatalogTable catalogTable = createPartitionedTable();
	catalog.createTable(path1, catalogTable, false);
	CatalogPartitionSpec partitionSpec = createPartitionSpec();
	catalog.createPartition(path1, partitionSpec, createPartition(), true);
	Map<String, CatalogColumnStatisticsDataBase> columnStatisticsDataBaseMap = new HashMap<>();
	columnStatisticsDataBaseMap.put("first", new CatalogColumnStatisticsDataString(10, 5.2, 3, 100));
	CatalogColumnStatistics catalogColumnStatistics = new CatalogColumnStatistics(columnStatisticsDataBaseMap);
	catalog.alterPartitionColumnStatistics(path1, partitionSpec, catalogColumnStatistics, false);

	checkEquals(catalogColumnStatistics, catalog.getPartitionColumnStatistics(path1, partitionSpec));
}
 
Example 14
Source Project: flink   Source File: PartitionNotExistException.java    License: Apache License 2.0
public PartitionNotExistException(
	String catalogName,
	ObjectPath tablePath,
	CatalogPartitionSpec partitionSpec) {

	super(String.format(MSG, partitionSpec, tablePath.getFullName(), catalogName), null);
}
 
Example 15
Source Project: flink   Source File: PartitionNotExistException.java    License: Apache License 2.0
public PartitionNotExistException(
	String catalogName,
	ObjectPath tablePath,
	CatalogPartitionSpec partitionSpec,
	Throwable cause) {

	super(String.format(MSG, partitionSpec, tablePath.getFullName(), catalogName), cause);
}
 
Example 16
Source Project: flink   Source File: PartitionAlreadyExistsException.java    License: Apache License 2.0
public PartitionAlreadyExistsException(
	String catalogName,
	ObjectPath tablePath,
	CatalogPartitionSpec partitionSpec) {

	super(String.format(MSG, partitionSpec, tablePath.getFullName(), catalogName));
}
 
Example 17
Source Project: flink   Source File: PartitionAlreadyExistsException.java    License: Apache License 2.0
public PartitionAlreadyExistsException(
	String catalogName,
	ObjectPath tablePath,
	CatalogPartitionSpec partitionSpec,
	Throwable cause) {

	super(String.format(MSG, partitionSpec, tablePath.getFullName(), catalogName), cause);
}
 
Example 18
Source Project: flink   Source File: PartitionSpecInvalidException.java    License: Apache License 2.0
public PartitionSpecInvalidException(
	String catalogName,
	List<String> partitionKeys,
	ObjectPath tablePath,
	CatalogPartitionSpec partitionSpec) {

	super(String.format(MSG, partitionSpec, partitionKeys, tablePath.getFullName(), catalogName), null);
}
 
Example 19
Source Project: flink   Source File: PartitionSpecInvalidException.java    License: Apache License 2.0
public PartitionSpecInvalidException(
	String catalogName,
	List<String> partitionKeys,
	ObjectPath tablePath,
	CatalogPartitionSpec partitionSpec,
	Throwable cause) {

	super(String.format(MSG, partitionSpec, partitionKeys, tablePath.getFullName(), catalogName), cause);
}
 
Example 20
Source Project: flink   Source File: HiveCatalog.java    License: Apache License 2.0
@Override
public List<CatalogPartitionSpec> listPartitionsByFilter(ObjectPath tablePath, List<Expression> expressions)
		throws TableNotExistException, TableNotPartitionedException, CatalogException {
	Table hiveTable = getHiveTable(tablePath);
	ensurePartitionedTable(tablePath, hiveTable);
	List<String> partColNames = getFieldNames(hiveTable.getPartitionKeys());
	Optional<String> filter = HiveTableUtil.makePartitionFilter(
			getNonPartitionFields(hiveConf, hiveTable).size(), partColNames, expressions, hiveShim);
	if (!filter.isPresent()) {
		throw new UnsupportedOperationException(
				"HiveCatalog is unable to handle the partition filter expressions: " + expressions);
	}
	try {
		PartitionSpecProxy partitionSpec = client.listPartitionSpecsByFilter(
				tablePath.getDatabaseName(), tablePath.getObjectName(), filter.get(), (short) -1);
		List<CatalogPartitionSpec> res = new ArrayList<>(partitionSpec.size());
		PartitionSpecProxy.PartitionIterator partitions = partitionSpec.getPartitionIterator();
		while (partitions.hasNext()) {
			Partition partition = partitions.next();
			Map<String, String> spec = new HashMap<>();
			for (int i = 0; i < partColNames.size(); i++) {
				spec.put(partColNames.get(i), partition.getValues().get(i));
			}
			res.add(new CatalogPartitionSpec(spec));
		}
		return res;
	} catch (TException e) {
		throw new UnsupportedOperationException(
				"Failed to list partition by filter from HMS, filter expressions: " + expressions, e);
	}
}
 
Example 21
Source Project: flink   Source File: HiveCatalog.java    License: Apache License 2.0
private Partition instantiateHivePartition(Table hiveTable, CatalogPartitionSpec partitionSpec, CatalogPartition catalogPartition)
		throws PartitionSpecInvalidException {
	List<String> partCols = getFieldNames(hiveTable.getPartitionKeys());
	List<String> partValues = getOrderedFullPartitionValues(
		partitionSpec, partCols, new ObjectPath(hiveTable.getDbName(), hiveTable.getTableName()));
	// validate partition values
	for (int i = 0; i < partCols.size(); i++) {
		if (StringUtils.isNullOrWhitespaceOnly(partValues.get(i))) {
			throw new PartitionSpecInvalidException(getName(), partCols,
				new ObjectPath(hiveTable.getDbName(), hiveTable.getTableName()), partitionSpec);
		}
	}
	// TODO: handle GenericCatalogPartition
	StorageDescriptor sd = hiveTable.getSd().deepCopy();
	sd.setLocation(catalogPartition.getProperties().remove(SqlCreateHiveTable.TABLE_LOCATION_URI));

	Map<String, String> properties = new HashMap<>(catalogPartition.getProperties());
	String comment = catalogPartition.getComment();
	if (comment != null) {
		properties.put(HiveCatalogConfig.COMMENT, comment);
	}

	return HiveTableUtil.createHivePartition(
			hiveTable.getDbName(),
			hiveTable.getTableName(),
			partValues,
			sd,
			properties);
}
 
Example 22
Source Project: flink   Source File: HiveCatalog.java    License: Apache License 2.0
/**
 * Creates a {@link CatalogPartitionSpec} from a Hive partition name string.
 * Example of Hive partition name string - "name=bob/year=2019"
 */
private static CatalogPartitionSpec createPartitionSpec(String hivePartitionName) {
	String[] partKeyVals = hivePartitionName.split("/");
	Map<String, String> spec = new HashMap<>(partKeyVals.length);
	for (String keyVal : partKeyVals) {
		String[] kv = keyVal.split("=");
		spec.put(unescapePathName(kv[0]), unescapePathName(kv[1]));
	}
	return new CatalogPartitionSpec(spec);
}
 
Example 23
Source Project: flink   Source File: HiveCatalog.java    License: Apache License 2.0
@VisibleForTesting
public Partition getHivePartition(Table hiveTable, CatalogPartitionSpec partitionSpec)
		throws PartitionSpecInvalidException, TException {
	return client.getPartition(hiveTable.getDbName(), hiveTable.getTableName(),
		getOrderedFullPartitionValues(partitionSpec, getFieldNames(hiveTable.getPartitionKeys()),
			new ObjectPath(hiveTable.getDbName(), hiveTable.getTableName())));
}
 
Example 24
Source Project: flink   Source File: HiveCatalogHiveMetadataTest.java    License: Apache License 2.0
@Test
public void testAlterPartitionColumnStatistics() throws Exception {
	catalog.createDatabase(db1, createDb(), false);
	CatalogTable catalogTable = createPartitionedTable();
	catalog.createTable(path1, catalogTable, false);
	CatalogPartitionSpec partitionSpec = createPartitionSpec();
	catalog.createPartition(path1, partitionSpec, createPartition(), true);
	Map<String, CatalogColumnStatisticsDataBase> columnStatisticsDataBaseMap = new HashMap<>();
	columnStatisticsDataBaseMap.put("first", new CatalogColumnStatisticsDataString(10L, 5.2, 3L, 100L));
	CatalogColumnStatistics catalogColumnStatistics = new CatalogColumnStatistics(columnStatisticsDataBaseMap);
	catalog.alterPartitionColumnStatistics(path1, partitionSpec, catalogColumnStatistics, false);

	checkEquals(catalogColumnStatistics, catalog.getPartitionColumnStatistics(path1, partitionSpec));
}
 
Example 25
Source Project: flink   Source File: HiveDialectITCase.java    License: Apache License 2.0
@Test
public void testAlterPartition() throws Exception {
	tableEnv.executeSql("create table tbl (x tinyint,y string) partitioned by (p1 bigint,p2 date)");
	tableEnv.executeSql("alter table tbl add partition (p1=1000,p2='2020-05-01') partition (p1=2000,p2='2020-01-01')");
	CatalogPartitionSpec spec1 = new CatalogPartitionSpec(new LinkedHashMap<String, String>() {{
		put("p1", "1000");
		put("p2", "2020-05-01");
	}});
	CatalogPartitionSpec spec2 = new CatalogPartitionSpec(new LinkedHashMap<String, String>() {{
		put("p1", "2000");
		put("p2", "2020-01-01");
	}});
	ObjectPath tablePath = new ObjectPath("default", "tbl");

	Table hiveTable = hiveCatalog.getHiveTable(tablePath);

	// change location
	String location = warehouse + "/new_part_location";
	tableEnv.executeSql(String.format("alter table tbl partition (p1=1000,p2='2020-05-01') set location '%s'", location));
	Partition partition = hiveCatalog.getHivePartition(hiveTable, spec1);
	assertEquals(location, locationPath(partition.getSd().getLocation()));

	// change file format
	tableEnv.executeSql("alter table tbl partition (p1=2000,p2='2020-01-01') set fileformat rcfile");
	partition = hiveCatalog.getHivePartition(hiveTable, spec2);
	assertEquals(LazyBinaryColumnarSerDe.class.getName(), partition.getSd().getSerdeInfo().getSerializationLib());
	assertEquals(RCFileInputFormat.class.getName(), partition.getSd().getInputFormat());
	assertEquals(RCFileOutputFormat.class.getName(), partition.getSd().getOutputFormat());

	// change serde
	tableEnv.executeSql(String.format("alter table tbl partition (p1=1000,p2='2020-05-01') set serde '%s' with serdeproperties('%s'='%s')",
			LazyBinarySerDe.class.getName(), serdeConstants.LINE_DELIM, "\n"));
	partition = hiveCatalog.getHivePartition(hiveTable, spec1);
	assertEquals(LazyBinarySerDe.class.getName(), partition.getSd().getSerdeInfo().getSerializationLib());
	assertEquals("\n", partition.getSd().getSerdeInfo().getParameters().get(serdeConstants.LINE_DELIM));
}
 