Java Code Examples for org.apache.flink.table.catalog.CatalogTable#getSchema()

The following examples show how to use org.apache.flink.table.catalog.CatalogTable#getSchema(). You can go to the original project or source file by following the links above each example.
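
Before diving into the examples: getSchema() returns the table's TableSchema, which exposes the column names, column data types, and any declared constraints. A minimal sketch of the typical access pattern (the catalog instance and the table path below are hypothetical placeholders):

// assumes a Catalog instance named "catalog"; getTable throws
// TableNotExistException if the path doesn't resolve to a table
CatalogTable table = (CatalogTable) catalog.getTable(new ObjectPath("default", "my_table"));
TableSchema schema = table.getSchema();
String[] fieldNames = schema.getFieldNames();            // column names
DataType[] fieldTypes = schema.getFieldDataTypes();      // column data types
Optional<UniqueConstraint> pk = schema.getPrimaryKey();  // primary key, if declared
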
Example 1
Source File: HiveTableOutputFormat.java    From flink with Apache License 2.0
public HiveTableOutputFormat(JobConf jobConf, ObjectPath tablePath, CatalogTable table, HiveTablePartition hiveTablePartition,
							Properties tableProperties, boolean overwrite) {
	super(jobConf.getCredentials());

	Preconditions.checkNotNull(table, "table cannot be null");
	Preconditions.checkNotNull(hiveTablePartition, "HiveTablePartition cannot be null");
	Preconditions.checkNotNull(tableProperties, "Table properties cannot be null");

	HadoopUtils.mergeHadoopConf(jobConf);
	this.jobConf = jobConf;
	this.tablePath = tablePath;
	this.partitionColumns = table.getPartitionKeys();
	TableSchema tableSchema = table.getSchema();
	this.fieldNames = tableSchema.getFieldNames();
	this.fieldTypes = tableSchema.getFieldDataTypes();
	this.hiveTablePartition = hiveTablePartition;
	this.tableProperties = tableProperties;
	this.overwrite = overwrite;
	// the write is dynamically partitioned when the partition spec doesn't
	// assign a value to every partition column
	isPartitioned = partitionColumns != null && !partitionColumns.isEmpty();
	isDynamicPartition = isPartitioned && partitionColumns.size() > hiveTablePartition.getPartitionSpec().size();
	hiveVersion = Preconditions.checkNotNull(jobConf.get(HiveCatalogValidator.CATALOG_HIVE_VERSION),
			"Hive version is not defined");
}
 
Example 2
Source File: HiveTableSink.java    From flink with Apache License 2.0
public HiveTableSink(JobConf jobConf, ObjectPath tablePath, CatalogTable table) {
	this.jobConf = jobConf;
	this.tablePath = tablePath;
	this.catalogTable = table;
	hiveVersion = Preconditions.checkNotNull(jobConf.get(HiveCatalogValidator.CATALOG_HIVE_VERSION),
			"Hive version is not defined");
	tableSchema = table.getSchema();
}
 
Example 3
Source File: HiveDialectITCase.java    From flink with Apache License 2.0
@Test
public void testCreateTableWithConstraints() throws Exception {
	Assume.assumeTrue(HiveVersionTestUtil.HIVE_310_OR_LATER);
	tableEnv.executeSql("create table tbl (x int,y int not null disable novalidate rely,z int not null disable novalidate norely," +
			"constraint pk_name primary key (x) rely)");
	CatalogTable catalogTable = (CatalogTable) hiveCatalog.getTable(new ObjectPath("default", "tbl"));
	TableSchema tableSchema = catalogTable.getSchema();
	assertTrue("PK not present", tableSchema.getPrimaryKey().isPresent());
	assertEquals("pk_name", tableSchema.getPrimaryKey().get().getName());
	assertFalse("PK cannot be null", tableSchema.getFieldDataTypes()[0].getLogicalType().isNullable());
	assertFalse("RELY NOT NULL should be reflected in schema",
			tableSchema.getFieldDataTypes()[1].getLogicalType().isNullable());
	assertTrue("NORELY NOT NULL shouldn't be reflected in schema",
			tableSchema.getFieldDataTypes()[2].getLogicalType().isNullable());
}
 
Example 4
Source File: SqlToOperationConverter.java    From flink with Apache License 2.0
/** Converts an ALTER TABLE statement. */
private Operation convertAlterTable(SqlAlterTable sqlAlterTable) {
	UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(sqlAlterTable.fullTableName());
	ObjectIdentifier tableIdentifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
	if (sqlAlterTable instanceof SqlAlterTableRename) {
		UnresolvedIdentifier newUnresolvedIdentifier =
			UnresolvedIdentifier.of(((SqlAlterTableRename) sqlAlterTable).fullNewTableName());
		ObjectIdentifier newTableIdentifier = catalogManager.qualifyIdentifier(newUnresolvedIdentifier);
		return new AlterTableRenameOperation(tableIdentifier, newTableIdentifier);
	} else if (sqlAlterTable instanceof SqlAlterTableProperties) {
		Optional<CatalogManager.TableLookupResult> optionalCatalogTable = catalogManager.getTable(tableIdentifier);
		if (optionalCatalogTable.isPresent() && !optionalCatalogTable.get().isTemporary()) {
			CatalogTable originalCatalogTable = (CatalogTable) optionalCatalogTable.get().getTable();
			Map<String, String> properties = new HashMap<>(originalCatalogTable.getProperties());
			((SqlAlterTableProperties) sqlAlterTable).getPropertyList().getList().forEach(p ->
				properties.put(((SqlTableOption) p).getKeyString(), ((SqlTableOption) p).getValueString()));
			CatalogTable catalogTable = new CatalogTableImpl(
				originalCatalogTable.getSchema(),
				originalCatalogTable.getPartitionKeys(),
				properties,
				originalCatalogTable.getComment());
			return new AlterTablePropertiesOperation(tableIdentifier, catalogTable);
		} else {
			throw new ValidationException(String.format("Table %s doesn't exist or is a temporary table.",
				tableIdentifier.toString()));
		}
	} else {
		throw new ValidationException(
				String.format("[%s] needs to implement",
						sqlAlterTable.toSqlString(CalciteSqlDialect.DEFAULT)));
	}
}
 
Example 5
Source File: TableSourceFactoryMock.java    From flink with Apache License 2.0
@Override
public TableSource<Row> createTableSource(ObjectPath tablePath, CatalogTable table) {
	return new TableSourceMock(table.getSchema().toRowDataType(), table.getSchema());
}
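
Example 5 passes TableSchema#toRowDataType() to the mock source: the whole schema is represented as a single ROW data type whose nested fields mirror the columns. A minimal sketch of that relationship (the column names are made up for illustration):

TableSchema schema = TableSchema.builder()
		.field("id", DataTypes.INT())
		.field("name", DataTypes.STRING())
		.build();
DataType rowType = schema.toRowDataType(); // ROW<`id` INT, `name` STRING>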
 
Example 6
Source File: SqlCreateTableConverter.java    From flink with Apache License 2.0
private CatalogTable createCatalogTable(SqlCreateTable sqlCreateTable) {
	final TableSchema sourceTableSchema;
	final List<String> sourcePartitionKeys;
	final List<SqlTableLike.SqlTableLikeOption> likeOptions;
	final Map<String, String> sourceProperties;
	if (sqlCreateTable.getTableLike().isPresent()) {
		SqlTableLike sqlTableLike = sqlCreateTable.getTableLike().get();
		CatalogTable table = lookupLikeSourceTable(sqlTableLike);
		sourceTableSchema = table.getSchema();
		sourcePartitionKeys = table.getPartitionKeys();
		likeOptions = sqlTableLike.getOptions();
		sourceProperties = table.getProperties();
	} else {
		sourceTableSchema = TableSchema.builder().build();
		sourcePartitionKeys = Collections.emptyList();
		likeOptions = Collections.emptyList();
		sourceProperties = Collections.emptyMap();
	}

	Map<SqlTableLike.FeatureOption, SqlTableLike.MergingStrategy> mergingStrategies =
		mergeTableLikeUtil.computeMergingStrategies(likeOptions);

	Map<String, String> mergedOptions = mergeOptions(sqlCreateTable, sourceProperties, mergingStrategies);

	Optional<SqlTableConstraint> primaryKey = sqlCreateTable.getFullConstraints()
		.stream()
		.filter(SqlTableConstraint::isPrimaryKey)
		.findAny();
	TableSchema mergedSchema = mergeTableLikeUtil.mergeTables(
		mergingStrategies,
		sourceTableSchema,
		sqlCreateTable.getColumnList().getList(),
		sqlCreateTable.getWatermark().map(Collections::singletonList).orElseGet(Collections::emptyList),
		primaryKey.orElse(null)
	);

	List<String> partitionKeys = mergePartitions(
		sourcePartitionKeys,
		sqlCreateTable.getPartitionKeyList(),
		mergingStrategies
	);
	verifyPartitioningColumnsExist(mergedSchema, partitionKeys);

	String tableComment = sqlCreateTable.getComment()
		.map(comment -> comment.getNlsString().getValue())
		.orElse(null);

	return new CatalogTableImpl(mergedSchema,
		partitionKeys,
		mergedOptions,
		tableComment);
}
 
Example 7
Source File: OperationConverterUtils.java    From flink with Apache License 2.0
public static Operation convertChangeColumn(
		ObjectIdentifier tableIdentifier,
		SqlChangeColumn changeColumn,
		CatalogTable catalogTable,
		SqlValidator sqlValidator) {
	String oldName = changeColumn.getOldName().getSimple();
	if (catalogTable.getPartitionKeys().contains(oldName)) {
		// disallow changing partition columns
		throw new ValidationException("CHANGE COLUMN cannot be applied to partition columns");
	}
	TableSchema oldSchema = catalogTable.getSchema();
	int oldIndex = Arrays.asList(oldSchema.getFieldNames()).indexOf(oldName);
	if (oldIndex < 0) {
		throw new ValidationException(String.format("Old column %s not found for CHANGE COLUMN", oldName));
	}
	boolean first = changeColumn.isFirst();
	String after = changeColumn.getAfter() == null ? null : changeColumn.getAfter().getSimple();
	List<TableColumn> tableColumns = oldSchema.getTableColumns();
	TableColumn newTableColumn = toTableColumn(changeColumn.getNewColumn(), sqlValidator);
	if ((!first && after == null) || oldName.equals(after)) {
		tableColumns.set(oldIndex, newTableColumn);
	} else {
		// need to change column position
		tableColumns.remove(oldIndex);
		if (first) {
			tableColumns.add(0, newTableColumn);
		} else {
			int newIndex = tableColumns
					.stream()
					.map(TableColumn::getName)
					.collect(Collectors.toList())
					.indexOf(after);
			if (newIndex < 0) {
				throw new ValidationException(String.format("After column %s not found for CHANGE COLUMN", after));
			}
			tableColumns.add(newIndex + 1, newTableColumn);
		}
	}
	TableSchema.Builder builder = TableSchema.builder();
	for (TableColumn column : tableColumns) {
		builder.add(column);
	}
	// carry over the watermark specs and primary key from the old schema
	setWatermarkAndPK(builder, oldSchema);
	TableSchema newSchema = builder.build();
	Map<String, String> newProperties = new HashMap<>(catalogTable.getOptions());
	newProperties.putAll(extractProperties(changeColumn.getProperties()));
	return new AlterTableSchemaOperation(
			tableIdentifier,
			new CatalogTableImpl(
					newSchema,
					catalogTable.getPartitionKeys(),
					newProperties,
					catalogTable.getComment()));
	// TODO: handle watermark and constraints
}
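
Example 7 delegates to a private helper setWatermarkAndPK(builder, oldSchema) that is not shown in the snippet. A minimal sketch of what such a helper can do, assuming TableSchema.Builder exposes watermark(WatermarkSpec) and primaryKey(String, String[]) overloads (verify against your Flink version):

// Sketch: copy the watermark specs and the primary key, if present,
// from the old schema onto the builder for the new schema.
private static void setWatermarkAndPK(TableSchema.Builder builder, TableSchema schema) {
	for (WatermarkSpec watermarkSpec : schema.getWatermarkSpecs()) {
		builder.watermark(watermarkSpec);
	}
	schema.getPrimaryKey().ifPresent(pk ->
			builder.primaryKey(pk.getName(), pk.getColumns().toArray(new String[0])));
}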