Java Code Examples for org.apache.flink.table.catalog.Catalog#createTable()

The following examples show how to use org.apache.flink.table.catalog.Catalog#createTable(). Each example is taken from an open-source project; the source file and license are noted above each snippet, and you can follow them to the original project for the surrounding API usage.
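Before the project examples, here is a minimal, self-contained sketch of the call itself against an in-memory catalog. The catalog, database, and table names (demo, myDb, myTable) are illustrative and not taken from any of the projects below.

import java.util.HashMap;

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogDatabaseImpl;
import org.apache.flink.table.catalog.CatalogTableImpl;
import org.apache.flink.table.catalog.GenericInMemoryCatalog;
import org.apache.flink.table.catalog.ObjectPath;

public class CreateTableSketch {
	public static void main(String[] args) throws Exception {
		// An in-memory catalog is enough to demonstrate the API.
		Catalog catalog = new GenericInMemoryCatalog("demo");
		catalog.open();

		// The target database must exist before createTable() is called,
		// otherwise a DatabaseNotExistException is thrown.
		catalog.createDatabase("myDb", new CatalogDatabaseImpl(new HashMap<>(), null), false);

		// A simple two-column table with no properties and a comment.
		CatalogTableImpl table = new CatalogTableImpl(
				TableSchema.builder()
						.field("id", DataTypes.BIGINT())
						.field("name", DataTypes.STRING())
						.build(),
				new HashMap<>(),
				"a demo table");

		// ignoreIfExists = true: do nothing if myDb.myTable already exists;
		// with false, a TableAlreadyExistException would be thrown instead.
		catalog.createTable(new ObjectPath("myDb", "myTable"), table, true);

		catalog.close();
	}
}

Note that createTable() takes an ObjectPath (database plus table name), the table definition, and an ignoreIfExists flag; the examples below all follow this shape.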
Example 1
Source File: SqlToOperationConverterTest.java    From flink with Apache License 2.0
@Test
public void testAlterTableAddPkConstraintEnforced() throws Exception {
	Catalog catalog = new GenericInMemoryCatalog("default", "default");
	catalogManager.registerCatalog("cat1", catalog);
	catalog.createDatabase("db1", new CatalogDatabaseImpl(new HashMap<>(), null), true);
	CatalogTable catalogTable = new CatalogTableImpl(
			TableSchema.builder()
					.field("a", DataTypes.STRING().notNull())
					.field("b", DataTypes.BIGINT().notNull())
					.field("c", DataTypes.BIGINT())
					.build(),
			new HashMap<>(),
			"tb1");
	catalogManager.setCurrentCatalog("cat1");
	catalogManager.setCurrentDatabase("db1");
	catalog.createTable(new ObjectPath("db1", "tb1"), catalogTable, true);
	// Adding an ENFORCED primary key constraint should fail validation.
	// Note: the expected message below must match Flink's actual error text verbatim.
	thrown.expect(ValidationException.class);
	thrown.expectMessage("Flink doesn't support ENFORCED mode for PRIMARY KEY constaint. "
			+ "ENFORCED/NOT ENFORCED  controls if the constraint checks are performed on the "
			+ "incoming/outgoing data. Flink does not own the data therefore the "
			+ "only supported mode is the NOT ENFORCED mode");
	parse("alter table tb1 add constraint ct1 primary key(a, b)",
			SqlDialect.DEFAULT);
}
 
Example 2
Source File: SqlToOperationConverterTest.java    From flink with Apache License 2.0
@Test
public void testAlterTableAddUniqueConstraint() throws Exception {
	Catalog catalog = new GenericInMemoryCatalog("default", "default");
	catalogManager.registerCatalog("cat1", catalog);
	catalog.createDatabase("db1", new CatalogDatabaseImpl(new HashMap<>(), null), true);
	CatalogTable catalogTable = new CatalogTableImpl(
			TableSchema.builder()
					.field("a", DataTypes.STRING().notNull())
					.field("b", DataTypes.BIGINT().notNull())
					.build(),
			new HashMap<>(),
			"tb1");
	catalogManager.setCurrentCatalog("cat1");
	catalogManager.setCurrentDatabase("db1");
	catalog.createTable(new ObjectPath("db1", "tb1"), catalogTable, true);
	// Adding a UNIQUE constraint is not supported yet, even when declared NOT ENFORCED.
	thrown.expect(UnsupportedOperationException.class);
	thrown.expectMessage("UNIQUE constraint is not supported yet");
	parse("alter table tb1 add constraint ct1 unique(a, b) not enforced",
			SqlDialect.DEFAULT);
}
 
Example 3
Source File: SqlToOperationConverterTest.java    From flink with Apache License 2.0
@Test
public void testAlterTableAddUniqueConstraintEnforced() throws Exception {
	Catalog catalog = new GenericInMemoryCatalog("default", "default");
	catalogManager.registerCatalog("cat1", catalog);
	catalog.createDatabase("db1", new CatalogDatabaseImpl(new HashMap<>(), null), true);
	CatalogTable catalogTable = new CatalogTableImpl(
			TableSchema.builder()
					.field("a", DataTypes.STRING().notNull())
					.field("b", DataTypes.BIGINT().notNull())
					.field("c", DataTypes.BIGINT())
					.build(),
			new HashMap<>(),
			"tb1");
	catalogManager.setCurrentCatalog("cat1");
	catalogManager.setCurrentDatabase("db1");
	catalog.createTable(new ObjectPath("db1", "tb1"), catalogTable, true);
	// Even without NOT ENFORCED, the UNIQUE constraint itself is rejected first.
	thrown.expect(UnsupportedOperationException.class);
	thrown.expectMessage("UNIQUE constraint is not supported yet");
	parse("alter table tb1 add constraint ct1 unique(a, b)",
			SqlDialect.DEFAULT);
}
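The three tests above pin down what the SQL-to-operation converter rejects. For contrast, here is a hedged sketch of the form the error messages point to: a PRIMARY KEY declared NOT ENFORCED. It reuses the fixtures and the parse(...) helper from the tests above, so it is illustrative rather than a verbatim Flink test, and AlterTableAddConstraintOperation is assumed to be the operation class produced in the Flink version these tests come from.

	// Illustrative sketch: with NOT ENFORCED the same kind of statement is
	// accepted and converted into an ALTER TABLE operation instead of throwing.
	Operation operation = parse(
			"alter table tb1 add constraint ct1 primary key(a, b) not enforced",
			SqlDialect.DEFAULT);
	assertTrue(operation instanceof AlterTableAddConstraintOperation);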
 
Example 4
Source File: DependencyTest.java    From flink with Apache License 2.0
@Override
public Catalog createCatalog(String name, Map<String, String> properties) {
	// Exercise HiveCatalogFactory.createCatalog, but do not use its result for this test.
	assertNotNull(super.createCatalog(name, properties));

	// Developers may already have their own production/testing hive-site.xml in their
	// environment, and Flink tests should avoid picking those up.
	// Thus, explicitly create a testing HiveConf for unit tests here.
	Catalog hiveCatalog = HiveTestUtils.createHiveCatalog(name, properties.get(HiveCatalogValidator.CATALOG_HIVE_VERSION));

	// Create an additional database to verify that tableEnv.useDatabase() switches the catalog's current database.
	hiveCatalog.open();
	try {
		hiveCatalog.createDatabase(
			ADDITIONAL_TEST_DATABASE,
			new CatalogDatabaseImpl(new HashMap<>(), null),
			false);
		hiveCatalog.createTable(
			new ObjectPath(ADDITIONAL_TEST_DATABASE, TEST_TABLE),
			new CatalogTableImpl(
				TableSchema.builder()
					.field("testcol", DataTypes.INT())
					.build(),
				new HashMap<String, String>() {{
					put(CatalogConfig.IS_GENERIC, String.valueOf(true));
				}},
				""
			),
			false
		);
	} catch (DatabaseAlreadyExistException | TableAlreadyExistException | DatabaseNotExistException e) {
		throw new CatalogException(e);
	}

	return hiveCatalog;
}
 
Example 5
Source File: FlinkSqlParser.java    From sylph with Apache License 2.0
private void translateJoin(JoinInfo joinInfo, Map<String, CreateTable> batchTables)
{
    Table streamTable = getTable(tableEnv, joinInfo.getStreamTable());
    RowTypeInfo streamRowType = (RowTypeInfo) streamTable.getSchema().toRowType();
    DataStream<Row> inputStream = tableEnv.toAppendStream(streamTable, org.apache.flink.types.Row.class);
    inputStream.getTransformation().setOutputType(streamRowType);

    // Get the batch table schema.
    CreateTable batchTable = requireNonNull(batchTables.get(joinInfo.getBatchTable().getName()), "batch table [" + joinInfo.getJoinTableName() + "] does not exist");
    RowTypeInfo batchTableRowType = StreamSqlUtil.schemaToRowTypeInfo(StreamSqlUtil.getTableSchema(batchTable));
    List<SelectField> joinSelectFields = getAllSelectFields(joinInfo, streamRowType, batchTableRowType);

    // It is recommended to apply keyBy first.
    JoinContext joinContext = JoinContextImpl.createContext(joinInfo, streamRowType, joinSelectFields);
    RealTimeTransForm transForm = getJoinTransForm(joinContext, batchTable);
    DataStream<Row> joinResultStream = AsyncFunctionHelper.translate(inputStream, transForm);

    // Set the output schema of the join result.
    RowTypeInfo rowTypeInfo = getJoinOutScheam(joinSelectFields);
    joinResultStream.getTransformation().setOutputType(rowTypeInfo);

    // Register the join result as a temporary table.
    Catalog catalog = tableEnv.getCatalog(tableEnv.getCurrentCatalog()).get();
    if (catalog.tableExists(ObjectPath.fromString(joinInfo.getJoinTableName()))) {
        Table table = tableEnv.fromDataStream(joinResultStream);
        CatalogBaseTable tableTable = new QueryOperationCatalogView(table.getQueryOperation());
        try {
            catalog.createTable(ObjectPath.fromString(joinInfo.getJoinTableName()), tableTable, true);
        }
        catch (TableAlreadyExistException | DatabaseNotExistException e) {
            e.printStackTrace();
        }
        //tableEnv.replaceRegisteredTable(joinInfo.getJoinTableName(), new RelTable(table.getRelNode()));
    }
    else {
        tableEnv.registerDataStream(joinInfo.getJoinTableName(), joinResultStream);
    }
    //next update join select query
    joinQueryUpdate(joinInfo, rowTypeInfo.getFieldNames());
}
 
Example 6
Source File: DependencyTest.java    From flink with Apache License 2.0
@Override
public Catalog createCatalog(String name, Map<String, String> properties) {
	// Developers may already have their own production/testing hive-site.xml in their
	// environment, and Flink tests should avoid picking those up.
	// Thus, explicitly create a testing HiveConf for unit tests here.
	Catalog hiveCatalog = HiveTestUtils.createHiveCatalog(name, properties.get(HiveCatalogValidator.CATALOG_HIVE_VERSION));

	// Create an additional database to verify that tableEnv.useDatabase() switches the catalog's current database.
	hiveCatalog.open();
	try {
		hiveCatalog.createDatabase(
			ADDITIONAL_TEST_DATABASE,
			new CatalogDatabaseImpl(new HashMap<>(), null),
			false);
		hiveCatalog.createTable(
			new ObjectPath(ADDITIONAL_TEST_DATABASE, TEST_TABLE),
			new CatalogTableImpl(
				TableSchema.builder()
					.field("testcol", DataTypes.INT())
					.build(),
				new HashMap<String, String>() {{
					put(CatalogConfig.IS_GENERIC, String.valueOf(false));
				}},
				""
			),
			false
		);
		// create a table to test parameterized types
		hiveCatalog.createTable(new ObjectPath("default", TABLE_WITH_PARAMETERIZED_TYPES),
				tableWithParameterizedTypes(),
				false);
	} catch (DatabaseAlreadyExistException | TableAlreadyExistException | DatabaseNotExistException e) {
		throw new CatalogException(e);
	}

	return hiveCatalog;
}
 
Example 7
Source File: TableEnvironmentImpl.java    From flink with Apache License 2.0
/**
 * Registers a {@link CatalogBaseTable} under a given object path. The {@code path} can take
 * 3 formats:
 * <ol>
 *   <li>`catalog.db.table`: A full table path including the catalog name,
 *   the database name and the table name.</li>
 *   <li>`db.table`: The database name followed by the table name, using the current catalog.</li>
 *   <li>`table`: Only the table name, using the current catalog and current database.</li>
 * </ol>
 * The registered tables can then be referenced in SQL queries.
 *
 * @param path           The path under which the table will be registered
 * @param catalogTable   The table to register
 * @param ignoreIfExists If true, do nothing if a table with the same name already exists
 *                       under the {@code path}. If false, a TableAlreadyExistException is thrown.
 */
private void registerCatalogTableInternal(String[] path,
		CatalogBaseTable catalogTable,
		boolean ignoreIfExists) {
	String[] fullName = catalogManager.getFullTablePath(Arrays.asList(path));
	Catalog catalog = getCatalog(fullName[0]).orElseThrow(() ->
		new TableException("Catalog " + fullName[0] + " does not exist"));
	ObjectPath objectPath = new ObjectPath(fullName[1], fullName[2]);
	try {
		catalog.createTable(objectPath, catalogTable, ignoreIfExists);
	} catch (Exception e) {
		throw new TableException("Could not register table", e);
	}
}
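As a closing usage note, here is a minimal sketch of the path resolution described in the javadoc above, written as a standalone helper rather than Flink internals; the class name PathResolutionSketch and the method register are made up for illustration.

import org.apache.flink.table.api.TableException;
import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogBaseTable;
import org.apache.flink.table.catalog.ObjectPath;

// Hypothetical helper, not Flink code: demonstrates how the 1-, 2-, and
// 3-part paths described in the javadoc collapse into an ObjectPath.
public final class PathResolutionSketch {

	public static void register(
			Catalog catalog,        // for a 3-part path this is the named catalog, otherwise the current one
			String currentDatabase, // used when the path omits the database
			String[] path,          // 1, 2, or 3 elements, as in the javadoc above
			CatalogBaseTable table,
			boolean ignoreIfExists) {
		// `table` uses the current database; `db.table` and `catalog.db.table`
		// carry the database explicitly as the second-to-last element.
		String database = path.length >= 2 ? path[path.length - 2] : currentDatabase;
		String tableName = path[path.length - 1];
		try {
			catalog.createTable(new ObjectPath(database, tableName), table, ignoreIfExists);
		} catch (Exception e) {
			throw new TableException("Could not register table", e);
		}
	}
}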