org.apache.flink.table.catalog.CatalogDatabaseImpl Java Examples

The following examples show how to use org.apache.flink.table.catalog.CatalogDatabaseImpl. They are taken from open-source projects; the source file and project for each example are noted above its code.
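Before the project examples, here is a minimal, self-contained sketch of the class on its own: it builds a CatalogDatabaseImpl from a properties map and a comment, registers it in a GenericInMemoryCatalog, and reads it back. The catalog, database, and property names are placeholders chosen for illustration, not taken from any of the projects below.

import java.util.HashMap;
import java.util.Map;

import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogDatabase;
import org.apache.flink.table.catalog.CatalogDatabaseImpl;
import org.apache.flink.table.catalog.GenericInMemoryCatalog;

public class CatalogDatabaseImplSketch {

	public static void main(String[] args) throws Exception {
		// Database properties are free-form key/value pairs; the comment may be null.
		Map<String, String> props = new HashMap<>();
		props.put("owner", "demo");
		CatalogDatabase database = new CatalogDatabaseImpl(props, "an example database");

		Catalog catalog = new GenericInMemoryCatalog("cat1");
		catalog.open();

		// ignoreIfExists = false: fail if "db1" already exists.
		catalog.createDatabase("db1", database, false);

		CatalogDatabase fetched = catalog.getDatabase("db1");
		System.out.println(fetched.getComment());    // an example database
		System.out.println(fetched.getProperties()); // {owner=demo}

		catalog.close();
	}
}
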
Example #1
Source File: SqlToOperationConverterTest.java    From flink with Apache License 2.0
@Test
public void testAlterTableAddUniqueConstraintEnforced() throws Exception {
	Catalog catalog = new GenericInMemoryCatalog("default", "default");
	catalogManager.registerCatalog("cat1", catalog);
	catalog.createDatabase("db1", new CatalogDatabaseImpl(new HashMap<>(), null), true);
	CatalogTable catalogTable = new CatalogTableImpl(
			TableSchema.builder()
					.field("a", DataTypes.STRING().notNull())
					.field("b", DataTypes.BIGINT().notNull())
					.field("c", DataTypes.BIGINT())
					.build(),
			new HashMap<>(),
			"tb1");
	catalogManager.setCurrentCatalog("cat1");
	catalogManager.setCurrentDatabase("db1");
	catalog.createTable(new ObjectPath("db1", "tb1"), catalogTable, true);
	// Test alter table add enforced
	thrown.expect(UnsupportedOperationException.class);
	thrown.expectMessage("UNIQUE constraint is not supported yet");
	parse("alter table tb1 add constraint ct1 unique(a, b)",
			SqlDialect.DEFAULT);
}
 
Example #2
Source File: HiveTableFactoryTest.java    From flink with Apache License 2.0
@Test
public void testGenericTable() throws Exception {
	TableSchema schema = TableSchema.builder()
		.field("name", DataTypes.STRING())
		.field("age", DataTypes.INT())
		.build();

	Map<String, String> properties = new HashMap<>();
	properties.put(CatalogConfig.IS_GENERIC, String.valueOf(true));
	properties.put("connector", "COLLECTION");

	catalog.createDatabase("mydb", new CatalogDatabaseImpl(new HashMap<>(), ""), true);
	ObjectPath path = new ObjectPath("mydb", "mytable");
	CatalogTable table = new CatalogTableImpl(schema, properties, "csv table");
	catalog.createTable(path, table, true);
	Optional<TableFactory> opt = catalog.getTableFactory();
	assertTrue(opt.isPresent());
	HiveTableFactory tableFactory = (HiveTableFactory) opt.get();
	TableSource tableSource = tableFactory.createTableSource(path, table);
	assertTrue(tableSource instanceof StreamTableSource);
	TableSink tableSink = tableFactory.createTableSink(path, table);
	assertTrue(tableSink instanceof StreamTableSink);
}
 
Example #3
Source File: HiveTableFactoryTest.java    From flink with Apache License 2.0
@Test
public void testHiveTable() throws Exception {
	TableSchema schema = TableSchema.builder()
		.field("name", DataTypes.STRING())
		.field("age", DataTypes.INT())
		.build();

	Map<String, String> properties = new HashMap<>();

	catalog.createDatabase("mydb", new CatalogDatabaseImpl(new HashMap<>(), ""), true);
	ObjectPath path = new ObjectPath("mydb", "mytable");
	CatalogTable table = new CatalogTableImpl(schema, properties, "hive table");
	catalog.createTable(path, table, true);
	Optional<TableFactory> opt = catalog.getTableFactory();
	assertTrue(opt.isPresent());
	HiveTableFactory tableFactory = (HiveTableFactory) opt.get();
	TableSink tableSink = tableFactory.createTableSink(path, table);
	assertTrue(tableSink instanceof HiveTableSink);
	TableSource tableSource = tableFactory.createTableSource(path, table);
	assertTrue(tableSource instanceof HiveTableSource);
}
 
Example #4
Source File: SqlToOperationConverterTest.java    From flink with Apache License 2.0
@Test
public void testAlterTableAddUniqueConstraint() throws Exception {
	Catalog catalog = new GenericInMemoryCatalog("default", "default");
	catalogManager.registerCatalog("cat1", catalog);
	catalog.createDatabase("db1", new CatalogDatabaseImpl(new HashMap<>(), null), true);
	CatalogTable catalogTable = new CatalogTableImpl(
			TableSchema.builder()
					.field("a", DataTypes.STRING().notNull())
					.field("b", DataTypes.BIGINT().notNull())
					.build(),
			new HashMap<>(),
			"tb1");
	catalogManager.setCurrentCatalog("cat1");
	catalogManager.setCurrentDatabase("db1");
	catalog.createTable(new ObjectPath("db1", "tb1"), catalogTable, true);
	// Test alter add table constraint.
	thrown.expect(UnsupportedOperationException.class);
	thrown.expectMessage("UNIQUE constraint is not supported yet");
	parse("alter table tb1 add constraint ct1 unique(a, b) not enforced",
			SqlDialect.DEFAULT);
}
 
Example #5
Source File: SqlToOperationConverterTest.java    From flink with Apache License 2.0
@Test
public void testAlterTableAddPkConstraintEnforced() throws Exception {
	Catalog catalog = new GenericInMemoryCatalog("default", "default");
	catalogManager.registerCatalog("cat1", catalog);
	catalog.createDatabase("db1", new CatalogDatabaseImpl(new HashMap<>(), null), true);
	CatalogTable catalogTable = new CatalogTableImpl(
			TableSchema.builder()
					.field("a", DataTypes.STRING().notNull())
					.field("b", DataTypes.BIGINT().notNull())
					.field("c", DataTypes.BIGINT())
					.build(),
			new HashMap<>(),
			"tb1");
	catalogManager.setCurrentCatalog("cat1");
	catalogManager.setCurrentDatabase("db1");
	catalog.createTable(new ObjectPath("db1", "tb1"), catalogTable, true);
	// Test alter table add enforced
	thrown.expect(ValidationException.class);
	thrown.expectMessage("Flink doesn't support ENFORCED mode for PRIMARY KEY constaint. "
			+ "ENFORCED/NOT ENFORCED  controls if the constraint checks are performed on the "
			+ "incoming/outgoing data. Flink does not own the data therefore the "
			+ "only supported mode is the NOT ENFORCED mode");
	parse("alter table tb1 add constraint ct1 primary key(a, b)",
			SqlDialect.DEFAULT);
}
 
Example #6
Source File: SqlToOperationConverter.java    From flink with Apache License 2.0
/** Convert CREATE DATABASE statement. */
private Operation convertCreateDatabase(SqlCreateDatabase sqlCreateDatabase) {
	String[] fullDatabaseName = sqlCreateDatabase.fullDatabaseName();
	if (fullDatabaseName.length > 2) {
		throw new SqlConversionException("create database identifier format error");
	}
	String catalogName = (fullDatabaseName.length == 1) ? catalogManager.getCurrentCatalog() : fullDatabaseName[0];
	String databaseName = (fullDatabaseName.length == 1) ? fullDatabaseName[0] : fullDatabaseName[1];
	boolean ignoreIfExists = sqlCreateDatabase.isIfNotExists();
	String databaseComment = sqlCreateDatabase.getComment()
		.map(comment -> comment.getNlsString().getValue()).orElse(null);
	// set with properties
	Map<String, String> properties = new HashMap<>();
	sqlCreateDatabase.getPropertyList().getList().forEach(p ->
		properties.put(((SqlTableOption) p).getKeyString(), ((SqlTableOption) p).getValueString()));
	CatalogDatabase catalogDatabase = new CatalogDatabaseImpl(properties, databaseComment);
	return new CreateDatabaseOperation(catalogName, databaseName, catalogDatabase, ignoreIfExists);
}
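The convertCreateDatabase method above is the code path behind the SQL CREATE DATABASE statement: the optional catalog prefix and database name come from fullDatabaseName, the COMMENT clause becomes the database comment, and the WITH clause becomes the property map. A hedged usage sketch follows, assuming a Flink version (1.11 or later) in which TableEnvironment#executeSql is available; the database name, comment, and property are illustrative.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class CreateDatabaseSqlSketch {

	public static void main(String[] args) {
		TableEnvironment tableEnv = TableEnvironment.create(
				EnvironmentSettings.newInstance().inStreamingMode().build());

		// An optional catalog prefix ("CREATE DATABASE cat1.db1 ...") selects the target catalog;
		// without it the statement uses the current catalog, as in the converter above.
		tableEnv.executeSql(
				"CREATE DATABASE IF NOT EXISTS db1 "
						+ "COMMENT 'an example database' "
						+ "WITH ('k1' = 'v1')");
	}
}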
 
Example #7
Source File: HiveTableFactoryTest.java    From flink with Apache License 2.0
@Test
public void testGenericTable() throws Exception {
	TableSchema schema = TableSchema.builder()
		.field("name", DataTypes.STRING())
		.field("age", DataTypes.INT())
		.build();

	Map<String, String> properties = new HashMap<>();
	properties.put(CatalogConfig.IS_GENERIC, String.valueOf(true));
	properties.put("connector", "COLLECTION");

	catalog.createDatabase("mydb", new CatalogDatabaseImpl(new HashMap<>(), ""), true);
	ObjectPath path = new ObjectPath("mydb", "mytable");
	CatalogTable table = new CatalogTableImpl(schema, properties, "csv table");
	catalog.createTable(path, table, true);
	Optional<TableFactory> opt = catalog.getTableFactory();
	assertTrue(opt.isPresent());
	HiveTableFactory tableFactory = (HiveTableFactory) opt.get();
	TableSource tableSource = tableFactory.createTableSource(new TableSourceFactoryContextImpl(
			ObjectIdentifier.of("mycatalog", "mydb", "mytable"), table, new Configuration()));
	assertTrue(tableSource instanceof StreamTableSource);
	TableSink tableSink = tableFactory.createTableSink(new TableSinkFactoryContextImpl(
			ObjectIdentifier.of("mycatalog", "mydb", "mytable"),
			table,
			new Configuration(),
			true));
	assertTrue(tableSink instanceof StreamTableSink);
}
 
Example #8
Source File: DependencyTest.java    From flink with Apache License 2.0
@Override
public Catalog createCatalog(String name, Map<String, String> properties) {
	// Developers may already have their own production/testing hive-site.xml set in their environment,
	// and Flink tests should avoid using those hive-site.xml.
	// Thus, explicitly create a testing HiveConf for unit tests here
	Catalog hiveCatalog = HiveTestUtils.createHiveCatalog(name, properties.get(HiveCatalogValidator.CATALOG_HIVE_VERSION));

	// Creates an additional database to test tableEnv.useDatabase() will switch current database of the catalog
	hiveCatalog.open();
	try {
		hiveCatalog.createDatabase(
			ADDITIONAL_TEST_DATABASE,
			new CatalogDatabaseImpl(new HashMap<>(), null),
			false);
		hiveCatalog.createTable(
			new ObjectPath(ADDITIONAL_TEST_DATABASE, TEST_TABLE),
			new CatalogTableImpl(
				TableSchema.builder()
					.field("testcol", DataTypes.INT())
					.build(),
				new HashMap<String, String>() {{
					put(CatalogConfig.IS_GENERIC, String.valueOf(false));
				}},
				""
			),
			false
		);
		// create a table to test parameterized types
		hiveCatalog.createTable(new ObjectPath("default", TABLE_WITH_PARAMETERIZED_TYPES),
				tableWithParameterizedTypes(),
				false);
	} catch (DatabaseAlreadyExistException | TableAlreadyExistException | DatabaseNotExistException e) {
		throw new CatalogException(e);
	}

	return hiveCatalog;
}
 
Example #9
Source File: SqlToOperationConverter.java    From flink with Apache License 2.0
/** Convert ALTER DATABASE statement. */
private Operation convertAlterDatabase(SqlAlterDatabase sqlAlterDatabase) {
	String[] fullDatabaseName = sqlAlterDatabase.fullDatabaseName();
	if (fullDatabaseName.length > 2) {
		throw new SqlConversionException("alter database identifier format error");
	}
	String catalogName = (fullDatabaseName.length == 1) ? catalogManager.getCurrentCatalog() : fullDatabaseName[0];
	String databaseName = (fullDatabaseName.length == 1) ? fullDatabaseName[0] : fullDatabaseName[1];
	final Map<String, String> properties;
	CatalogDatabase originCatalogDatabase;
	Optional<Catalog> catalog = catalogManager.getCatalog(catalogName);
	if (catalog.isPresent()) {
		try {
			originCatalogDatabase = catalog.get().getDatabase(databaseName);
			properties = new HashMap<>(originCatalogDatabase.getProperties());
		} catch (DatabaseNotExistException e) {
			throw new SqlConversionException(String.format("Database %s not exists", databaseName), e);
		}
	} else {
		throw new SqlConversionException(String.format("Catalog %s not exists", catalogName));
	}
	// set with properties
	sqlAlterDatabase.getPropertyList().getList().forEach(p ->
		properties.put(((SqlTableOption) p).getKeyString(), ((SqlTableOption) p).getValueString()));
	CatalogDatabase catalogDatabase = new CatalogDatabaseImpl(properties, originCatalogDatabase.getComment());
	return new AlterDatabaseOperation(catalogName, databaseName, catalogDatabase);
}
 
Example #10
Source File: SqlToOperationConverter.java    From flink with Apache License 2.0
/** Convert ALTER DATABASE statement. */
private Operation convertAlterDatabase(SqlAlterDatabase sqlAlterDatabase) {
	String[] fullDatabaseName = sqlAlterDatabase.fullDatabaseName();
	if (fullDatabaseName.length > 2) {
		throw new SqlConversionException("alter database identifier format error");
	}
	String catalogName = (fullDatabaseName.length == 1) ? catalogManager.getCurrentCatalog() : fullDatabaseName[0];
	String databaseName = (fullDatabaseName.length == 1) ? fullDatabaseName[0] : fullDatabaseName[1];
	Map<String, String> properties = new HashMap<>();
	CatalogDatabase originCatalogDatabase;
	Optional<Catalog> catalog = catalogManager.getCatalog(catalogName);
	if (catalog.isPresent()) {
		try {
			originCatalogDatabase = catalog.get().getDatabase(databaseName);
			properties.putAll(originCatalogDatabase.getProperties());
		} catch (DatabaseNotExistException e) {
			throw new SqlConversionException(String.format("Database %s not exists", databaseName), e);
		}
	} else {
		throw new SqlConversionException(String.format("Catalog %s not exists", catalogName));
	}
	// set with properties
	sqlAlterDatabase.getPropertyList().getList().forEach(p ->
		properties.put(((SqlTableOption) p).getKeyString(), ((SqlTableOption) p).getValueString()));
	CatalogDatabase catalogDatabase = new CatalogDatabaseImpl(properties, originCatalogDatabase.getComment());
	return new AlterDatabaseOperation(catalogName, databaseName, catalogDatabase);
}
 
Example #11
Source File: HiveTableFactoryTest.java    From flink with Apache License 2.0
@Test
public void testHiveTable() throws Exception {
	TableSchema schema = TableSchema.builder()
		.field("name", DataTypes.STRING())
		.field("age", DataTypes.INT())
		.build();

	Map<String, String> properties = new HashMap<>();
	properties.put(CatalogConfig.IS_GENERIC, String.valueOf(false));

	catalog.createDatabase("mydb", new CatalogDatabaseImpl(new HashMap<>(), ""), true);
	ObjectPath path = new ObjectPath("mydb", "mytable");
	CatalogTable table = new CatalogTableImpl(schema, properties, "hive table");
	catalog.createTable(path, table, true);
	Optional<TableFactory> opt = catalog.getTableFactory();
	assertTrue(opt.isPresent());
	HiveTableFactory tableFactory = (HiveTableFactory) opt.get();
	TableSink tableSink = tableFactory.createTableSink(new TableSinkFactoryContextImpl(
			ObjectIdentifier.of("mycatalog", "mydb", "mytable"),
			table,
			new Configuration(),
			true));
	assertTrue(tableSink instanceof HiveTableSink);
	TableSource tableSource = tableFactory.createTableSource(new TableSourceFactoryContextImpl(
			ObjectIdentifier.of("mycatalog", "mydb", "mytable"), table, new Configuration()));
	assertTrue(tableSource instanceof HiveTableSource);
}
 
Example #12
Source File: HiveCatalogDataTypeTest.java    From flink with Apache License 2.0
private static CatalogDatabase createDb() {
	return new CatalogDatabaseImpl(
		new HashMap<String, String>() {{
			put("k1", "v1");
		}},
		""
	);
}
 
Example #13
Source File: HiveCatalog.java    From flink with Apache License 2.0
@Override
public CatalogDatabase getDatabase(String databaseName) throws DatabaseNotExistException, CatalogException {
	Database hiveDatabase = getHiveDatabase(databaseName);

	Map<String, String> properties = hiveDatabase.getParameters();

	boolean isGeneric = isGenericForGet(properties);
	if (!isGeneric) {
		properties.put(SqlCreateHiveDatabase.DATABASE_LOCATION_URI, hiveDatabase.getLocationUri());
	}

	return new CatalogDatabaseImpl(properties, hiveDatabase.getDescription());
}
 
Example #14
Source File: PostgresCatalog.java    From flink with Apache License 2.0
@Override
public CatalogDatabase getDatabase(String databaseName) throws DatabaseNotExistException, CatalogException {
	if (listDatabases().contains(databaseName)) {
		return new CatalogDatabaseImpl(Collections.emptyMap(), null);
	} else {
		throw new DatabaseNotExistException(getName(), databaseName);
	}
}
 
Example #15
Source File: KuduCatalog.java    From bahir-flink with Apache License 2.0
@Override
public CatalogDatabase getDatabase(String databaseName) throws DatabaseNotExistException, CatalogException {
    if (databaseName.equals(getDefaultDatabase())) {
        return new CatalogDatabaseImpl(new HashMap<>(), null);
    } else {
        throw new DatabaseNotExistException(getName(), databaseName);
    }
}
 
Example #16
Source File: DependencyTest.java    From flink with Apache License 2.0
@Override
public Catalog createCatalog(String name, Map<String, String> properties) {
	// Test HiveCatalogFactory.createCatalog
	// But not use it for testing purpose
	assertTrue(super.createCatalog(name, properties) != null);

	// Developers may already have their own production/testing hive-site.xml set in their environment,
	// and Flink tests should avoid using those hive-site.xml.
	// Thus, explicitly create a testing HiveConf for unit tests here
	Catalog hiveCatalog = HiveTestUtils.createHiveCatalog(name, properties.get(HiveCatalogValidator.CATALOG_HIVE_VERSION));

	// Creates an additional database to test tableEnv.useDatabase() will switch current database of the catalog
	hiveCatalog.open();
	try {
		hiveCatalog.createDatabase(
			ADDITIONAL_TEST_DATABASE,
			new CatalogDatabaseImpl(new HashMap<>(), null),
			false);
		hiveCatalog.createTable(
			new ObjectPath(ADDITIONAL_TEST_DATABASE, TEST_TABLE),
			new CatalogTableImpl(
				TableSchema.builder()
					.field("testcol", DataTypes.INT())
					.build(),
				new HashMap<String, String>() {{
					put(CatalogConfig.IS_GENERIC, String.valueOf(true));
				}},
				""
			),
			false
		);
	} catch (DatabaseAlreadyExistException | TableAlreadyExistException | DatabaseNotExistException e) {
		throw new CatalogException(e);
	}

	return hiveCatalog;
}
 
Example #17
Source File: PulsarCatalog.java    From pulsar-flink with Apache License 2.0
@Override
public CatalogDatabase getDatabase(String databaseName) throws DatabaseNotExistException, CatalogException {
    Map<String, String> properties = new HashMap<>();
    return new CatalogDatabaseImpl(properties, databaseName);
}
 
Example #18
Source File: HiveCatalog.java    From flink with Apache License 2.0
@Override
public CatalogDatabase getDatabase(String databaseName) throws DatabaseNotExistException, CatalogException {
	Database hiveDatabase = getHiveDatabase(databaseName);

	Map<String, String> properties = hiveDatabase.getParameters();

	properties.put(HiveCatalogConfig.DATABASE_LOCATION_URI, hiveDatabase.getLocationUri());

	return new CatalogDatabaseImpl(properties, hiveDatabase.getDescription());
}
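
All of the getDatabase implementations above return a CatalogDatabaseImpl, which callers consume through the CatalogDatabase interface. A brief consumer-side sketch follows; the catalog instance and database name are placeholders.

import java.util.Map;

import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogDatabase;
import org.apache.flink.table.catalog.exceptions.DatabaseNotExistException;

public class DescribeDatabaseSketch {

	static void describe(Catalog catalog, String databaseName) throws DatabaseNotExistException {
		// getDatabase throws DatabaseNotExistException when the database is missing,
		// as the PostgresCatalog and KuduCatalog examples above show.
		CatalogDatabase db = catalog.getDatabase(databaseName);

		// The comment may be null when the database was created with a null comment.
		String comment = db.getComment();
		Map<String, String> properties = db.getProperties();

		System.out.println(databaseName + ": " + comment + " " + properties);
	}
}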