Java Code Examples for org.apache.flink.table.factories.TableFactoryService

The following examples show how to use org.apache.flink.table.factories.TableFactoryService. These examples are extracted from open-source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example 1
/**
 * Resolves a {@link SerializationSchema} for the given connector properties.
 *
 * <p>Only the single supported format type is accepted; anything else fails
 * fast with a descriptive {@link ValidationException}.
 */
private SerializationSchema<Row> getSerializationSchema(Map<String, String> properties) {
	final String declaredFormat = properties.get(FORMAT_TYPE);
	// We could have added this check to the table factory context, but checking
	// here allows throwing a more helpful error message when the supported
	// format has not been added.
	final boolean supported = declaredFormat != null && declaredFormat.equals(SUPPORTED_FORMAT_TYPE);
	if (!supported) {
		throw new ValidationException(
			"The Elasticsearch sink requires a '" + SUPPORTED_FORMAT_TYPE + "' format.");
	}

	// Discover the format factory on this class's classloader.
	@SuppressWarnings("unchecked")
	final SerializationSchemaFactory<Row> factory = TableFactoryService.find(
		SerializationSchemaFactory.class,
		properties,
		this.getClass().getClassLoader());
	return factory.createSerializationSchema(properties);
}
 
Example 2
Source Project: Flink-CEPplus   Source File: CsvRowFormatFactoryTest.java    License: Apache License 2.0
@Test
public void testSchemaDerivation() {
	// Combine the table schema with a CSV format that derives its fields from it.
	final Map<String, String> props = new HashMap<>();
	props.putAll(new Schema().schema(TableSchema.fromTypeInfo(SCHEMA)).toProperties());
	props.putAll(new Csv().deriveSchema().toProperties());

	// Deserialization side: factory lookup must equal the directly built schema.
	final DeserializationSchema<?> deserialized = TableFactoryService
		.find(DeserializationSchemaFactory.class, props)
		.createDeserializationSchema(props);
	assertEquals(new CsvRowDeserializationSchema.Builder(SCHEMA).build(), deserialized);

	// Serialization side: same expectation for the serialization factory.
	final SerializationSchema<?> serialized = TableFactoryService
		.find(SerializationSchemaFactory.class, props)
		.createSerializationSchema(props);
	assertEquals(new CsvRowSerializationSchema.Builder(SCHEMA).build(), serialized);
}
 
Example 3
Source Project: flink   Source File: JDBCTableSourceSinkFactoryTest.java    License: Apache License 2.0
/**
 * Verifies that projecting fields on a JDBC table source narrows the produced
 * field data types to the projected columns only.
 */
@Test
public void testJDBCWithFilter() {
	Map<String, String> properties = getBasicProperties();
	properties.put("connector.driver", "org.apache.derby.jdbc.EmbeddedDriver");
	properties.put("connector.username", "user");
	properties.put("connector.password", "pass");

	// Project to the first and third columns ("aaa" and "ccc").
	final TableSource<?> actual = ((JDBCTableSource) TableFactoryService
		.find(StreamTableSourceFactory.class, properties)
		.createStreamTableSource(properties))
		.projectFields(new int[] {0, 2});

	Map<String, DataType> projectedFields = ((FieldsDataType) actual.getProducedDataType()).getFieldDataTypes();
	// JUnit's assertEquals signature is (expected, actual); the original call
	// order was reversed, which produces misleading failure messages.
	assertEquals(DataTypes.INT(), projectedFields.get("aaa"));
	// "bbb" was projected away entirely.
	assertNull(projectedFields.get("bbb"));
	assertEquals(DataTypes.DOUBLE(), projectedFields.get("ccc"));
}
 
Example 4
Source Project: flink   Source File: CsvRowFormatFactoryTest.java    License: Apache License 2.0
@Test
public void testSchemaDerivation() {
	// Table schema plus a CSV format that derives its format schema from it.
	final Map<String, String> formatProperties = new HashMap<>();
	formatProperties.putAll(new Schema().schema(TableSchema.fromTypeInfo(SCHEMA)).toProperties());
	formatProperties.putAll(new Csv().deriveSchema().toProperties());

	// The factory-created serializer must equal the directly built one.
	final SerializationSchema<?> ser = TableFactoryService
		.find(SerializationSchemaFactory.class, formatProperties)
		.createSerializationSchema(formatProperties);
	assertEquals(new CsvRowSerializationSchema.Builder(SCHEMA).build(), ser);

	// Same check for the deserialization factory.
	final DeserializationSchema<?> deser = TableFactoryService
		.find(DeserializationSchemaFactory.class, formatProperties)
		.createDeserializationSchema(formatProperties);
	assertEquals(new CsvRowDeserializationSchema.Builder(SCHEMA).build(), deser);
}
 
Example 5
/**
 * For sink, stream name information is mandatory.
 */
@Test (expected = IllegalStateException.class)
public void testMissingStreamNameForWriter() {
    // Build a writer without forStream(...) so the stream name is absent.
    Pravega pravega = new Pravega();
    pravega.tableSinkWriterBuilder()
            .withRoutingKeyField("name");

    final Map<String, String> props = new TestTableDescriptor(pravega)
            .withFormat(JSON)
            .withSchema(SCHEMA)
            .inAppendMode()
            .toProperties();

    // Sink creation must throw IllegalStateException before reaching fail().
    TableFactoryService.find(StreamTableSinkFactory.class, props)
            .createStreamTableSink(props);
    fail("stream name validation failed");
}
 
Example 6
@Test (expected = ValidationException.class)
public void testInvalidWriterMode() {
    // Fully configured writer targeting the test scope/stream.
    Pravega pravega = new Pravega();
    pravega.tableSinkWriterBuilder()
            .withRoutingKeyField("name")
            .forStream(Stream.of(SCOPE, STREAM))
            .withPravegaConfig(PRAVEGA_CONFIG);

    final TestTableDescriptor testDesc = new TestTableDescriptor(pravega)
            .withFormat(JSON)
            .withSchema(SCHEMA)
            .inAppendMode();

    // Copy the descriptor properties and inject an unsupported writer mode.
    Map<String, String> props = new HashMap<>(testDesc.toProperties());
    props.put(CONNECTOR_WRITER_MODE, "foo");
    // The bogus mode must trigger a ValidationException before fail() runs.
    TableFactoryService.find(StreamTableSinkFactory.class, props)
            .createStreamTableSink(props);
    fail("writer mode validation failed");
}
 
Example 7
Source Project: flink   Source File: CsvRowFormatFactoryTest.java    License: Apache License 2.0
@Test
public void testSchemaDerivation() {
	// Table schema combined with a plain CSV format (no explicit format schema).
	final Map<String, String> csvProps = new HashMap<>();
	csvProps.putAll(new Schema().schema(TableSchema.fromTypeInfo(SCHEMA)).toProperties());
	csvProps.putAll(new Csv().toProperties());

	// Deserialization factory must produce a schema equal to the builder result.
	final DeserializationSchema<?> deserialized = TableFactoryService
		.find(DeserializationSchemaFactory.class, csvProps)
		.createDeserializationSchema(csvProps);
	assertEquals(new CsvRowDeserializationSchema.Builder(SCHEMA).build(), deserialized);

	// Serialization factory must match as well.
	final SerializationSchema<?> serialized = TableFactoryService
		.find(SerializationSchemaFactory.class, csvProps)
		.createSerializationSchema(csvProps);
	assertEquals(new CsvRowSerializationSchema.Builder(SCHEMA).build(), serialized);
}
 
Example 8
@Test
public void testValidWriterModeExactlyOnce() {
    // Configure a writer with EXACTLY_ONCE mode; the factory must accept it.
    Pravega pravega = new Pravega();
    pravega.tableSinkWriterBuilder()
            .withRoutingKeyField("name").withWriterMode(PravegaWriterMode.EXACTLY_ONCE)
            .forStream(Stream.of(SCOPE, STREAM))
            .withPravegaConfig(PRAVEGA_CONFIG);

    final Map<String, String> props = new TestTableDescriptor(pravega)
            .withFormat(JSON)
            .withSchema(SCHEMA)
            .inAppendMode()
            .toProperties();

    // A sink must be created without validation errors.
    final TableSink<?> sink = TableFactoryService.find(StreamTableSinkFactory.class, props)
            .createStreamTableSink(props);
    assertNotNull(sink);
}
 
Example 9
@Test (expected = ValidationException.class)
public void testMissingFormatDefinition() {
    // Fully configured writer ...
    Pravega pravega = new Pravega();
    pravega.tableSinkWriterBuilder()
            .withRoutingKeyField("name")
            .forStream(Stream.of(SCOPE, STREAM))
            .withPravegaConfig(PRAVEGA_CONFIG);

    // ... but the descriptor omits withFormat(...), which must fail validation.
    final Map<String, String> props = new TestTableDescriptor(pravega)
            .withSchema(SCHEMA)
            .inAppendMode()
            .toProperties();

    TableFactoryService.find(StreamTableSinkFactory.class, props)
            .createStreamTableSink(props);
    fail("table factory validation failed");
}
 
Example 10
@Test (expected = ValidationException.class)
public void testMissingSchemaDefinition() {
    // Fully configured writer ...
    Pravega pravega = new Pravega();
    pravega.tableSinkWriterBuilder()
            .withRoutingKeyField("name")
            .forStream(Stream.of(SCOPE, STREAM))
            .withPravegaConfig(PRAVEGA_CONFIG);

    // ... but the descriptor omits withSchema(...), which must fail validation.
    final Map<String, String> props = new TestTableDescriptor(pravega)
            .withFormat(JSON)
            .inAppendMode()
            .toProperties();

    TableFactoryService.find(StreamTableSinkFactory.class, props)
            .createStreamTableSink(props);
    fail("missing schema validation failed");
}
 
Example 11
Source Project: flink   Source File: JdbcTableSourceSinkFactoryTest.java    License: Apache License 2.0
@Test
public void testJdbcCommonProperties() {
	Map<String, String> props = getBasicProperties();
	props.put("connector.driver", "org.apache.derby.jdbc.EmbeddedDriver");
	props.put("connector.username", "user");
	props.put("connector.password", "pass");

	// Build the source we expect the factory to produce from these properties.
	final JdbcOptions options = JdbcOptions.builder()
		.setDBUrl("jdbc:derby:memory:mydb")
		.setTableName("mytable")
		.setDriverName("org.apache.derby.jdbc.EmbeddedDriver")
		.setUsername("user")
		.setPassword("pass")
		.build();
	final JdbcTableSource expected = JdbcTableSource.builder()
		.setOptions(options)
		.setSchema(schema)
		.build();

	final StreamTableSource<?> actual = TableFactoryService.find(StreamTableSourceFactory.class, props)
		.createStreamTableSource(props);

	// Both sources must pass validation and compare equal.
	TableSourceValidation.validateTableSource(expected, schema);
	TableSourceValidation.validateTableSource(actual, schema);
	assertEquals(expected, actual);
}
 
Example 12
Source Project: flink   Source File: JdbcTableSourceSinkFactoryTest.java    License: Apache License 2.0
/**
 * Verifies that field projection on a JDBC table source narrows the produced
 * data type and that the projection shows up in the source description.
 */
@Test
public void testJdbcFieldsProjection() {
	Map<String, String> properties = getBasicProperties();
	properties.put("connector.driver", "org.apache.derby.jdbc.EmbeddedDriver");
	properties.put("connector.username", "user");
	properties.put("connector.password", "pass");

	// Project to the first and third fields only.
	final TableSource<?> actual = ((JdbcTableSource) TableFactoryService
		.find(StreamTableSourceFactory.class, properties)
		.createStreamTableSource(properties))
		.projectFields(new int[] {0, 2});

	List<DataType> projectedFields = actual.getProducedDataType().getChildren();
	assertEquals(Arrays.asList(DataTypes.INT(), DataTypes.DOUBLE()), projectedFields);

	// test jdbc table source description
	List<String> fieldNames = ((RowType) actual.getProducedDataType().getLogicalType()).getFieldNames();
	// String.join accepts any Iterable<CharSequence>; no need to detour through
	// a stream and a String[] copy.
	String expectedSourceDescription = actual.getClass().getSimpleName()
		+ "(" + String.join(", ", fieldNames) + ")";
	assertEquals(expectedSourceDescription, actual.explainSource());
}
 
Example 13
Source Project: flink   Source File: KafkaTableSourceSinkFactoryTestBase.java    License: Apache License 2.0
@Test
public void testTableSourceCommitOnCheckpointsDisabled() {
	// Start from the standard Kafka source properties but drop the consumer group id.
	Map<String, String> propertiesMap = new HashMap<>(createKafkaSourceProperties());
	propertiesMap.remove("connector.properties.group.id");

	final TableSource<?> tableSource = TableFactoryService.find(StreamTableSourceFactory.class, propertiesMap)
		.createStreamTableSource(propertiesMap);
	final StreamExecutionEnvironmentMock mock = new StreamExecutionEnvironmentMock();
	// Test commitOnCheckpoints flag should be false when do not set consumer group.
	((KafkaTableSourceBase) tableSource).getDataStream(mock);
	assertTrue(mock.sourceFunction instanceof FlinkKafkaConsumerBase);
	assertFalse(((FlinkKafkaConsumerBase) mock.sourceFunction).getEnableCommitOnCheckpoints());
}
 
Example 14
/**
 * Creates a {@link SerializationSchema} for the declared format, rejecting any
 * format other than the single supported one.
 */
private SerializationSchema<Row> getSerializationSchema(Map<String, String> properties) {
	// We could have added this check to the table factory context, but checking
	// here lets us throw a more helpful error message when the supported format
	// has not been added.
	final String format = properties.get(FORMAT_TYPE);
	if (!(format != null && format.equals(SUPPORTED_FORMAT_TYPE))) {
		throw new ValidationException(
			"The Elasticsearch sink requires a '" + SUPPORTED_FORMAT_TYPE + "' format.");
	}

	// Look up the matching format factory on this class's classloader.
	@SuppressWarnings("unchecked")
	final SerializationSchemaFactory<Row> factory = TableFactoryService.find(
		SerializationSchemaFactory.class,
		properties,
		this.getClass().getClassLoader());
	return factory.createSerializationSchema(properties);
}
 
Example 15
@Test
public void testRequiredProperties() {
	// Only the required properties: the factory must still resolve to a Datahub sink.
	final Map<String, String> props = getBasicProperties();
	final TableSink<?> sink = TableFactoryService.find(TableSinkFactory.class, props)
			.createTableSink(props);
	assertTrue(sink instanceof DatahubTableSink);
}
 
Example 16
@Test
public void testSupportedProperties() {
	// Start from the required properties and add every optional connector setting.
	final Map<String, String> props = getBasicProperties();
	props.put(CONNECTOR_BATCH_SIZE, "1");
	props.put(CONNECTOR_BUFFER_SIZE, "1");
	props.put(CONNECTOR_RETRY_TIMEOUT_IN_MILLS, "3");
	props.put(CONNECTOR_MAX_RETRY_TIMES, "10");
	props.put(CONNECTOR_BATCH_WRITE_TIMEOUT_IN_MILLS, "5");

	// The factory must still resolve to a Datahub sink with all options set.
	final TableSink<?> sink = TableFactoryService.find(TableSinkFactory.class, props)
			.createTableSink(props);
	assertTrue(sink instanceof DatahubTableSink);
}
 
Example 17
/**
 * Resolves a {@link DeserializationSchema} for the given format properties.
 */
private DeserializationSchema<Row> getDeserializationSchema(Map<String, String> properties) {
	// Use this class's own classloader so formats packaged with the connector
	// are discoverable by the factory service.
	final ClassLoader classLoader = this.getClass().getClassLoader();
	@SuppressWarnings("unchecked")
	final DeserializationSchemaFactory<Row> factory =
		TableFactoryService.find(DeserializationSchemaFactory.class, properties, classLoader);
	return factory.createDeserializationSchema(properties);
}
 
Example 18
/**
 * Resolves a {@link SerializationSchema} for the given format properties.
 */
private SerializationSchema<Row> getSerializationSchema(Map<String, String> properties) {
	// Use this class's own classloader so formats packaged with the connector
	// are discoverable by the factory service.
	final ClassLoader classLoader = this.getClass().getClassLoader();
	@SuppressWarnings("unchecked")
	final SerializationSchemaFactory<Row> factory =
		TableFactoryService.find(SerializationSchemaFactory.class, properties, classLoader);
	return factory.createSerializationSchema(properties);
}
 
Example 19
Source Project: Flink-CEPplus   Source File: AvroRowFormatFactoryTest.java    License: Apache License 2.0
// NOTE(review): despite its name, this helper exercises the DESERIALIZATION
// path — its name appears to be swapped with testRecordClassDeserializationSchema
// below. Confirm and consider renaming both (callers would need updating too).
private void testRecordClassSerializationSchema(Map<String, String> properties) {
	// Look up the deserialization factory for these format properties and
	// create a schema from them.
	final DeserializationSchema<?> actual2 = TableFactoryService
		.find(DeserializationSchemaFactory.class, properties)
		.createDeserializationSchema(properties);
	// Expected: an Avro deserializer built from the specific-record class.
	final AvroRowDeserializationSchema expected2 = new AvroRowDeserializationSchema(AVRO_SPECIFIC_RECORD);
	assertEquals(expected2, actual2);
}
 
Example 20
Source Project: Flink-CEPplus   Source File: AvroRowFormatFactoryTest.java    License: Apache License 2.0
// NOTE(review): despite its name, this helper exercises the SERIALIZATION
// path — its name appears to be swapped with testRecordClassSerializationSchema
// above. Confirm and consider renaming both (callers would need updating too).
private void testRecordClassDeserializationSchema(Map<String, String> properties) {
	// Look up the serialization factory for these format properties and
	// create a schema from them.
	final SerializationSchema<?> actual1 = TableFactoryService
		.find(SerializationSchemaFactory.class, properties)
		.createSerializationSchema(properties);
	// Expected: an Avro serializer built from the specific-record class.
	final SerializationSchema<?> expected1 = new AvroRowSerializationSchema(AVRO_SPECIFIC_RECORD);
	assertEquals(expected1, actual1);
}
 
Example 21
Source Project: Flink-CEPplus   Source File: AvroRowFormatFactoryTest.java    License: Apache License 2.0
/** Factory-created deserializer must equal one built directly from the Avro schema. */
private void testAvroSchemaDeserializationSchema(Map<String, String> properties) {
	final AvroRowDeserializationSchema expected = new AvroRowDeserializationSchema(AVRO_SCHEMA);
	final DeserializationSchema<?> fromFactory = TableFactoryService
		.find(DeserializationSchemaFactory.class, properties)
		.createDeserializationSchema(properties);
	assertEquals(expected, fromFactory);
}
 
Example 22
Source Project: Flink-CEPplus   Source File: AvroRowFormatFactoryTest.java    License: Apache License 2.0
/** Factory-created serializer must equal one built directly from the Avro schema. */
private void testAvroSchemaSerializationSchema(Map<String, String> properties) {
	final SerializationSchema<?> expected = new AvroRowSerializationSchema(AVRO_SCHEMA);
	final SerializationSchema<?> fromFactory = TableFactoryService
		.find(SerializationSchemaFactory.class, properties)
		.createSerializationSchema(properties);
	assertEquals(expected, fromFactory);
}
 
Example 23
Source Project: flink   Source File: JsonRowFormatFactoryTest.java    License: Apache License 2.0
/** Factory-created serializer must equal one built directly from SCHEMA. */
private void testSchemaSerializationSchema(Map<String, String> properties) {
	final SerializationSchema<?> expected = new JsonRowSerializationSchema.Builder(SCHEMA).build();
	final SerializationSchema<?> fromFactory = TableFactoryService
		.find(SerializationSchemaFactory.class, properties)
		.createSerializationSchema(properties);
	assertEquals(expected, fromFactory);
}
 
Example 24
Source Project: Flink-CEPplus   Source File: JsonRowFormatFactoryTest.java    License: Apache License 2.0
/** Factory-created serializer must equal one constructed directly from SCHEMA. */
private void testSchemaSerializationSchema(Map<String, String> properties) {
	final SerializationSchema<?> expected = new JsonRowSerializationSchema(SCHEMA);
	final SerializationSchema<?> fromFactory = TableFactoryService
		.find(SerializationSchemaFactory.class, properties)
		.createSerializationSchema(properties);
	assertEquals(expected, fromFactory);
}
 
Example 25
Source Project: Flink-CEPplus   Source File: JsonRowFormatFactoryTest.java    License: Apache License 2.0
/** Factory-created deserializer must equal one built from the JSON schema string. */
private void testJsonSchemaDeserializationSchema(Map<String, String> properties) {
	// The expected schema must fail on missing fields, matching the configured format.
	final JsonRowDeserializationSchema expected = new JsonRowDeserializationSchema(JSON_SCHEMA);
	expected.setFailOnMissingField(true);
	final DeserializationSchema<?> fromFactory = TableFactoryService
		.find(DeserializationSchemaFactory.class, properties)
		.createDeserializationSchema(properties);
	assertEquals(expected, fromFactory);
}
 
Example 26
Source Project: flink   Source File: JsonRowFormatFactoryTest.java    License: Apache License 2.0
/** Factory-created serializer must equal one built via type info from the JSON schema. */
private void testJsonSchemaSerializationSchema(Map<String, String> properties) {
	// Expected serializer uses type info converted out of the JSON schema string.
	final SerializationSchema<?> expected = JsonRowSerializationSchema.builder()
		.withTypeInfo(JsonRowSchemaConverter.convert(JSON_SCHEMA))
		.build();
	final SerializationSchema<?> fromFactory = TableFactoryService
		.find(SerializationSchemaFactory.class, properties)
		.createSerializationSchema(properties);
	assertEquals(expected, fromFactory);
}
 
Example 27
Source Project: flink   Source File: HBaseConnectorITCase.java    License: Apache License 2.0
/**
 * End-to-end test of the HBase lookup table source: registers a datastream
 * table, obtains an HBase table source via {@link TableFactoryService}, and
 * runs a temporal table join against it, comparing the streamed results.
 *
 * <p>NOTE(review): depends on fixtures defined elsewhere in this class
 * (planner, testData2, testTypeInfo2, hbaseTableProperties, streamSettings).
 */
@Test
public void testHBaseLookupTableSource() throws Exception {
	if (OLD_PLANNER.equals(planner)) {
		// lookup table source is only supported in blink planner, skip for old planner
		return;
	}
	StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
	StreamTableEnvironment streamTableEnv = StreamTableEnvironment.create(streamEnv, streamSettings);
	// clear any results left over from previous tests
	StreamITCase.clear();

	// prepare a source table
	String srcTableName = "src";
	DataStream<Row> ds = streamEnv.fromCollection(testData2).returns(testTypeInfo2);
	Table in = streamTableEnv.fromDataStream(ds, "a, b, c, proc.proctime");
	streamTableEnv.registerTable(srcTableName, in);

	// create the HBase lookup source through the factory service
	Map<String, String> tableProperties = hbaseTableProperties();
	TableSource source = TableFactoryService
		.find(HBaseTableFactory.class, tableProperties)
		.createTableSource(tableProperties);
	streamTableEnv.registerTableSource("hbaseLookup", source);
	// perform a temporal table join query
	String query = "SELECT a,family1.col1, family3.col3 FROM src " +
		"JOIN hbaseLookup FOR SYSTEM_TIME AS OF src.proc as h ON src.a = h.rk";
	Table result = streamTableEnv.sqlQuery(query);

	DataStream<Row> resultSet = streamTableEnv.toAppendStream(result, Row.class);
	resultSet.addSink(new StreamITCase.StringSink<>());

	// run the streaming job; results are collected by the StringSink above
	streamEnv.execute();

	// each src row with a matching HBase rowkey (a == rk) produces one result
	List<String> expected = new ArrayList<>();
	expected.add("1,10,Welt-1");
	expected.add("2,20,Welt-2");
	expected.add("3,30,Welt-3");
	expected.add("3,30,Welt-3");

	StreamITCase.compareWithList(expected);
}
 
Example 28
Source Project: flink   Source File: HBaseTableFactoryTest.java    License: Apache License 2.0
/**
 * Verifies that the HBase table factory builds an {@link HBaseTableSource}
 * whose lookup function and table schema reflect the descriptor: three column
 * families (f1/f2/f3) and a LONG row key at index 2.
 */
@Test
public void testTableSourceFactory() {
	// Column layout: two families, then the row key column, then a third family.
	String[] columnNames = {FAMILY1, FAMILY2, ROWKEY, FAMILY3};

	TypeInformation<Row> f1 = Types.ROW_NAMED(new String[]{COL1}, Types.INT);
	TypeInformation<Row> f2 = Types.ROW_NAMED(new String[]{COL1, COL2}, Types.INT, Types.LONG);
	TypeInformation<Row> f3 = Types.ROW_NAMED(new String[]{COL1, COL2, COL3}, Types.DOUBLE, Types.BOOLEAN, Types.STRING);
	TypeInformation[] columnTypes = new TypeInformation[]{f1, f2, Types.LONG, f3};

	DescriptorProperties descriptorProperties = createDescriptor(columnNames, columnTypes);
	TableSource source = TableFactoryService.find(HBaseTableFactory.class,
		descriptorProperties.asMap()).createTableSource(descriptorProperties.asMap());
	Assert.assertTrue(source instanceof HBaseTableSource);
	// The lookup function must resolve against the row key and know the table name.
	TableFunction<Row> tableFunction = ((HBaseTableSource) source).getLookupFunction(new String[]{ROWKEY});
	Assert.assertTrue(tableFunction instanceof HBaseLookupFunction);
	Assert.assertEquals("testHBastTable", ((HBaseLookupFunction) tableFunction).getHTableName());

	// Schema checks: row key position/type, then family and qualifier layout.
	HBaseTableSchema hbaseSchema = ((HBaseTableSource) source).getHBaseTableSchema();
	Assert.assertEquals(2, hbaseSchema.getRowKeyIndex());
	Assert.assertEquals(Optional.of(Types.LONG), hbaseSchema.getRowKeyTypeInfo());

	Assert.assertArrayEquals(new String[]{"f1", "f2", "f3"}, hbaseSchema.getFamilyNames());
	Assert.assertArrayEquals(new String[]{"c1"}, hbaseSchema.getQualifierNames("f1"));
	Assert.assertArrayEquals(new String[]{"c1", "c2"}, hbaseSchema.getQualifierNames("f2"));
	Assert.assertArrayEquals(new String[]{"c1", "c2", "c3"}, hbaseSchema.getQualifierNames("f3"));

	Assert.assertArrayEquals(new TypeInformation[]{Types.INT}, hbaseSchema.getQualifierTypes("f1"));
	Assert.assertArrayEquals(new TypeInformation[]{Types.INT, Types.LONG}, hbaseSchema.getQualifierTypes("f2"));
	Assert.assertArrayEquals(new TypeInformation[]{Types.DOUBLE, Types.BOOLEAN, Types.STRING}, hbaseSchema.getQualifierTypes("f3"));
}
 
Example 29
Source Project: flink   Source File: HiveCatalogFactoryTest.java    License: Apache License 2.0
/**
 * Verifies that the HiveCatalog created by the factory picks up HDFS
 * configuration from the directory pointed to by the HADOOP_CONF_DIR
 * environment variable.
 *
 * <p>Mutates process environment via {@code CommonTestUtils.setEnv} and
 * restores it in a finally block, so the statement order here is critical.
 */
@Test
public void testLoadHDFSConfigFromEnv() throws IOException {
	// arbitrary marker property written into hdfs-site.xml and checked at the end
	final String k1 = "what is connector?";
	final String v1 = "Hive";
	final String catalogName = "HiveCatalog";

	// set HADOOP_CONF_DIR env
	final File hadoopConfDir = tempFolder.newFolder();
	final File hdfsSiteFile = new File(hadoopConfDir, "hdfs-site.xml");
	writeProperty(hdfsSiteFile, k1, v1);
	final Map<String, String> originalEnv = System.getenv();
	final Map<String, String> newEnv = new HashMap<>(originalEnv);
	newEnv.put("HADOOP_CONF_DIR", hadoopConfDir.getAbsolutePath());
	CommonTestUtils.setEnv(newEnv);

	// create HiveCatalog use the Hadoop Configuration
	final CatalogDescriptor catalogDescriptor = new HiveCatalogDescriptor();
	final Map<String, String> properties = catalogDescriptor.toProperties();
	final HiveConf hiveConf;
	try {
		final HiveCatalog hiveCatalog = (HiveCatalog) TableFactoryService.find(CatalogFactory.class, properties)
			.createCatalog(catalogName, properties);
		hiveConf = hiveCatalog.getHiveConf();
	} finally {
		// set the Env back even if catalog creation fails
		CommonTestUtils.setEnv(originalEnv);
	}
	// validate the result: the marker from hdfs-site.xml must be visible in HiveConf
	assertEquals(v1, hiveConf.get(k1, null));
}
 
Example 30
Source Project: flink   Source File: KafkaTableSourceSinkFactoryBase.java    License: Apache License 2.0
/**
 * Resolves a {@link DeserializationSchema} for the given format properties.
 */
private DeserializationSchema<Row> getDeserializationSchema(Map<String, String> properties) {
	// Use this class's own classloader so formats shipped with the connector
	// can be discovered by the factory service.
	final ClassLoader classLoader = this.getClass().getClassLoader();
	@SuppressWarnings("unchecked")
	final DeserializationSchemaFactory<Row> factory =
		TableFactoryService.find(DeserializationSchemaFactory.class, properties, classLoader);
	return factory.createDeserializationSchema(properties);
}