Java Code Examples for org.apache.flink.table.api.java.StreamTableEnvironment#registerTable()

The following examples show how to use org.apache.flink.table.api.java.StreamTableEnvironment#registerTable(). You can vote up the examples you find helpful or vote down those you don't, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example 1
Source File: JavaSqlITCase.java    From Flink-CEPplus with Apache License 2.0 6 votes vote down vote up
@Test
public void testSelect() throws Exception {
	// Registers a small 3-tuple stream as a table and verifies a trivial
	// "SELECT *" projection returns every row unchanged.
	final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	final StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
	StreamITCase.clear();

	DataStream<Tuple3<Integer, Long, String>> input = JavaStreamTestData.getSmall3TupleDataSet(env);
	tableEnv.registerTable("MyTable", tableEnv.fromDataStream(input, "a,b,c"));

	Table result = tableEnv.sqlQuery("SELECT * FROM MyTable");

	tableEnv.toAppendStream(result, Row.class)
		.addSink(new StreamITCase.StringSink<Row>());
	env.execute();

	List<String> expected = new ArrayList<>();
	expected.add("1,1,Hi");
	expected.add("2,2,Hello");
	expected.add("3,2,Hello world");

	StreamITCase.compareWithList(expected);
}
 
Example 2
Source File: Sort.java    From flink-training-exercises with Apache License 2.0 6 votes vote down vote up
public static void main(String[] args) throws Exception {

		// Event-time sorting demo: an out-of-order event source is exposed as a
		// table with a rowtime attribute, then emitted in timestamp order via SQL.
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

		env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
		env.setParallelism(1);

		DataStream<Event> events = env.addSource(new OutOfOrderEventSource())
				.assignTimestampsAndWatermarks(new TimestampsAndWatermarks());

		// "eventTime.rowtime" exposes the stream's timestamps as a rowtime column.
		tableEnv.registerTable("events", tableEnv.fromDataStream(events, "eventTime.rowtime"));

		Table sorted = tableEnv.sqlQuery("SELECT eventTime FROM events ORDER BY eventTime ASC");
		tableEnv.toAppendStream(sorted, Row.class).print();

		env.execute();
	}
 
Example 3
Source File: KafkaSourceMain.java    From flink-learning with Apache License 2.0 6 votes vote down vote up
public static void main(String[] args) throws Exception {
    // Reads Kafka messages as one-column rows, counts occurrences per word,
    // and writes the (count, word) aggregate to a custom retract sink.
    StreamExecutionEnvironment blinkStreamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    blinkStreamEnv.setParallelism(1);
    // Select the Blink planner in streaming mode (Flink 1.9+ dual-planner setup).
    EnvironmentSettings blinkStreamSettings = EnvironmentSettings.newInstance()
            .useBlinkPlanner()
            .inStreamingMode()
            .build();
    StreamTableEnvironment blinkStreamTableEnv = StreamTableEnvironment.create(blinkStreamEnv, blinkStreamSettings);

    // Kafka connection settings come from the shared ParameterTool-backed config.
    ParameterTool parameterTool = ExecutionEnvUtil.PARAMETER_TOOL;
    Properties properties = KafkaConfigUtil.buildKafkaProps(parameterTool);
    DataStream<String> dataStream = blinkStreamEnv.addSource(new FlinkKafkaConsumer011<>(parameterTool.get("kafka.topic"), new SimpleStringSchema(), properties));
    // Each raw message becomes a single-column ("word") row.
    Table table = blinkStreamTableEnv.fromDataStream(dataStream, "word");
    blinkStreamTableEnv.registerTable("kafkaDataStream", table);

    // A retract sink is needed: GROUP BY over an unbounded stream produces
    // updating results that an append-only sink cannot accept.
    RetractStreamTableSink<Row> retractStreamTableSink = new MyRetractStreamTableSink(new String[]{"_count", "word"}, new DataType[]{DataTypes.BIGINT(), DataTypes.STRING()});
    blinkStreamTableEnv.registerTableSink("sinkTable", retractStreamTableSink);

    Table wordCount = blinkStreamTableEnv.sqlQuery("SELECT count(word) AS _count,word FROM kafkaDataStream GROUP BY word");

    wordCount.insertInto("sinkTable");

    // Table-API pipelines on this planner are launched through the table environment.
    blinkStreamTableEnv.execute("Blink Kafka Table Source");
}
 
Example 4
Source File: JavaSqlITCase.java    From flink with Apache License 2.0 6 votes vote down vote up
@Test
public void testSelect() throws Exception {
	// Identity projection over a registered 3-tuple table.
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
	StreamITCase.clear();

	DataStream<Tuple3<Integer, Long, String>> tuples = JavaStreamTestData.getSmall3TupleDataSet(env);
	Table inputTable = tableEnv.fromDataStream(tuples, "a,b,c");
	tableEnv.registerTable("MyTable", inputTable);

	Table selected = tableEnv.sqlQuery("SELECT * FROM MyTable");

	DataStream<Row> rows = tableEnv.toAppendStream(selected, Row.class);
	rows.addSink(new StreamITCase.StringSink<Row>());
	env.execute();

	List<String> expected = new ArrayList<>();
	expected.add("1,1,Hi");
	expected.add("2,2,Hello");
	expected.add("3,2,Hello world");

	StreamITCase.compareWithList(expected);
}
 
Example 5
Source File: KafkaSourceMain.java    From flink-learning with Apache License 2.0 6 votes vote down vote up
public static void main(String[] args) throws Exception {
    // Word-count over a Kafka topic using the Blink planner; results go to a
    // retract sink because the aggregation emits updates, not appends.
    StreamExecutionEnvironment blinkStreamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    blinkStreamEnv.setParallelism(1);
    // Blink planner, streaming mode (Flink 1.9+ planner selection).
    EnvironmentSettings blinkStreamSettings = EnvironmentSettings.newInstance()
            .useBlinkPlanner()
            .inStreamingMode()
            .build();
    StreamTableEnvironment blinkStreamTableEnv = StreamTableEnvironment.create(blinkStreamEnv, blinkStreamSettings);

    // Kafka consumer properties come from the shared ParameterTool configuration.
    ParameterTool parameterTool = ExecutionEnvUtil.PARAMETER_TOOL;
    Properties properties = KafkaConfigUtil.buildKafkaProps(parameterTool);
    DataStream<String> dataStream = blinkStreamEnv.addSource(new FlinkKafkaConsumer011<>(parameterTool.get("kafka.topic"), new SimpleStringSchema(), properties));
    // Treat each message as a single "word" column.
    Table table = blinkStreamTableEnv.fromDataStream(dataStream, "word");
    blinkStreamTableEnv.registerTable("kafkaDataStream", table);

    // Sink schema: (_count BIGINT, word STRING); must be registered before insertInto.
    RetractStreamTableSink<Row> retractStreamTableSink = new MyRetractStreamTableSink(new String[]{"_count", "word"}, new DataType[]{DataTypes.BIGINT(), DataTypes.STRING()});
    blinkStreamTableEnv.registerTableSink("sinkTable", retractStreamTableSink);

    Table wordCount = blinkStreamTableEnv.sqlQuery("SELECT count(word) AS _count,word FROM kafkaDataStream GROUP BY word");

    wordCount.insertInto("sinkTable");

    blinkStreamTableEnv.execute("Blink Kafka Table Source");
}
 
Example 6
Source File: HBaseConnectorITCase.java    From flink with Apache License 2.0 5 votes vote down vote up
@Test
public void testHBaseLookupFunction() throws Exception {
	// Enriches a streaming source from HBase by registering the connector's
	// lookup function and invoking it through LATERAL TABLE.
	StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
	StreamTableEnvironment streamTableEnv = StreamTableEnvironment.create(streamEnv, streamSettings);
	StreamITCase.clear();

	// prepare a source table
	DataStream<Row> ds = streamEnv.fromCollection(testData2).returns(testTypeInfo2);
	Table in = streamTableEnv.fromDataStream(ds, "a, b, c");
	streamTableEnv.registerTable("src", in);

	// Build an HBaseTableSource from connector properties; the cast is needed
	// because only HBaseTableSource exposes getLookupFunction.
	Map<String, String> tableProperties = hbaseTableProperties();
	TableSource source = TableFactoryService
		.find(HBaseTableFactory.class, tableProperties)
		.createTableSource(tableProperties);

	streamTableEnv.registerFunction("hbaseLookup", ((HBaseTableSource) source).getLookupFunction(new String[]{ROWKEY}));

	// perform a temporal table join query
	String sqlQuery = "SELECT a,family1.col1, family3.col3 FROM src, LATERAL TABLE(hbaseLookup(a))";
	Table result = streamTableEnv.sqlQuery(sqlQuery);

	DataStream<Row> resultSet = streamTableEnv.toAppendStream(result, Row.class);
	resultSet.addSink(new StreamITCase.StringSink<>());

	streamEnv.execute();

	// "3,30,Welt-3" appears twice — presumably testData2 contains key 3 twice;
	// verify against the fixture if this assertion changes.
	List<String> expected = new ArrayList<>();
	expected.add("1,10,Welt-1");
	expected.add("2,20,Welt-2");
	expected.add("3,30,Welt-3");
	expected.add("3,30,Welt-3");

	StreamITCase.compareWithList(expected);
}
 
Example 7
Source File: HBaseConnectorITCase.java    From flink with Apache License 2.0 5 votes vote down vote up
@Test
public void testHBaseLookupTableSource() throws Exception {
	// Temporal table join (FOR SYSTEM_TIME AS OF) against an HBase-backed
	// lookup table source; requires a processing-time attribute on the probe side.
	if (OLD_PLANNER.equals(planner)) {
		// lookup table source is only supported in blink planner, skip for old planner
		return;
	}
	StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
	StreamTableEnvironment streamTableEnv = StreamTableEnvironment.create(streamEnv, streamSettings);
	StreamITCase.clear();

	// prepare a source table
	String srcTableName = "src";
	DataStream<Row> ds = streamEnv.fromCollection(testData2).returns(testTypeInfo2);
	// "proc.proctime" appends the processing-time attribute used in the AS OF clause.
	Table in = streamTableEnv.fromDataStream(ds, "a, b, c, proc.proctime");
	streamTableEnv.registerTable(srcTableName, in);

	// Build the HBase table source from connector properties and register it
	// so the planner can use it as the lookup (build) side of the join.
	Map<String, String> tableProperties = hbaseTableProperties();
	TableSource source = TableFactoryService
		.find(HBaseTableFactory.class, tableProperties)
		.createTableSource(tableProperties);
	streamTableEnv.registerTableSource("hbaseLookup", source);
	// perform a temporal table join query
	String query = "SELECT a,family1.col1, family3.col3 FROM src " +
		"JOIN hbaseLookup FOR SYSTEM_TIME AS OF src.proc as h ON src.a = h.rk";
	Table result = streamTableEnv.sqlQuery(query);

	DataStream<Row> resultSet = streamTableEnv.toAppendStream(result, Row.class);
	resultSet.addSink(new StreamITCase.StringSink<>());

	streamEnv.execute();

	// Duplicate "3,30,Welt-3" — presumably key 3 occurs twice in testData2;
	// confirm against the fixture before changing.
	List<String> expected = new ArrayList<>();
	expected.add("1,10,Welt-1");
	expected.add("2,20,Welt-2");
	expected.add("3,30,Welt-3");
	expected.add("3,30,Welt-3");

	StreamITCase.compareWithList(expected);
}
 
Example 8
Source File: JDBCUpsertTableSinkITCase.java    From flink with Apache License 2.0 5 votes vote down vote up
@Test
public void testUpsert() throws Exception {
	// A two-level GROUP BY produces updating results; this verifies they are
	// correctly upserted into a JDBC table and checks the final DB contents.
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	env.getConfig().enableObjectReuse();
	env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
	StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

	// Event time is derived from the tuple's first field (ascending ids).
	Table t = tEnv.fromDataStream(get3TupleDataStream(env).assignTimestampsAndWatermarks(
			new AscendingTimestampExtractor<Tuple3<Integer, Long, String>>() {
				@Override
				public long extractAscendingTimestamp(Tuple3<Integer, Long, String> element) {
					return element.f0;
				}}), "id, num, text");

	tEnv.registerTable("T", t);

	// Sink schema: cnt BIGINT, lencnt BIGINT, cTag INT — must match the SELECT below.
	String[] fields = {"cnt", "lencnt", "cTag"};
	tEnv.registerTableSink("upsertSink", JDBCUpsertTableSink.builder()
			.setOptions(JDBCOptions.builder()
					.setDBUrl(DB_URL)
					.setTableName(OUTPUT_TABLE1)
					.build())
			.setTableSchema(TableSchema.builder().fields(
					fields, new DataType[] {BIGINT(), BIGINT(), INT()}).build())
			.build());

	// Inner query counts ids per (text length, cTag); outer query counts how
	// many lengths share each (cnt, cTag) — an updating aggregate of an aggregate.
	tEnv.sqlUpdate("INSERT INTO upsertSink SELECT cnt, COUNT(len) AS lencnt, cTag FROM" +
			" (SELECT len, COUNT(id) as cnt, cTag FROM" +
			" (SELECT id, CHAR_LENGTH(text) AS len, (CASE WHEN id > 0 THEN 1 ELSE 0 END) cTag FROM T)" +
			" GROUP BY len, cTag)" +
			" GROUP BY cnt, cTag");
	env.execute();
	// Verify the final (post-retraction) rows directly against the database.
	check(new Row[] {
			Row.of(1, 5, 1),
			Row.of(7, 1, 1),
			Row.of(9, 1, 1)
	}, DB_URL, OUTPUT_TABLE1, fields);
}
 
Example 9
Source File: JDBCUpsertTableSinkITCase.java    From flink with Apache License 2.0 5 votes vote down vote up
@Test
public void testAppend() throws Exception {
	// Appends a filtered projection of T into the JDBC sink and verifies the
	// rows that landed in the database.
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	env.getConfig().enableObjectReuse();
	env.getConfig().setParallelism(1);
	StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

	tableEnv.registerTable("T", tableEnv.fromDataStream(get3TupleDataStream(env), "id, num, text"));

	String[] sinkFields = {"id", "num"};
	TableSchema sinkSchema = TableSchema.builder().fields(
			sinkFields, new DataType[] {INT(), BIGINT()}).build();
	tableEnv.registerTableSink("upsertSink", JDBCUpsertTableSink.builder()
			.setOptions(JDBCOptions.builder()
					.setDBUrl(DB_URL)
					.setTableName(OUTPUT_TABLE2)
					.build())
			.setTableSchema(sinkSchema)
			.build());

	tableEnv.sqlUpdate("INSERT INTO upsertSink SELECT id, num FROM T WHERE id IN (2, 10, 20)");
	env.execute();
	check(new Row[] {
			Row.of(2, 2),
			Row.of(10, 4),
			Row.of(20, 6)
	}, DB_URL, OUTPUT_TABLE2, sinkFields);
}
 
Example 10
Source File: JavaSqlITCase.java    From flink with Apache License 2.0 5 votes vote down vote up
@Test
public void testRowRegisterRowWithNames() throws Exception {
	// Registers a Row-typed stream with explicit field names and projects two columns.
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
	StreamITCase.clear();

	List<Row> data = new ArrayList<>();
	data.add(Row.of(1, 1L, "Hi"));
	data.add(Row.of(2, 2L, "Hello"));
	data.add(Row.of(3, 2L, "Hello world"));

	RowTypeInfo rowType = new RowTypeInfo(
			new TypeInformation<?>[] {
					BasicTypeInfo.INT_TYPE_INFO,
					BasicTypeInfo.LONG_TYPE_INFO,
					BasicTypeInfo.STRING_TYPE_INFO},
			new String[] {"a", "b", "c"});

	DataStream<Row> rows = env.fromCollection(data).returns(rowType);

	tableEnv.registerTable("MyTableRow", tableEnv.fromDataStream(rows, "a,b,c"));

	Table projected = tableEnv.sqlQuery("SELECT a,c FROM MyTableRow");

	tableEnv.toAppendStream(projected, Row.class)
			.addSink(new StreamITCase.StringSink<Row>());
	env.execute();

	List<String> expected = new ArrayList<>();
	expected.add("1,Hi");
	expected.add("2,Hello");
	expected.add("3,Hello world");

	StreamITCase.compareWithList(expected);
}
 
Example 11
Source File: JavaSqlITCase.java    From flink with Apache License 2.0 5 votes vote down vote up
@Test
public void testUnion() throws Exception {
	// UNION ALL between a registered Table and a registered DataStream.
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
	StreamITCase.clear();

	DataStream<Tuple3<Integer, Long, String>> smallTuples = JavaStreamTestData.getSmall3TupleDataSet(env);
	tableEnv.registerTable("T1", tableEnv.fromDataStream(smallTuples, "a,b,c"));

	DataStream<Tuple5<Integer, Long, Integer, String, Long>> wideTuples = JavaStreamTestData.get5TupleDataStream(env);
	tableEnv.registerDataStream("T2", wideTuples, "a, b, d, c, e");

	String sqlQuery = "SELECT * FROM T1 " +
						"UNION ALL " +
						"(SELECT a, b, c FROM T2 WHERE a	< 3)";
	Table unioned = tableEnv.sqlQuery(sqlQuery);

	tableEnv.toAppendStream(unioned, Row.class)
			.addSink(new StreamITCase.StringSink<Row>());
	env.execute();

	List<String> expected = new ArrayList<>();
	expected.add("1,1,Hi");
	expected.add("2,2,Hello");
	expected.add("3,2,Hello world");
	expected.add("1,1,Hallo");
	expected.add("2,2,Hallo Welt");
	expected.add("2,3,Hallo Welt wie");

	StreamITCase.compareWithList(expected);
}
 
Example 12
Source File: JavaSqlITCase.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
@Test
public void testUnion() throws Exception {
	// Verifies UNION ALL across two differently-shaped registered inputs.
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
	StreamITCase.clear();

	Table t1 = tableEnv.fromDataStream(JavaStreamTestData.getSmall3TupleDataSet(env), "a,b,c");
	tableEnv.registerTable("T1", t1);

	tableEnv.registerDataStream("T2", JavaStreamTestData.get5TupleDataStream(env), "a, b, d, c, e");

	Table result = tableEnv.sqlQuery(
			"SELECT * FROM T1 " +
						"UNION ALL " +
						"(SELECT a, b, c FROM T2 WHERE a	< 3)");

	DataStream<Row> resultSet = tableEnv.toAppendStream(result, Row.class);
	resultSet.addSink(new StreamITCase.StringSink<Row>());
	env.execute();

	List<String> expected = new ArrayList<>();
	expected.add("1,1,Hi");
	expected.add("2,2,Hello");
	expected.add("3,2,Hello world");
	expected.add("1,1,Hallo");
	expected.add("2,2,Hallo Welt");
	expected.add("2,3,Hallo Welt wie");

	StreamITCase.compareWithList(expected);
}
 
Example 13
Source File: JavaSqlITCase.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
@Test
public void testRowRegisterRowWithNames() throws Exception {
	// Builds a RowTypeInfo with named fields and checks SQL projection over it.
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
	StreamITCase.clear();

	List<Row> rows = new ArrayList<>();
	rows.add(Row.of(1, 1L, "Hi"));
	rows.add(Row.of(2, 2L, "Hello"));
	rows.add(Row.of(3, 2L, "Hello world"));

	TypeInformation<?>[] fieldTypes = {
			BasicTypeInfo.INT_TYPE_INFO,
			BasicTypeInfo.LONG_TYPE_INFO,
			BasicTypeInfo.STRING_TYPE_INFO};
	String[] fieldNames = {"a", "b", "c"};

	DataStream<Row> ds = env.fromCollection(rows).returns(new RowTypeInfo(fieldTypes, fieldNames));

	Table in = tableEnv.fromDataStream(ds, "a,b,c");
	tableEnv.registerTable("MyTableRow", in);

	Table result = tableEnv.sqlQuery("SELECT a,c FROM MyTableRow");

	DataStream<Row> resultSet = tableEnv.toAppendStream(result, Row.class);
	resultSet.addSink(new StreamITCase.StringSink<Row>());
	env.execute();

	List<String> expected = new ArrayList<>();
	expected.add("1,Hi");
	expected.add("2,Hello");
	expected.add("3,Hello world");

	StreamITCase.compareWithList(expected);
}
 
Example 14
Source File: Sort.java    From flink-learning with Apache License 2.0 4 votes vote down vote up
public static void main(String[] args) throws Exception {
        // Sorts an out-of-order event stream by event time via a SQL ORDER BY.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
        env.setParallelism(1);

        SingleOutputStreamOperator<Event> source = env.addSource(new OutOfOrderEventSource())
                .assignTimestampsAndWatermarks(new TimestampsAndWatermarks());

        // "eventTime.rowtime" exposes the stream's timestamps as a rowtime attribute.
        Table table = tableEnv.fromDataStream(source, "eventTime.rowtime");

        tableEnv.registerTable("zhisheng", table);
        Table sorted = tableEnv.sqlQuery("select eventTime from zhisheng order by eventTime");
        DataStream<Row> rowDataStream = tableEnv.toAppendStream(sorted, Row.class);

        rowDataStream.print();

        // Print the execution plan (kept for debugging):
//        System.out.println(env.getExecutionPlan());

        env.execute("sort-streaming-data");

    }
 
Example 15
Source File: JDBCLookupFunctionITCase.java    From flink with Apache License 2.0 4 votes vote down vote up
@Test
public void test() throws Exception {
	// Joins a tuple stream with a JDBC-backed lookup function via LATERAL TABLE.
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
	StreamITCase.clear();

	// Probe side: (id1, id2) pairs, including duplicates and keys with no DB match.
	Table t = tEnv.fromDataStream(env.fromCollection(Arrays.asList(
				new Tuple2<>(1, 1),
				new Tuple2<>(1, 1),
				new Tuple2<>(2, 3),
				new Tuple2<>(2, 5),
				new Tuple2<>(3, 5),
				new Tuple2<>(3, 8)
			)), "id1, id2");

	tEnv.registerTable("T", t);

	// JDBC lookup source over LOOKUP_TABLE with a 4-column schema.
	JDBCTableSource.Builder builder = JDBCTableSource.builder()
			.setOptions(JDBCOptions.builder()
					.setDBUrl(DB_URL)
					.setTableName(LOOKUP_TABLE)
					.build())
			.setSchema(TableSchema.builder().fields(
					new String[]{"id1", "id2", "comment1", "comment2"},
					new DataType[]{DataTypes.INT(), DataTypes.INT(), DataTypes.STRING(), DataTypes.STRING()})
					.build());
	// The test is parameterized on caching; results must be identical either way.
	if (useCache) {
		builder.setLookupOptions(JDBCLookupOptions.builder()
				.setCacheMaxSize(1000).setCacheExpireMs(1000 * 1000).build());
	}
	tEnv.registerFunction("jdbcLookup",
			builder.build().getLookupFunction(t.getSchema().getFieldNames()));

	String sqlQuery = "SELECT id1, id2, comment1, comment2 FROM T, " +
			"LATERAL TABLE(jdbcLookup(id1, id2)) AS S(l_id1, l_id2, comment1, comment2)";
	Table result = tEnv.sqlQuery(sqlQuery);

	DataStream<Row> resultSet = tEnv.toAppendStream(result, Row.class);
	resultSet.addSink(new StreamITCase.StringSink<>());
	env.execute();

	// Expected rows mirror the LOOKUP_TABLE fixture; input pair (3,5) yields no
	// output row — presumably it has no match in the fixture (confirm there).
	List<String> expected = new ArrayList<>();
	expected.add("1,1,11-c1-v1,11-c2-v1");
	expected.add("1,1,11-c1-v1,11-c2-v1");
	expected.add("1,1,11-c1-v2,11-c2-v2");
	expected.add("1,1,11-c1-v2,11-c2-v2");
	expected.add("2,3,null,23-c2");
	expected.add("2,5,25-c1,25-c2");
	expected.add("3,8,38-c1,38-c2");

	StreamITCase.compareWithList(expected);
}
 
Example 16
Source File: Sort.java    From flink-learning with Apache License 2.0 4 votes vote down vote up
public static void main(String[] args) throws Exception {
        // Event-time sort demo: out-of-order events are re-emitted in timestamp order.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
        env.setParallelism(1);

        SingleOutputStreamOperator<Event> source = env.addSource(new OutOfOrderEventSource())
                .assignTimestampsAndWatermarks(new TimestampsAndWatermarks());

        // Expose the stream's timestamps as a rowtime attribute named "eventTime".
        Table table = tableEnv.fromDataStream(source, "eventTime.rowtime");

        tableEnv.registerTable("zhisheng", table);
        Table sorted = tableEnv.sqlQuery("select eventTime from zhisheng order by eventTime");
        DataStream<Row> rowDataStream = tableEnv.toAppendStream(sorted, Row.class);

        rowDataStream.print();

        // Print the execution plan (kept for debugging):
//        System.out.println(env.getExecutionPlan());

        env.execute("sort-streaming-data");

    }
 
Example 17
Source File: StreamSqlOperators.java    From Alink with Apache License 2.0 3 votes vote down vote up
/**
 * Register the output table of a StreamOperator to the {@link StreamTableEnvironment}
 * with a temporary table name.
 *
 * @param streamOp The StreamOperator whose output table is being registered.
 * @return The temporary table name.
 */
private static String registerTempTable(StreamOperator streamOp) {
    // NOTE(review): getMLEnv presumably resolves the ML environment bound to this
    // operator, so the temp table lands in the same table environment — confirm.
    StreamTableEnvironment tEnv = getMLEnv(streamOp).getStreamTableEnvironment();
    String tmpTableName = TableUtil.getTempTableName();
    tEnv.registerTable(tmpTableName, streamOp.getOutputTable());
    return tmpTableName;
}