Java Code Examples for org.apache.flink.table.api.java.StreamTableEnvironment#toAppendStream()
The following examples show how to use
org.apache.flink.table.api.java.StreamTableEnvironment#toAppendStream().
You can vote up the examples you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example 1
Source File: JavaSqlITCase.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
@Test
public void testSelect() throws Exception {
    // Set up the streaming execution and table environments.
    StreamExecutionEnvironment execEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    StreamTableEnvironment tableEnv = StreamTableEnvironment.create(execEnv);
    StreamITCase.clear();

    // Register the small 3-tuple test stream as table "MyTable" with columns a, b, c.
    DataStream<Tuple3<Integer, Long, String>> input = JavaStreamTestData.getSmall3TupleDataSet(execEnv);
    Table inputTable = tableEnv.fromDataStream(input, "a,b,c");
    tableEnv.registerTable("MyTable", inputTable);

    // Run a plain projection query and convert the result back into an append-only stream.
    Table queryResult = tableEnv.sqlQuery("SELECT * FROM MyTable");
    DataStream<Row> rows = tableEnv.toAppendStream(queryResult, Row.class);
    rows.addSink(new StreamITCase.StringSink<Row>());
    execEnv.execute();

    // SELECT * must emit every input record unchanged.
    List<String> expected = new ArrayList<>();
    expected.add("1,1,Hi");
    expected.add("2,2,Hello");
    expected.add("3,2,Hello world");
    StreamITCase.compareWithList(expected);
}
Example 2
Source File: JavaSqlITCase.java From flink with Apache License 2.0 | 6 votes |
@Test
public void testFilter() throws Exception {
    // Streaming environment plus table environment for the SQL query.
    StreamExecutionEnvironment execEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    StreamTableEnvironment tableEnv = StreamTableEnvironment.create(execEnv);
    StreamITCase.clear();

    // Register the 5-tuple test stream directly as table "MyTable".
    DataStream<Tuple5<Integer, Long, Integer, String, Long>> input =
        JavaStreamTestData.get5TupleDataStream(execEnv);
    tableEnv.registerDataStream("MyTable", input, "a, b, c, d, e");

    // Project three columns and filter on c.
    Table filtered = tableEnv.sqlQuery("SELECT a, b, e FROM MyTable WHERE c < 4");
    DataStream<Row> rows = tableEnv.toAppendStream(filtered, Row.class);
    rows.addSink(new StreamITCase.StringSink<Row>());
    execEnv.execute();

    // Only the rows with c < 4 survive the filter.
    List<String> expected = new ArrayList<>();
    expected.add("1,1,1");
    expected.add("2,2,2");
    expected.add("2,3,1");
    expected.add("3,4,2");
    StreamITCase.compareWithList(expected);
}
Example 3
Source File: JavaSqlITCase.java From flink with Apache License 2.0 | 6 votes |
@Test
public void testSelect() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
    StreamITCase.clear();

    // Small 3-tuple fixture, exposed as table "MyTable" (columns a, b, c).
    DataStream<Tuple3<Integer, Long, String>> source = JavaStreamTestData.getSmall3TupleDataSet(env);
    Table sourceTable = tEnv.fromDataStream(source, "a,b,c");
    tEnv.registerTable("MyTable", sourceTable);

    // Identity projection: the append stream must replay every input row.
    String sql = "SELECT * FROM MyTable";
    DataStream<Row> output = tEnv.toAppendStream(tEnv.sqlQuery(sql), Row.class);
    output.addSink(new StreamITCase.StringSink<Row>());
    env.execute();

    List<String> expected = new ArrayList<>();
    expected.add("1,1,Hi");
    expected.add("2,2,Hello");
    expected.add("3,2,Hello world");
    StreamITCase.compareWithList(expected);
}
Example 4
Source File: Sort.java From flink-training-exercises with Apache License 2.0 | 6 votes |
/**
 * Sorts an out-of-order event stream by event time using a streaming SQL ORDER BY
 * and prints the sorted timestamps.
 */
public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    StreamTableEnvironment tableEnv = StreamTableEnvironment.create(streamEnv);
    // Event-time semantics and parallelism 1 so the printed order is deterministic.
    streamEnv.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
    streamEnv.setParallelism(1);

    // Source emits events out of order; watermarks make event-time sorting possible.
    DataStream<Event> events = streamEnv
        .addSource(new OutOfOrderEventSource())
        .assignTimestampsAndWatermarks(new TimestampsAndWatermarks());

    // Expose only the rowtime attribute as table "events".
    Table eventsTable = tableEnv.fromDataStream(events, "eventTime.rowtime");
    tableEnv.registerTable("events", eventsTable);

    // ORDER BY on the rowtime attribute is the one sort streaming SQL supports.
    Table ordered = tableEnv.sqlQuery("SELECT eventTime FROM events ORDER BY eventTime ASC");
    DataStream<Row> orderedRows = tableEnv.toAppendStream(ordered, Row.class);
    orderedRows.print();
    streamEnv.execute();
}
Example 5
Source File: JavaSqlITCase.java From Flink-CEPplus with Apache License 2.0 | 6 votes |
@Test
public void testFilter() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
    StreamITCase.clear();

    // 5-tuple fixture registered straight from the stream as "MyTable".
    DataStream<Tuple5<Integer, Long, Integer, String, Long>> source =
        JavaStreamTestData.get5TupleDataStream(env);
    tEnv.registerDataStream("MyTable", source, "a, b, c, d, e");

    String sql = "SELECT a, b, e FROM MyTable WHERE c < 4";
    Table filtered = tEnv.sqlQuery(sql);
    DataStream<Row> output = tEnv.toAppendStream(filtered, Row.class);
    output.addSink(new StreamITCase.StringSink<Row>());
    env.execute();

    // Expect exactly the projected rows whose c column is below 4.
    List<String> expected = new ArrayList<>();
    expected.add("1,1,1");
    expected.add("2,2,2");
    expected.add("2,3,1");
    expected.add("3,4,2");
    StreamITCase.compareWithList(expected);
}
Example 6
Source File: JavaSqlITCase.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
@Test
public void testUnion() throws Exception {
    StreamExecutionEnvironment execEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    StreamTableEnvironment tableEnv = StreamTableEnvironment.create(execEnv);
    StreamITCase.clear();

    // First operand: small 3-tuple stream as table "T1" (a, b, c).
    DataStream<Tuple3<Integer, Long, String>> left = JavaStreamTestData.getSmall3TupleDataSet(execEnv);
    Table leftTable = tableEnv.fromDataStream(left, "a,b,c");
    tableEnv.registerTable("T1", leftTable);

    // Second operand: 5-tuple stream as "T2"; note d and c are swapped in the mapping.
    DataStream<Tuple5<Integer, Long, Integer, String, Long>> right =
        JavaStreamTestData.get5TupleDataStream(execEnv);
    tableEnv.registerDataStream("T2", right, "a, b, d, c, e");

    // UNION ALL of T1 with a filtered projection of T2.
    Table unioned = tableEnv.sqlQuery(
        "SELECT * FROM T1 UNION ALL (SELECT a, b, c FROM T2 WHERE a < 3)");
    DataStream<Row> output = tableEnv.toAppendStream(unioned, Row.class);
    output.addSink(new StreamITCase.StringSink<Row>());
    execEnv.execute();

    // All T1 rows plus the T2 rows with a < 3.
    List<String> expected = new ArrayList<>();
    expected.add("1,1,Hi");
    expected.add("2,2,Hello");
    expected.add("3,2,Hello world");
    expected.add("1,1,Hallo");
    expected.add("2,2,Hallo Welt");
    expected.add("2,3,Hallo Welt wie");
    StreamITCase.compareWithList(expected);
}
Example 7
Source File: HBaseConnectorITCase.java From flink with Apache License 2.0 | 5 votes |
/**
 * Verifies a temporal-table (lookup) join against an HBase-backed table source:
 * a small in-memory stream is joined FOR SYSTEM_TIME AS OF its proctime against
 * the HBase lookup table keyed by rowkey {@code rk}.
 */
@Test
public void testHBaseLookupTableSource() throws Exception {
    if (OLD_PLANNER.equals(planner)) {
        // lookup table source is only supported in blink planner, skip for old planner
        return;
    }
    StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    StreamTableEnvironment streamTableEnv = StreamTableEnvironment.create(streamEnv, streamSettings);
    StreamITCase.clear();

    // prepare a source table: collection-backed stream with a processing-time attribute
    // so the temporal join has a time context
    String srcTableName = "src";
    DataStream<Row> ds = streamEnv.fromCollection(testData2).returns(testTypeInfo2);
    Table in = streamTableEnv.fromDataStream(ds, "a, b, c, proc.proctime");
    streamTableEnv.registerTable(srcTableName, in);

    // build the HBase lookup source through the table factory discovery mechanism
    Map<String, String> tableProperties = hbaseTableProperties();
    TableSource source = TableFactoryService
        .find(HBaseTableFactory.class, tableProperties)
        .createTableSource(tableProperties);
    streamTableEnv.registerTableSource("hbaseLookup", source);

    // perform a temporal table join query: src.a is looked up against the HBase rowkey h.rk
    String query = "SELECT a,family1.col1, family3.col3 FROM src " +
        "JOIN hbaseLookup FOR SYSTEM_TIME AS OF src.proc as h ON src.a = h.rk";
    Table result = streamTableEnv.sqlQuery(query);

    DataStream<Row> resultSet = streamTableEnv.toAppendStream(result, Row.class);
    resultSet.addSink(new StreamITCase.StringSink<>());
    streamEnv.execute();

    // "3" appears twice in the input, so its joined row is expected twice.
    List<String> expected = new ArrayList<>();
    expected.add("1,10,Welt-1");
    expected.add("2,20,Welt-2");
    expected.add("3,30,Welt-3");
    expected.add("3,30,Welt-3");
    StreamITCase.compareWithList(expected);
}
Example 8
Source File: JavaSqlITCase.java From flink with Apache License 2.0 | 5 votes |
@Test
public void testRowRegisterRowWithNames() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
    StreamITCase.clear();

    // Build the fixture rows by hand instead of using a tuple helper.
    List<Row> rows = new ArrayList<>();
    rows.add(Row.of(1, 1L, "Hi"));
    rows.add(Row.of(2, 2L, "Hello"));
    rows.add(Row.of(3, 2L, "Hello world"));

    // Explicit RowTypeInfo carries both field types and field names.
    TypeInformation<?>[] fieldTypes = {
        BasicTypeInfo.INT_TYPE_INFO,
        BasicTypeInfo.LONG_TYPE_INFO,
        BasicTypeInfo.STRING_TYPE_INFO};
    String[] fieldNames = {"a", "b", "c"};
    RowTypeInfo rowType = new RowTypeInfo(fieldTypes, fieldNames);

    DataStream<Row> source = env.fromCollection(rows).returns(rowType);
    Table sourceTable = tEnv.fromDataStream(source, "a,b,c");
    tEnv.registerTable("MyTableRow", sourceTable);

    // Project two of the three named columns.
    Table projected = tEnv.sqlQuery("SELECT a,c FROM MyTableRow");
    DataStream<Row> output = tEnv.toAppendStream(projected, Row.class);
    output.addSink(new StreamITCase.StringSink<Row>());
    env.execute();

    List<String> expected = new ArrayList<>();
    expected.add("1,Hi");
    expected.add("2,Hello");
    expected.add("3,Hello world");
    StreamITCase.compareWithList(expected);
}
Example 9
Source File: JavaSqlITCase.java From Flink-CEPplus with Apache License 2.0 | 5 votes |
@Test
public void testRowRegisterRowWithNames() throws Exception {
    StreamExecutionEnvironment execEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    StreamTableEnvironment tableEnv = StreamTableEnvironment.create(execEnv);
    StreamITCase.clear();

    // Hand-built Row fixture.
    List<Row> fixture = new ArrayList<>();
    fixture.add(Row.of(1, 1L, "Hi"));
    fixture.add(Row.of(2, 2L, "Hello"));
    fixture.add(Row.of(3, 2L, "Hello world"));

    // Row streams need an explicit RowTypeInfo; it also supplies the field names.
    TypeInformation<?>[] columnTypes = {
        BasicTypeInfo.INT_TYPE_INFO,
        BasicTypeInfo.LONG_TYPE_INFO,
        BasicTypeInfo.STRING_TYPE_INFO};
    String[] columnNames = {"a", "b", "c"};
    RowTypeInfo typeInfo = new RowTypeInfo(columnTypes, columnNames);

    DataStream<Row> stream = execEnv.fromCollection(fixture).returns(typeInfo);
    Table registered = tableEnv.fromDataStream(stream, "a,b,c");
    tableEnv.registerTable("MyTableRow", registered);

    // Query by the registered column names.
    DataStream<Row> output =
        tableEnv.toAppendStream(tableEnv.sqlQuery("SELECT a,c FROM MyTableRow"), Row.class);
    output.addSink(new StreamITCase.StringSink<Row>());
    execEnv.execute();

    List<String> expected = new ArrayList<>();
    expected.add("1,Hi");
    expected.add("2,Hello");
    expected.add("3,Hello world");
    StreamITCase.compareWithList(expected);
}
Example 10
Source File: JavaSqlITCase.java From flink with Apache License 2.0 | 5 votes |
@Test
public void testUnion() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
    StreamITCase.clear();

    // Left union operand: table "T1" from the 3-tuple fixture.
    DataStream<Tuple3<Integer, Long, String>> threeTuples =
        JavaStreamTestData.getSmall3TupleDataSet(env);
    tEnv.registerTable("T1", tEnv.fromDataStream(threeTuples, "a,b,c"));

    // Right union operand: table "T2" from the 5-tuple fixture (d and c swapped on purpose).
    DataStream<Tuple5<Integer, Long, Integer, String, Long>> fiveTuples =
        JavaStreamTestData.get5TupleDataStream(env);
    tEnv.registerDataStream("T2", fiveTuples, "a, b, d, c, e");

    String sql = "SELECT * FROM T1 UNION ALL (SELECT a, b, c FROM T2 WHERE a < 3)";
    Table unionResult = tEnv.sqlQuery(sql);
    DataStream<Row> output = tEnv.toAppendStream(unionResult, Row.class);
    output.addSink(new StreamITCase.StringSink<Row>());
    env.execute();

    // Every T1 row plus the filtered T2 rows.
    List<String> expected = new ArrayList<>();
    expected.add("1,1,Hi");
    expected.add("2,2,Hello");
    expected.add("3,2,Hello world");
    expected.add("1,1,Hallo");
    expected.add("2,2,Hallo Welt");
    expected.add("2,3,Hallo Welt wie");
    StreamITCase.compareWithList(expected);
}
Example 11
Source File: SideStream.java From alchemy with Apache License 2.0 | 5 votes |
/**
 * Builds the result stream of a SQL join between a main ("left") stream table and a
 * side (dimension) table, dispatching to an async or sync lookup function depending
 * on the side source configuration.
 *
 * @param env        the table environment used to run the rewritten left-side query
 * @param sqlSelect  the original SELECT containing the join of left and side tables
 * @param leftAlias  table name/alias of the main stream side of the join
 * @param sideAlias  table name/alias of the side (lookup) table
 * @param sideSource descriptor of the side table source (schema, side options, transform)
 * @return the joined stream, with its output type forced to the combined row type
 * @throws Exception if query rewriting or side-table construction fails
 */
public static DataStream<Row> buildStream(StreamTableEnvironment env, SqlSelect sqlSelect,
        Alias leftAlias, Alias sideAlias, SourceDescriptor sideSource) throws Exception {
    // Rewrite the original query so it only selects from the left (stream) table.
    SqlSelect leftSelect = SideParser.newSelect(sqlSelect, leftAlias.getTable(), leftAlias.getAlias(), true, false);
    // register leftTable
    Table leftTable = env.sqlQuery(leftSelect.toString());
    DataStream<Row> leftStream = env.toAppendStream(leftTable, Row.class);

    // Rewrite again for the side table, and pull the join condition's key fields.
    SqlSelect rightSelect = SideParser.newSelect(sqlSelect, sideAlias.getTable(), sideAlias.getAlias(), false, false);
    SqlJoin sqlJoin = (SqlJoin)sqlSelect.getFrom();
    List<String> equalFields = SideParser.findConditionFields(sqlJoin.getCondition(), leftAlias.getAlias());

    // Partitioned side sources require the stream keyed by the join fields so each
    // subtask only looks up its own key range.
    if (sideSource.getSide().isPartition()) {
        leftStream = leftStream.keyBy(equalFields.toArray(new String[equalFields.size()]));
    }

    // Row types: the side table's projected schema, and left + side combined.
    RowTypeInfo sideType = createSideType(rightSelect.getSelectList(), sideSource.getSchema());
    RowTypeInfo returnType = createReturnType(leftTable.getSchema(), sideType);
    SideTable sideTable = createSideTable(leftTable.getSchema(), sideType, sqlJoin.getJoinType(),
        rightSelect, equalFields, sideAlias, sideSource.getSide());

    DataStream<Row> returnStream;
    if (sideSource.getSide().isAsync()) {
        // Async lookup: ordered wait preserves input order at the cost of latency.
        AbstractAsyncSideFunction reqRow = sideSource.transform(sideTable);
        returnStream = AsyncDataStream.orderedWait(leftStream, reqRow,
            sideSource.getSide().getTimeout(), TimeUnit.MILLISECONDS,
            sideSource.getSide().getCapacity());
    } else {
        // Sync lookup: plain flatMap, one blocking lookup per record.
        AbstractSyncSideFunction syncReqRow = sideSource.transform(sideTable);
        returnStream = leftStream.flatMap(syncReqRow);
    }
    // The lookup functions are not typed, so the output type must be set explicitly.
    returnStream.getTransformation().setOutputType(returnType);
    return returnStream;
}
Example 12
Source File: StreamSQLTestProgram.java From Flink-CEPplus with Apache License 2.0 | 4 votes |
/**
 * End-to-end streaming SQL test job: generates two event-time tables, runs an
 * OVER window, a tumbling-window aggregation, a time-windowed join, and a final
 * tumbling aggregation, then writes the result to a checkpoint-rolled file sink.
 * A KillMapper deliberately fails the first execution attempt to exercise
 * restart/recovery; the fixed-delay restart strategy resumes the job.
 */
public static void main(String[] args) throws Exception {
    ParameterTool params = ParameterTool.fromArgs(args);
    String outputPath = params.getRequired("outputPath");

    StreamExecutionEnvironment sEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    // Survive the intentional KillMapper failure: up to 3 restarts, 10s apart.
    sEnv.setRestartStrategy(RestartStrategies.fixedDelayRestart(
        3,
        Time.of(10, TimeUnit.SECONDS)
    ));
    sEnv.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
    sEnv.enableCheckpointing(4000);
    sEnv.getConfig().setAutoWatermarkInterval(1000);

    StreamTableEnvironment tEnv = StreamTableEnvironment.create(sEnv);
    tEnv.registerTableSource("table1", new GeneratorTableSource(10, 100, 60, 0));
    tEnv.registerTableSource("table2", new GeneratorTableSource(5, 0.2f, 60, 5));

    int overWindowSizeSeconds = 1;
    int tumbleWindowSizeSeconds = 10;

    // Per-key sliding count over the last second of table1.
    String overQuery = String.format(
        "SELECT " +
        " key, " +
        " rowtime, " +
        " COUNT(*) OVER (PARTITION BY key ORDER BY rowtime RANGE BETWEEN INTERVAL '%d' SECOND PRECEDING AND CURRENT ROW) AS cnt " +
        "FROM table1",
        overWindowSizeSeconds);

    // Tumbling window over the OVER-window result; "correct" encodes a sanity check
    // (average cnt of 101 => 1, anything else => 99).
    String tumbleQuery = String.format(
        "SELECT " +
        " key, " +
        " CASE SUM(cnt) / COUNT(*) WHEN 101 THEN 1 ELSE 99 END AS correct, " +
        " TUMBLE_START(rowtime, INTERVAL '%d' SECOND) AS wStart, " +
        " TUMBLE_ROWTIME(rowtime, INTERVAL '%d' SECOND) AS rowtime " +
        "FROM (%s) " +
        "WHERE rowtime > TIMESTAMP '1970-01-01 00:00:01' " +
        "GROUP BY key, TUMBLE(rowtime, INTERVAL '%d' SECOND)",
        tumbleWindowSizeSeconds,
        tumbleWindowSizeSeconds,
        overQuery,
        tumbleWindowSizeSeconds);

    // Event-time-bounded join of table2 against the tumbling aggregation.
    String joinQuery = String.format(
        "SELECT " +
        " t1.key, " +
        " t2.rowtime AS rowtime, " +
        " t2.correct," +
        " t2.wStart " +
        "FROM table2 t1, (%s) t2 " +
        "WHERE " +
        " t1.key = t2.key AND " +
        " t1.rowtime BETWEEN t2.rowtime AND t2.rowtime + INTERVAL '%d' SECOND",
        tumbleQuery,
        tumbleWindowSizeSeconds);

    // Final 20-second aggregation of the join result.
    String finalAgg = String.format(
        "SELECT " +
        " SUM(correct) AS correct, " +
        " TUMBLE_START(rowtime, INTERVAL '20' SECOND) AS rowtime " +
        "FROM (%s) " +
        "GROUP BY TUMBLE(rowtime, INTERVAL '20' SECOND)",
        joinQuery);

    // get Table for SQL query
    Table result = tEnv.sqlQuery(finalAgg);
    // convert Table into append-only DataStream
    DataStream<Row> resultStream =
        tEnv.toAppendStream(result, Types.ROW(Types.INT, Types.SQL_TIMESTAMP));

    // File sink that rolls on checkpoints so output is exactly-once on restart.
    final StreamingFileSink<Row> sink = StreamingFileSink
        .forRowFormat(new Path(outputPath), (Encoder<Row>) (element, stream) -> {
            PrintStream out = new PrintStream(stream);
            out.println(element.toString());
        })
        .withBucketAssigner(new KeyBucketAssigner())
        .withRollingPolicy(OnCheckpointRollingPolicy.build())
        .build();

    resultStream
        // inject a KillMapper that forwards all records but terminates the first execution attempt
        .map(new KillMapper()).setParallelism(1)
        // add sink function
        .addSink(sink).setParallelism(1);

    sEnv.execute();
}
Example 13
Source File: Sort.java From flink-learning with Apache License 2.0 | 4 votes |
public static void main(String[] args) throws Exception { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env); env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime); env.setParallelism(1); SingleOutputStreamOperator<Event> source = env.addSource(new OutOfOrderEventSource()) .assignTimestampsAndWatermarks(new TimestampsAndWatermarks()); Table table = tableEnv.fromDataStream(source, "eventTime.rowtime"); tableEnv.registerTable("zhisheng", table); Table sorted = tableEnv.sqlQuery("select eventTime from zhisheng order by eventTime"); DataStream<Row> rowDataStream = tableEnv.toAppendStream(sorted, Row.class); rowDataStream.print(); //把执行计划打印出来 // System.out.println(env.getExecutionPlan()); env.execute("sort-streaming-data"); }
Example 14
Source File: JDBCLookupFunctionITCase.java From flink with Apache License 2.0 | 4 votes |
/**
 * Verifies the JDBC lookup (LATERAL TABLE) function: each (id1, id2) pair of the
 * input stream is joined against the JDBC lookup table, optionally through the
 * lookup cache when {@code useCache} is set.
 */
@Test
public void test() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
    StreamITCase.clear();

    // Input stream of key pairs; duplicates ("1,1" twice) exercise repeated lookups.
    Table t = tEnv.fromDataStream(env.fromCollection(Arrays.asList(
        new Tuple2<>(1, 1),
        new Tuple2<>(1, 1),
        new Tuple2<>(2, 3),
        new Tuple2<>(2, 5),
        new Tuple2<>(3, 5),
        new Tuple2<>(3, 8)
    )), "id1, id2");
    tEnv.registerTable("T", t);

    // JDBC source over the lookup table with a fixed four-column schema.
    JDBCTableSource.Builder builder = JDBCTableSource.builder()
        .setOptions(JDBCOptions.builder()
            .setDBUrl(DB_URL)
            .setTableName(LOOKUP_TABLE)
            .build())
        .setSchema(TableSchema.builder().fields(
            new String[]{"id1", "id2", "comment1", "comment2"},
            new DataType[]{DataTypes.INT(), DataTypes.INT(), DataTypes.STRING(), DataTypes.STRING()})
            .build());
    if (useCache) {
        // Cache variant of the parameterized test: large cap, long expiry.
        builder.setLookupOptions(JDBCLookupOptions.builder()
            .setCacheMaxSize(1000).setCacheExpireMs(1000 * 1000).build());
    }
    tEnv.registerFunction("jdbcLookup",
        builder.build().getLookupFunction(t.getSchema().getFieldNames()));

    // LATERAL TABLE applies the lookup function per input row.
    String sqlQuery = "SELECT id1, id2, comment1, comment2 FROM T, " +
        "LATERAL TABLE(jdbcLookup(id1, id2)) AS S(l_id1, l_id2, comment1, comment2)";
    Table result = tEnv.sqlQuery(sqlQuery);

    DataStream<Row> resultSet = tEnv.toAppendStream(result, Row.class);
    resultSet.addSink(new StreamITCase.StringSink<>());
    env.execute();

    // Note: (3,5) has no match and is dropped; (1,1) matches two DB rows each time,
    // and "2,3" has a NULL comment1.
    List<String> expected = new ArrayList<>();
    expected.add("1,1,11-c1-v1,11-c2-v1");
    expected.add("1,1,11-c1-v1,11-c2-v1");
    expected.add("1,1,11-c1-v2,11-c2-v2");
    expected.add("1,1,11-c1-v2,11-c2-v2");
    expected.add("2,3,null,23-c2");
    expected.add("2,5,25-c1,25-c2");
    expected.add("3,8,38-c1,38-c2");
    StreamITCase.compareWithList(expected);
}
Example 15
Source File: StreamSQLTestProgram.java From flink with Apache License 2.0 | 4 votes |
/**
 * End-to-end streaming SQL test job (planner-selectable variant): generates two
 * event-time tables, runs an OVER window, a tumbling-window aggregation, a
 * time-windowed join, and a final tumbling aggregation, then writes the result
 * to a checkpoint-rolled file sink. A KillMapper fails the first execution
 * attempt to exercise restart/recovery.
 */
public static void main(String[] args) throws Exception {
    ParameterTool params = ParameterTool.fromArgs(args);
    String outputPath = params.getRequired("outputPath");
    // Choose old or blink planner from the --planner argument (default "old").
    String planner = params.get("planner", "old");

    final EnvironmentSettings.Builder builder = EnvironmentSettings.newInstance();
    builder.inStreamingMode();
    if (planner.equals("old")) {
        builder.useOldPlanner();
    } else if (planner.equals("blink")) {
        builder.useBlinkPlanner();
    }
    final EnvironmentSettings settings = builder.build();

    final StreamExecutionEnvironment sEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    // Survive the intentional KillMapper failure: up to 3 restarts, 10s apart.
    sEnv.setRestartStrategy(RestartStrategies.fixedDelayRestart(
        3,
        Time.of(10, TimeUnit.SECONDS)
    ));
    sEnv.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
    sEnv.enableCheckpointing(4000);
    sEnv.getConfig().setAutoWatermarkInterval(1000);

    final StreamTableEnvironment tEnv = StreamTableEnvironment.create(sEnv, settings);
    tEnv.registerTableSource("table1", new GeneratorTableSource(10, 100, 60, 0));
    tEnv.registerTableSource("table2", new GeneratorTableSource(5, 0.2f, 60, 5));

    int overWindowSizeSeconds = 1;
    int tumbleWindowSizeSeconds = 10;

    // Per-key sliding count over the last second of table1.
    String overQuery = String.format(
        "SELECT " +
        " key, " +
        " rowtime, " +
        " COUNT(*) OVER (PARTITION BY key ORDER BY rowtime RANGE BETWEEN INTERVAL '%d' SECOND PRECEDING AND CURRENT ROW) AS cnt " +
        "FROM table1",
        overWindowSizeSeconds);

    // Tumbling aggregation; "correct" encodes a sanity check on the average cnt.
    String tumbleQuery = String.format(
        "SELECT " +
        " key, " +
        " CASE SUM(cnt) / COUNT(*) WHEN 101 THEN 1 ELSE 99 END AS correct, " +
        " TUMBLE_START(rowtime, INTERVAL '%d' SECOND) AS wStart, " +
        " TUMBLE_ROWTIME(rowtime, INTERVAL '%d' SECOND) AS rowtime " +
        "FROM (%s) " +
        "WHERE rowtime > TIMESTAMP '1970-01-01 00:00:01' " +
        "GROUP BY key, TUMBLE(rowtime, INTERVAL '%d' SECOND)",
        tumbleWindowSizeSeconds,
        tumbleWindowSizeSeconds,
        overQuery,
        tumbleWindowSizeSeconds);

    // Event-time-bounded join of table2 against the tumbling aggregation.
    String joinQuery = String.format(
        "SELECT " +
        " t1.key, " +
        " t2.rowtime AS rowtime, " +
        " t2.correct," +
        " t2.wStart " +
        "FROM table2 t1, (%s) t2 " +
        "WHERE " +
        " t1.key = t2.key AND " +
        " t1.rowtime BETWEEN t2.rowtime AND t2.rowtime + INTERVAL '%d' SECOND",
        tumbleQuery,
        tumbleWindowSizeSeconds);

    // Final 20-second aggregation of the join result.
    String finalAgg = String.format(
        "SELECT " +
        " SUM(correct) AS correct, " +
        " TUMBLE_START(rowtime, INTERVAL '20' SECOND) AS rowtime " +
        "FROM (%s) " +
        "GROUP BY TUMBLE(rowtime, INTERVAL '20' SECOND)",
        joinQuery);

    // get Table for SQL query
    Table result = tEnv.sqlQuery(finalAgg);
    // convert Table into append-only DataStream
    DataStream<Row> resultStream =
        tEnv.toAppendStream(result, Types.ROW(Types.INT, Types.SQL_TIMESTAMP));

    // File sink that rolls on checkpoints so output is exactly-once on restart.
    final StreamingFileSink<Row> sink = StreamingFileSink
        .forRowFormat(new Path(outputPath), (Encoder<Row>) (element, stream) -> {
            PrintStream out = new PrintStream(stream);
            out.println(element.toString());
        })
        .withBucketAssigner(new KeyBucketAssigner())
        .withRollingPolicy(OnCheckpointRollingPolicy.build())
        .build();

    resultStream
        // inject a KillMapper that forwards all records but terminates the first execution attempt
        .map(new KillMapper()).setParallelism(1)
        // add sink function
        .addSink(sink).setParallelism(1);

    sEnv.execute();
}
Example 16
Source File: Sort.java From flink-learning with Apache License 2.0 | 4 votes |
public static void main(String[] args) throws Exception { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env); env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime); env.setParallelism(1); SingleOutputStreamOperator<Event> source = env.addSource(new OutOfOrderEventSource()) .assignTimestampsAndWatermarks(new TimestampsAndWatermarks()); Table table = tableEnv.fromDataStream(source, "eventTime.rowtime"); tableEnv.registerTable("zhisheng", table); Table sorted = tableEnv.sqlQuery("select eventTime from zhisheng order by eventTime"); DataStream<Row> rowDataStream = tableEnv.toAppendStream(sorted, Row.class); rowDataStream.print(); //把执行计划打印出来 // System.out.println(env.getExecutionPlan()); env.execute("sort-streaming-data"); }
Example 17
Source File: TestTableFunction.java From yauaa with Apache License 2.0 | 4 votes |
/**
 * Calls the ParseUserAgent scalar function directly in the SELECT list, once per
 * extracted field, and asserts each parsed field against the expected columns
 * carried alongside the input user agent.
 */
@Test
public void testFunctionExtractDirect() throws Exception {

    // The base input stream
    StreamExecutionEnvironment senv = StreamExecutionEnvironment.getExecutionEnvironment();
    DataStreamSource<Tuple3<String, String, String>> inputStream = getTestAgentStream(senv);

    // The table environment
    StreamTableEnvironment tableEnv = StreamTableEnvironment.create(senv);

    // Give the stream a Table Name
    tableEnv.createTemporaryView("AgentStream", inputStream,
        "useragent, expectedDeviceClass, expectedAgentNameVersionMajor");

    // register the function, limited to the two fields this test inspects
    tableEnv.registerFunction("ParseUserAgent",
        new AnalyzeUseragentFunction("DeviceClass", "AgentNameVersionMajor"));

    // The downside of doing it this way is that the parsing function (i.e. parsing and converting all results into a map)
    // is called for each field you want. So in this simple case twice.
    String sqlQuery =
        "SELECT useragent,"+
        "       ParseUserAgent(useragent)['DeviceClass'          ] as DeviceClass," +
        "       ParseUserAgent(useragent)['AgentNameVersionMajor'] as AgentNameVersionMajor," +
        "       expectedDeviceClass," +
        "       expectedAgentNameVersionMajor " +
        "FROM AgentStream";
    Table resultTable = tableEnv.sqlQuery(sqlQuery);

    // Five string columns come back: useragent, two parsed fields, two expectations.
    TypeInformation<Row> tupleType = new RowTypeInfo(STRING, STRING, STRING, STRING, STRING);
    DataStream<Row> resultSet = tableEnv.toAppendStream(resultTable, tupleType);

    resultSet.map((MapFunction<Row, String>) row -> {
        Object useragent                     = row.getField(0);
        Object deviceClass                   = row.getField(1);
        Object agentNameVersionMajor         = row.getField(2);
        Object expectedDeviceClass           = row.getField(3);
        Object expectedAgentNameVersionMajor = row.getField(4);

        assertTrue(useragent                     instanceof String);
        assertTrue(deviceClass                   instanceof String);
        assertTrue(agentNameVersionMajor         instanceof String);
        assertTrue(expectedDeviceClass           instanceof String);
        assertTrue(expectedAgentNameVersionMajor instanceof String);

        // Parsed values must match the expectations shipped with each record.
        assertEquals(expectedDeviceClass, deviceClass, "Wrong DeviceClass: " + useragent);
        assertEquals(expectedAgentNameVersionMajor, agentNameVersionMajor,
            "Wrong AgentNameVersionMajor: " + useragent);
        return useragent.toString();
    }).printToErr();

    senv.execute();
}
Example 18
Source File: TestTableFunction.java From yauaa with Apache License 2.0 | 4 votes |
/**
 * Calls ParseUserAgent once in a sub-select and extracts the individual fields
 * from the returned map in the outer query — avoiding the double parse of the
 * direct-extraction variant — then asserts each field against the expectations.
 */
@Test
public void testMapFunctionExtractInSQLSubSelect() throws Exception {

    // The base input stream
    StreamExecutionEnvironment senv = StreamExecutionEnvironment.getExecutionEnvironment();
    DataStreamSource<Tuple3<String, String, String>> inputStream = getTestAgentStream(senv);

    // The table environment
    StreamTableEnvironment tableEnv = StreamTableEnvironment.create(senv);

    // Give the stream a Table Name
    tableEnv.createTemporaryView("AgentStream", inputStream,
        "useragent, expectedDeviceClass, expectedAgentNameVersionMajor");

    // register the function
    tableEnv.registerFunction("ParseUserAgent",
        new AnalyzeUseragentFunction("DeviceClass", "AgentNameVersionMajor"));

    // Inner query parses once; outer query indexes the resulting map per field.
    String sqlQuery =
        "SELECT useragent,"+
        "       parsedUseragent['DeviceClass']           AS deviceClass," +
        "       parsedUseragent['AgentNameVersionMajor'] AS agentNameVersionMajor," +
        "       expectedDeviceClass," +
        "       expectedAgentNameVersionMajor " +
        "FROM ( " +
        "   SELECT useragent," +
        "          ParseUserAgent(useragent) AS parsedUseragent," +
        "          expectedDeviceClass," +
        "          expectedAgentNameVersionMajor " +
        "   FROM   AgentStream " +
        ")";
    Table resultTable = tableEnv.sqlQuery(sqlQuery);

    TypeInformation<Row> tupleType = new RowTypeInfo(STRING, STRING, STRING, STRING, STRING);
    DataStream<Row> resultSet = tableEnv.toAppendStream(resultTable, tupleType);

    resultSet.map((MapFunction<Row, String>) row -> {
        Object useragent                     = row.getField(0);
        Object deviceClass                   = row.getField(1);
        Object agentNameVersionMajor         = row.getField(2);
        Object expectedDeviceClass           = row.getField(3);
        Object expectedAgentNameVersionMajor = row.getField(4);

        assertTrue(useragent                     instanceof String);
        assertTrue(deviceClass                   instanceof String);
        assertTrue(agentNameVersionMajor         instanceof String);
        assertTrue(expectedDeviceClass           instanceof String);
        assertTrue(expectedAgentNameVersionMajor instanceof String);

        // Parsed values must match the expectations shipped with each record.
        assertEquals(expectedDeviceClass, deviceClass, "Wrong DeviceClass: " + useragent);
        assertEquals(expectedAgentNameVersionMajor, agentNameVersionMajor,
            "Wrong AgentNameVersionMajor: " + useragent);
        return useragent.toString();
    }).printToErr();

    senv.execute();
}
Example 19
Source File: TestTableFunction.java From yauaa with Apache License 2.0 | 4 votes |
/**
 * Returns the full parsed map from ParseUserAgent as a single column and asserts
 * the relevant entries ("DeviceClass", "AgentNameVersionMajor") on the Java side
 * instead of extracting them in SQL.
 */
@Test
public void testMapFunctionReturnMap() throws Exception {

    // The base input stream
    StreamExecutionEnvironment senv = StreamExecutionEnvironment.getExecutionEnvironment();
    DataStreamSource<Tuple3<String, String, String>> inputStream = getTestAgentStream(senv);

    // The table environment
    StreamTableEnvironment tableEnv = StreamTableEnvironment.create(senv);

    // Give the stream a Table Name
    tableEnv.createTemporaryView("AgentStream", inputStream,
        "useragent, expectedDeviceClass, expectedAgentNameVersionMajor");

    // register the function
    tableEnv.registerFunction("ParseUserAgent",
        new AnalyzeUseragentFunction("DeviceClass", "AgentNameVersionMajor"));

    // The parsed result stays a map column; no per-field extraction in SQL.
    String sqlQuery =
        "SELECT useragent," +
        "       ParseUserAgent(useragent)     AS parsedUseragent," +
        "       expectedDeviceClass           AS expectedDeviceClass," +
        "       expectedAgentNameVersionMajor AS expectedAgentNameVersionMajor " +
        "FROM AgentStream";
    Table resultTable = tableEnv.sqlQuery(sqlQuery);

    // Column 1 is a MAP(STRING, STRING), the rest are plain strings.
    TypeInformation<Row> tupleType = new RowTypeInfo(STRING, MAP(STRING, STRING), STRING, STRING);
    DataStream<Row> resultSet = tableEnv.toAppendStream(resultTable, tupleType);

    resultSet.map((MapFunction<Row, String>) row -> {
        Object useragent                     = row.getField(0);
        Object parsedUseragent               = row.getField(1);
        Object expectedDeviceClass           = row.getField(2);
        Object expectedAgentNameVersionMajor = row.getField(3);

        assertTrue(useragent                     instanceof String);
        assertTrue(parsedUseragent               instanceof Map<?, ?>);
        assertTrue(expectedDeviceClass           instanceof String);
        assertTrue(expectedAgentNameVersionMajor instanceof String);

        // Look the parsed fields up in the returned map and compare to expectations.
        assertEquals(
            expectedDeviceClass,
            ((Map<?, ?>)parsedUseragent).get("DeviceClass"),
            "Wrong DeviceClass: " + useragent);

        assertEquals(
            expectedAgentNameVersionMajor,
            ((Map<?, ?>)parsedUseragent).get("AgentNameVersionMajor"),
            "Wrong AgentNameVersionMajor: " + useragent);

        return useragent.toString();
    }).printToErr();

    senv.execute();
}
Example 20
Source File: DemonstrationOfTumblingTableSQLFunction.java From yauaa with Apache License 2.0 | 4 votes |
/**
 * Demonstrates the ParseUserAgent function inside a tumbling-window GROUP BY:
 * parsed fields are extracted in a sub-select, grouped per 5-minute window, and
 * counted, then the aggregated rows are checked against the expected columns.
 *
 * <p>Fix: the original mapper named {@code row.getField(0)} "useragent", but per
 * the SELECT list field 0 is {@code wStart} (the TUMBLE_START timestamp) — the
 * user agent itself is not part of the aggregated output. The local is renamed
 * {@code windowStart} so failure messages no longer mislabel the value.
 */
@Disabled
@Test
public void runDemonstration() throws Exception {
    // The base input stream: event-time with periodic watermarks.
    StreamExecutionEnvironment senv = StreamExecutionEnvironment.getExecutionEnvironment();
    senv.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
    senv.getConfig().setAutoWatermarkInterval(1000);

    DataStream<Tuple4<Long, String, String, String>> inputStream = senv
        .addSource(new UAStreamSource())
        .assignTimestampsAndWatermarks(new UAWatermarker());

    // The table environment
    StreamTableEnvironment tableEnv = StreamTableEnvironment.create(senv);

    // Give the stream a Table Name; first column becomes the rowtime attribute.
    tableEnv.createTemporaryView("AgentStream", inputStream,
        "eventTime.rowtime, useragent, expectedDeviceClass, expectedAgentNameVersionMajor");

    // register the function
    tableEnv.registerFunction("ParseUserAgent",
        new AnalyzeUseragentFunction("DeviceClass", "AgentNameVersionMajor"));

    int windowIntervalCount = 5;
    String windowIntervalScale = "MINUTE";

    // Innermost select parses once per record; middle select extracts fields from
    // the map; outer select aggregates per tumbling window and distinct field combo.
    String sqlQuery = String.format(
        "SELECT" +
        "   TUMBLE_START(eventTime, INTERVAL '%d' %s) AS wStart," +
        "   deviceClass," +
        "   agentNameVersionMajor," +
        "   expectedDeviceClass," +
        "   expectedAgentNameVersionMajor," +
        "   Count('') " +
        "FROM ( "+
        "   SELECT " +
        "       eventTime, " +
        "       parsedUserAgent['DeviceClass'          ]  AS deviceClass," +
        "       parsedUserAgent['AgentNameVersionMajor']  AS agentNameVersionMajor," +
        "       expectedDeviceClass," +
        "       expectedAgentNameVersionMajor" +
        "   FROM ( "+
        "       SELECT " +
        "           eventTime, " +
        "           ParseUserAgent(useragent) AS parsedUserAgent," +
        "           expectedDeviceClass," +
        "           expectedAgentNameVersionMajor" +
        "       FROM AgentStream " +
        "   )" +
        ")" +
        "GROUP BY TUMBLE(eventTime, INTERVAL '%d' %s), " +
        "       deviceClass," +
        "       agentNameVersionMajor," +
        "       expectedDeviceClass," +
        "       expectedAgentNameVersionMajor",
        windowIntervalCount, windowIntervalScale,
        windowIntervalCount, windowIntervalScale
    );
    Table resultTable = tableEnv.sqlQuery(sqlQuery);

    // Result columns: wStart (timestamp), 4 strings, and the count (long).
    TypeInformation<Row> tupleType = new RowTypeInfo(SQL_TIMESTAMP, STRING, STRING, STRING, STRING, LONG);
    DataStream<Row> resultSet = tableEnv.toAppendStream(resultTable, tupleType);

    resultSet.print();

    resultSet.map((MapFunction<Row, String>) row -> {
        // Field 0 is the tumbling-window start, NOT the user agent string.
        Object windowStart                   = row.getField(0);
        Object deviceClass                   = row.getField(1);
        Object agentNameVersionMajor         = row.getField(2);
        Object expectedDeviceClass           = row.getField(3);
        Object expectedAgentNameVersionMajor = row.getField(4);

        // The window start identifies the failing row in the assertion messages.
        assertEquals(
            expectedDeviceClass,
            deviceClass,
            "Wrong DeviceClass: " + windowStart);

        assertEquals(
            expectedAgentNameVersionMajor,
            agentNameVersionMajor,
            "Wrong AgentNameVersionMajor: " + windowStart);

        return windowStart.toString();
    });

    senv.execute();
}