Java Code Examples for org.apache.flink.table.api.java.StreamTableEnvironment#execute()

The following examples show how to use org.apache.flink.table.api.java.StreamTableEnvironment#execute(). Each example notes the project and source file it comes from, so you can follow it back to the original code.
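Nearly all of the examples below share one skeleton: build a StreamExecutionEnvironment, create a Blink-planner StreamTableEnvironment from it, register sources and sinks, define a query, and submit the job with StreamTableEnvironment#execute() rather than StreamExecutionEnvironment#execute(). The minimal sketch below shows just that pattern for orientation; the class name and the constant query are placeholders, not taken from any project on this page. Note that execute() and sqlUpdate() were deprecated in Flink 1.11 in favor of executeSql() and StatementSet, so these examples reflect the Flink 1.9/1.10 API.

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

public class ExecuteSkeleton {
    public static void main(String[] args) throws Exception {
        // Stream environment plus Blink planner settings, as in the examples below.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .useBlinkPlanner()
                .inStreamingMode()
                .build();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);

        // A FROM-less constant query stands in for a registered source table here.
        Table result = tableEnv.sqlQuery("SELECT 'hello' AS word");
        tableEnv.toRetractStream(result, Row.class).print();

        // Submitting through the table environment is the common thread of this page.
        tableEnv.execute("StreamTableEnvironment#execute() skeleton");
    }
}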
Example 1
Source File: CustomKafkaSourceMain.java    From flink-learning with Apache License 2.0
public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment blinkStreamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    blinkStreamEnv.setParallelism(1);
    EnvironmentSettings blinkStreamSettings = EnvironmentSettings.newInstance()
            .useBlinkPlanner()
            .inStreamingMode()
            .build();
    StreamTableEnvironment blinkStreamTableEnv = StreamTableEnvironment.create(blinkStreamEnv, blinkStreamSettings);

    blinkStreamTableEnv.registerTableSource("kafkaDataStream", new MyKafkaTableSource(ExecutionEnvUtil.PARAMETER_TOOL));

    RetractStreamTableSink<Row> retractStreamTableSink = new MyRetractStreamTableSink(new String[]{"_count", "word"}, new DataType[]{DataTypes.BIGINT(), DataTypes.STRING()});
    blinkStreamTableEnv.registerTableSink("sinkTable", retractStreamTableSink);

    Table wordCount = blinkStreamTableEnv.sqlQuery("SELECT count(word) AS _count,word FROM kafkaDataStream GROUP BY word");

    wordCount.insertInto("sinkTable");

    blinkStreamTableEnv.execute("Blink Custom Kafka Table Source");
}
 
Example 2
Source File: KafkaSourceMain.java    From flink-learning with Apache License 2.0
public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment blinkStreamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    blinkStreamEnv.setParallelism(1);
    EnvironmentSettings blinkStreamSettings = EnvironmentSettings.newInstance()
            .useBlinkPlanner()
            .inStreamingMode()
            .build();
    StreamTableEnvironment blinkStreamTableEnv = StreamTableEnvironment.create(blinkStreamEnv, blinkStreamSettings);

    ParameterTool parameterTool = ExecutionEnvUtil.PARAMETER_TOOL;
    Properties properties = KafkaConfigUtil.buildKafkaProps(parameterTool);
    DataStream<String> dataStream = blinkStreamEnv.addSource(new FlinkKafkaConsumer011<>(parameterTool.get("kafka.topic"), new SimpleStringSchema(), properties));
    Table table = blinkStreamTableEnv.fromDataStream(dataStream, "word");
    blinkStreamTableEnv.registerTable("kafkaDataStream", table);

    RetractStreamTableSink<Row> retractStreamTableSink = new MyRetractStreamTableSink(new String[]{"_count", "word"}, new DataType[]{DataTypes.BIGINT(), DataTypes.STRING()});
    blinkStreamTableEnv.registerTableSink("sinkTable", retractStreamTableSink);

    Table wordCount = blinkStreamTableEnv.sqlQuery("SELECT count(word) AS _count,word FROM kafkaDataStream GROUP BY word");

    wordCount.insertInto("sinkTable");

    blinkStreamTableEnv.execute("Blink Kafka Table Source");
}
 
Example 3
Source File: SQLExampleWordCount.java    From flink-learning with Apache License 2.0
public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment blinkStreamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    blinkStreamEnv.setParallelism(1);
    EnvironmentSettings blinkStreamSettings = EnvironmentSettings.newInstance()
            .useBlinkPlanner()
            .inStreamingMode()
            .build();
    StreamTableEnvironment blinkStreamTableEnv = StreamTableEnvironment.create(blinkStreamEnv, blinkStreamSettings);

    String path = SQLExampleWordCount.class.getClassLoader().getResource("words.txt").getPath();

    CsvTableSource csvTableSource = CsvTableSource.builder()
            .field("word", Types.STRING)
            .path(path)
            .build();
    blinkStreamTableEnv.registerTableSource("zhisheng", csvTableSource);
    Table wordWithCount = blinkStreamTableEnv.sqlQuery("SELECT count(word), word FROM zhisheng GROUP BY word");
    blinkStreamTableEnv.toRetractStream(wordWithCount, Row.class).print();

    blinkStreamTableEnv.execute("Blink Stream SQL Job");
}
 
Example 4
Source File: SqlScriptExecutor.java    From flink-tutorials with Apache License 2.0
public static void main(String[] args) throws Exception {

		HiveCatalog hiveCatalog = new HiveCatalog(HIVE_CATALOG, HIVE_DATABASE, HIVE_CONF_DIR, HIVE_VERSION);
		StreamTableEnvironment env = createTableEnv();
		env.registerCatalog(HIVE_CATALOG, hiveCatalog);

		File script = new File(args[0]);
		String[] commands = FileUtils.readFileUtf8(script).split(";");

		for (String command : commands) {
			if (command.trim().isEmpty()) {
				continue;
			}

			LOG.info("Executing SQL statement: {}", command.trim());
			env.sqlUpdate(command.trim());
		}

		env.execute("SQL Script: " + script.getName());
	}
 
Example 5
Source File: TableExampleWordCount.java    From flink-learning with Apache License 2.0
public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment blinkStreamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    blinkStreamEnv.setParallelism(1);
    EnvironmentSettings blinkStreamSettings = EnvironmentSettings.newInstance()
            .useBlinkPlanner()
            .inStreamingMode()
            .build();
    StreamTableEnvironment blinkStreamTableEnv = StreamTableEnvironment.create(blinkStreamEnv, blinkStreamSettings);

    String path = TableExampleWordCount.class.getClassLoader().getResource("words.txt").getPath();
    blinkStreamTableEnv
            .connect(new FileSystem().path(path))
            .withFormat(new OldCsv().field("word", Types.STRING).lineDelimiter("\n"))
            .withSchema(new Schema().field("word", Types.STRING))
            .inAppendMode()
            .registerTableSource("FlieSourceTable");

    Table wordWithCount = blinkStreamTableEnv.scan("FlieSourceTable")
            .groupBy("word")
            .select("word,count(word) as _count");
    blinkStreamTableEnv.toRetractStream(wordWithCount, Row.class).print();

    // The printed results carry an extra true/false flag, which can be puzzling at first.
    // A retract sink deletes before it inserts: false marks the retraction of the previous
    // record, and true marks the insertion of the new one.

    blinkStreamTableEnv.execute("Blink Stream SQL Job");
}
 
Example 6
Source File: FlinkSQLDistinctExample.java    From flink-learning with Apache License 2.0
public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment blinkStreamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    blinkStreamEnv.setParallelism(1);
    EnvironmentSettings blinkStreamSettings = EnvironmentSettings.newInstance()
            .useBlinkPlanner()
            .inStreamingMode()
            .build();
    StreamTableEnvironment blinkStreamTableEnv = StreamTableEnvironment.create(blinkStreamEnv, blinkStreamSettings);

    String ddlSource = "CREATE TABLE user_behavior (\n" +
            "    user_id BIGINT,\n" +
            "    item_id BIGINT,\n" +
            "    category_id BIGINT,\n" +
            "    behavior STRING,\n" +
            "    ts TIMESTAMP(3)\n" +
            ") WITH (\n" +
            "    'connector.type' = 'kafka',\n" +
            "    'connector.version' = '0.11',\n" +
            "    'connector.topic' = 'user_behavior',\n" +
            "    'connector.startup-mode' = 'latest-offset',\n" +
            "    'connector.properties.zookeeper.connect' = 'localhost:2181',\n" +
            "    'connector.properties.bootstrap.servers' = 'localhost:9092',\n" +
            "    'format.type' = 'json'\n" +
            ")";

    String countSql = "select user_id, count(user_id) from user_behavior group by user_id";

    blinkStreamTableEnv.sqlUpdate(ddlSource);
    Table countTable = blinkStreamTableEnv.sqlQuery(countSql);
    blinkStreamTableEnv.toRetractStream(countTable, Row.class).print();

    String distinctSql = "select distinct(user_id) from user_behavior";
    Table distinctTable = blinkStreamTableEnv.sqlQuery(distinctSql);
    blinkStreamTableEnv.toRetractStream(distinctTable, Row.class).print("==");

    blinkStreamTableEnv.execute("Blink Stream SQL count/distinct demo");
}
 
Example 7
Source File: CustomTableSinkMain.java    From flink-learning with Apache License 2.0
public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment blinkStreamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        blinkStreamEnv.setParallelism(1);
        EnvironmentSettings blinkStreamSettings = EnvironmentSettings.newInstance()
                .useBlinkPlanner()
                .inStreamingMode()
                .build();
        StreamTableEnvironment blinkStreamTableEnv = StreamTableEnvironment.create(blinkStreamEnv, blinkStreamSettings);

        String path = SQLExampleWordCount.class.getClassLoader().getResource("words.txt").getPath();

        CsvTableSource csvTableSource = CsvTableSource.builder()
                .field("word", Types.STRING)
                .path(path)
                .build();
        blinkStreamTableEnv.registerTableSource("zhisheng", csvTableSource);

        RetractStreamTableSink<Row> retractStreamTableSink = new MyRetractStreamTableSink(new String[]{"c", "word"}, new TypeInformation[]{Types.LONG, Types.STRING});
        // Alternatively, declare the sink types with DataTypes instead of TypeInformation:
//        RetractStreamTableSink<Row> retractStreamTableSink = new MyRetractStreamTableSink(new String[]{"c", "word"}, new DataType[]{DataTypes.BIGINT(), DataTypes.STRING()});
        blinkStreamTableEnv.registerTableSink("sinkTable", retractStreamTableSink);

        Table wordWithCount = blinkStreamTableEnv.sqlQuery("SELECT count(word) AS c, word FROM zhisheng GROUP BY word");

        wordWithCount.insertInto("sinkTable");
        blinkStreamTableEnv.execute("Blink Custom Table Sink");
    }
 
Example 8
Source File: TableSqlTest.java    From sylph with Apache License 2.0
@Test
public void selectNullTest()
        throws Exception
{
    StreamTableEnvironment tableEnv = getTableEnv();

    tableEnv.toAppendStream(tableEnv.sqlQuery("select cast(null as varchar) as a1"), Row.class).print();
    tableEnv.execute("");
}
 
Example 9
Source File: TableSqlTest.java    From sylph with Apache License 2.0
@Test
public void selectLocalTimeTest()
        throws Exception
{
    StreamTableEnvironment tableEnv = getTableEnv();

    tableEnv.toAppendStream(tableEnv.sqlQuery("select LOCALTIMESTAMP as `check_time`"), Row.class).print();
    tableEnv.execute("");
}
 
Example 10
Source File: SQLExampleKafkaData2ES.java    From flink-learning with Apache License 2.0
public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment blinkStreamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    blinkStreamEnv.setParallelism(1);
    EnvironmentSettings blinkStreamSettings = EnvironmentSettings.newInstance()
            .useBlinkPlanner()
            .inStreamingMode()
            .build();
    StreamTableEnvironment blinkStreamTableEnv = StreamTableEnvironment.create(blinkStreamEnv, blinkStreamSettings);

    String ddlSource = "CREATE TABLE user_behavior (\n" +
            "    user_id BIGINT,\n" +
            "    item_id BIGINT,\n" +
            "    category_id BIGINT,\n" +
            "    behavior STRING,\n" +
            "    ts TIMESTAMP(3)\n" +
            ") WITH (\n" +
            "    'connector.type' = 'kafka',\n" +
            "    'connector.version' = '0.11',\n" +
            "    'connector.topic' = 'user_behavior',\n" +
            "    'connector.startup-mode' = 'latest-offset',\n" +
            "    'connector.properties.zookeeper.connect' = 'localhost:2181',\n" +
            "    'connector.properties.bootstrap.servers' = 'localhost:9092',\n" +
            "    'format.type' = 'json'\n" +
            ")";

    String ddlSink = "CREATE TABLE user_behavior_es (\n" +
            "    user_id BIGINT,\n" +
            "    item_id BIGINT\n" +
            ") WITH (\n" +
            "    'connector.type' = 'elasticsearch',\n" +
            "    'connector.version' = '6',\n" +
            "    'connector.hosts' = 'http://localhost:9200',\n" +
            "    'connector.index' = 'user_behavior_es',\n" +
            "    'connector.document-type' = 'user_behavior_es',\n" +
            "    'format.type' = 'json',\n" +
            "    'update-mode' = 'append',\n" +
            "    'connector.bulk-flush.max-actions' = '10'\n" +
            ")";

    // From the consumed records, keep only two fields and write them to Elasticsearch
    String sql = "insert into user_behavior_es select user_id, item_id from user_behavior";

    System.out.println(ddlSource);
    System.out.println(ddlSink);
    blinkStreamTableEnv.sqlUpdate(ddlSource);
    blinkStreamTableEnv.sqlUpdate(ddlSink);
    blinkStreamTableEnv.sqlUpdate(sql);

    blinkStreamTableEnv.execute("Blink Stream SQL Job2 —— read data from kafka,sink to es");
}
 
Example 11
Source File: SQLExampleKafkaRowData2ES.java    From flink-learning with Apache License 2.0
public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment blinkStreamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    blinkStreamEnv.setParallelism(1);
    EnvironmentSettings blinkStreamSettings = EnvironmentSettings.newInstance()
            .useBlinkPlanner()
            .inStreamingMode()
            .build();
    StreamTableEnvironment blinkStreamTableEnv = StreamTableEnvironment.create(blinkStreamEnv, blinkStreamSettings);

    String ddlSource = "CREATE TABLE user_behavior (\n" +
            "    userDetail  Row<userId BIGINT, name STRING, age BIGINT>,\n" +
            "    item_id BIGINT,\n" +
            "    category_id BIGINT,\n" +
            "    behavior STRING\n" +
            ") WITH (\n" +
            "    'connector.type' = 'kafka',\n" +
            "    'connector.version' = '0.11',\n" +
            "    'connector.topic' = 'user_behavior',\n" +
            "    'connector.startup-mode' = 'latest-offset',\n" +
            "    'connector.properties.zookeeper.connect' = 'localhost:2181',\n" +
            "    'connector.properties.bootstrap.servers' = 'localhost:9092',\n" +
            "    'format.type' = 'json'\n" +
            ")";

    String ddlSink = "CREATE TABLE user_behavior_es (\n" +
            "    user_id BIGINT,\n" +
            "    item_id BIGINT\n" +
            ") WITH (\n" +
            "    'connector.type' = 'elasticsearch',\n" +
            "    'connector.version' = '6',\n" +
            "    'connector.hosts' = 'http://localhost:9200',\n" +
            "    'connector.index' = 'user_behavior_es',\n" +
            "    'connector.document-type' = 'user_behavior_es',\n" +
            "    'format.type' = 'json',\n" +
            "    'update-mode' = 'append',\n" +
            "    'connector.bulk-flush.max-actions' = '10'\n" +
            ")";

    // From the consumed records, keep only two fields and write them to Elasticsearch
    String sql = "insert into user_behavior_es select userDetail.userId, item_id from user_behavior";
    System.out.println(ddlSource);
    System.out.println(ddlSink);
    blinkStreamTableEnv.sqlUpdate(ddlSource);
    blinkStreamTableEnv.sqlUpdate(ddlSink);
    blinkStreamTableEnv.sqlUpdate(sql);

    blinkStreamTableEnv.execute("Blink Stream SQL Job2 —— read data from kafka,sink to es");
}
 
Example 12
Source File: SQLExampleKafkaData2HBase.java    From flink-learning with Apache License 2.0
public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment blinkStreamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    blinkStreamEnv.setParallelism(1);
    EnvironmentSettings blinkStreamSettings = EnvironmentSettings.newInstance()
            .useBlinkPlanner()
            .inStreamingMode()
            .build();
    StreamTableEnvironment blinkStreamTableEnv = StreamTableEnvironment.create(blinkStreamEnv, blinkStreamSettings);

    String ddlSource = "CREATE TABLE user_behavior (\n" +
            "    user_id BIGINT,\n" +
            "    item_id BIGINT,\n" +
            "    category_id BIGINT,\n" +
            "    behavior STRING,\n" +
            "    ts TIMESTAMP(3)\n" +
            ") WITH (\n" +
            "    'connector.type' = 'kafka',\n" +
            "    'connector.version' = '0.11',\n" +
            "    'connector.topic' = 'user_behavior',\n" +
            "    'connector.startup-mode' = 'latest-offset',\n" +
            "    'connector.properties.zookeeper.connect' = 'localhost:2181',\n" +
            "    'connector.properties.bootstrap.servers' = 'localhost:9092',\n" +
            "    'format.type' = 'json'\n" +
            ")";

    String ddlSink = "CREATE TABLE user_behavior_hbase (\n" +
            "  rowkey BIGINT,\n" +
            "  cf ROW<item_id BIGINT, category_id BIGINT>\n" +
            ") WITH (\n" +
            "  'connector.type' = 'hbase',\n" +
            "  'connector.version' = '1.4.3',\n" +
            "  'connector.table-name' = 'zhisheng01',\n" +
            "  'connector.zookeeper.quorum' = 'localhost:2181',\n" +
            "  'connector.zookeeper.znode.parent' = '/hbase',\n" +
            "  'connector.write.buffer-flush.max-size' = '2mb',\n" +
            "  'connector.write.buffer-flush.max-rows' = '1000',\n" +
            "  'connector.write.buffer-flush.interval' = '2s'\n" +
            ")";

    // From the consumed records, keep only two fields and write them to HBase
    String sql = "insert into user_behavior_hbase select user_id, ROW(item_id, category_id) from user_behavior";

    System.out.println(ddlSource);
    System.out.println(ddlSink);
    blinkStreamTableEnv.sqlUpdate(ddlSource);
    blinkStreamTableEnv.sqlUpdate(ddlSink);
    blinkStreamTableEnv.sqlUpdate(sql);

    blinkStreamTableEnv.execute("Blink Stream SQL Job5 —— read data from kafka,sink to HBase");
}
 
Example 13
Source File: HBaseConnectorITCase.java    From flink with Apache License 2.0
@Test
public void testTableSink() throws Exception {
	HBaseTableSchema schema = new HBaseTableSchema();
	schema.addColumn(FAMILY1, F1COL1, Integer.class);
	schema.addColumn(FAMILY2, F2COL1, String.class);
	schema.addColumn(FAMILY2, F2COL2, Long.class);
	schema.setRowKey("rk", Integer.class);
	schema.addColumn(FAMILY3, F3COL1, Double.class);
	schema.addColumn(FAMILY3, F3COL2, Boolean.class);
	schema.addColumn(FAMILY3, F3COL3, String.class);

	Map<String, String> tableProperties = new HashMap<>();
	tableProperties.put("connector.type", "hbase");
	tableProperties.put("connector.version", "1.4.3");
	tableProperties.put("connector.property-version", "1");
	tableProperties.put("connector.table-name", TEST_TABLE_2);
	tableProperties.put("connector.zookeeper.quorum", getZookeeperQuorum());
	tableProperties.put("connector.zookeeper.znode.parent", "/hbase");
	DescriptorProperties descriptorProperties = new DescriptorProperties(true);
	descriptorProperties.putTableSchema(SCHEMA, schema.convertsToTableSchema());
	descriptorProperties.putProperties(tableProperties);
	TableSink tableSink = TableFactoryService
		.find(HBaseTableFactory.class, descriptorProperties.asMap())
		.createTableSink(descriptorProperties.asMap());

	StreamExecutionEnvironment execEnv = StreamExecutionEnvironment.getExecutionEnvironment();
	StreamTableEnvironment tEnv = StreamTableEnvironment.create(execEnv, streamSettings);

	DataStream<Row> ds = execEnv.fromCollection(testData1).returns(testTypeInfo1);
	tEnv.registerDataStream("src", ds);
	tEnv.registerTableSink("hbase", tableSink);

	String query = "INSERT INTO hbase SELECT ROW(f1c1), ROW(f2c1, f2c2), rowkey, ROW(f3c1, f3c2, f3c3) FROM src";
	tEnv.sqlUpdate(query);

	// wait to finish
	tEnv.execute("HBase Job");

	// start a batch scan job to verify contents in HBase table
	TableEnvironment batchTableEnv = createBatchTableEnv();

	HBaseTableSource hbaseTable = new HBaseTableSource(getConf(), TEST_TABLE_2);
	hbaseTable.setRowKey("rowkey", Integer.class);
	hbaseTable.addColumn(FAMILY1, F1COL1, Integer.class);
	hbaseTable.addColumn(FAMILY2, F2COL1, String.class);
	hbaseTable.addColumn(FAMILY2, F2COL2, Long.class);
	hbaseTable.addColumn(FAMILY3, F3COL1, Double.class);
	hbaseTable.addColumn(FAMILY3, F3COL2, Boolean.class);
	hbaseTable.addColumn(FAMILY3, F3COL3, String.class);
	batchTableEnv.registerTableSource("hTable", hbaseTable);

	Table table = batchTableEnv.sqlQuery(
		"SELECT " +
			"  h.rowkey, " +
			"  h.family1.col1, " +
			"  h.family2.col1, " +
			"  h.family2.col2, " +
			"  h.family3.col1, " +
			"  h.family3.col2, " +
			"  h.family3.col3 " +
			"FROM hTable AS h"
	);

	List<Row> results = collectBatchResult(table);
	String expected =
		"1,10,Hello-1,100,1.01,false,Welt-1\n" +
			"2,20,Hello-2,200,2.02,true,Welt-2\n" +
			"3,30,Hello-3,300,3.03,false,Welt-3\n" +
			"4,40,,400,4.04,true,Welt-4\n" +
			"5,50,Hello-5,500,5.05,false,Welt-5\n" +
			"6,60,Hello-6,600,6.06,true,Welt-6\n" +
			"7,70,Hello-7,700,7.07,false,Welt-7\n" +
			"8,80,,800,8.08,true,Welt-8\n";

	TestBaseUtils.compareResultAsText(results, expected);
}
 
Example 14
Source File: SQLExampleKafkaData2Kafka.java    From flink-learning with Apache License 2.0
public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment blinkStreamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    blinkStreamEnv.setParallelism(1);
    EnvironmentSettings blinkStreamSettings = EnvironmentSettings.newInstance()
            .useBlinkPlanner()
            .inStreamingMode()
            .build();
    StreamTableEnvironment blinkStreamTableEnv = StreamTableEnvironment.create(blinkStreamEnv, blinkStreamSettings);

    String ddlSource = "CREATE TABLE user_behavior (\n" +
            "    user_id BIGINT,\n" +
            "    item_id BIGINT,\n" +
            "    category_id BIGINT,\n" +
            "    behavior STRING,\n" +
            "    ts TIMESTAMP(3)\n" +
            ") WITH (\n" +
            "    'connector.type' = 'kafka',\n" +
            "    'connector.version' = '0.11',\n" +
            "    'connector.topic' = 'user_behavior',\n" +
            "    'connector.startup-mode' = 'latest-offset',\n" +
            "    'connector.properties.zookeeper.connect' = 'localhost:2181',\n" +
            "    'connector.properties.bootstrap.servers' = 'localhost:9092',\n" +
            "    'format.type' = 'json'\n" +
            ")";

    String ddlSink = "CREATE TABLE user_behavior_sink (\n" +
            "    user_id BIGINT,\n" +
            "    item_id BIGINT\n" +
            ") WITH (\n" +
            "    'connector.type' = 'kafka',\n" +
            "    'connector.version' = '0.11',\n" +
            "    'connector.topic' = 'user_behavior_sink',\n" +
            "    'connector.properties.zookeeper.connect' = 'localhost:2181',\n" +
            "    'connector.properties.bootstrap.servers' = 'localhost:9092',\n" +
            "    'format.type' = 'json',\n" +
            "    'update-mode' = 'append'\n" +
            ")";

    // From the consumed records, keep only two fields and send them to a new Kafka topic
    String sql = "insert into user_behavior_sink select user_id, item_id from user_behavior";

    System.out.println(ddlSource);
    System.out.println(ddlSink);
    blinkStreamTableEnv.sqlUpdate(ddlSource);
    blinkStreamTableEnv.sqlUpdate(ddlSink);
    blinkStreamTableEnv.sqlUpdate(sql);

    blinkStreamTableEnv.execute("Blink Stream SQL Job2");
}
 
Example 15
Source File: SQLExampleData2PG.java    From flink-learning with Apache License 2.0
public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment blinkStreamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    blinkStreamEnv.setParallelism(1);
    EnvironmentSettings blinkStreamSettings = EnvironmentSettings.newInstance()
            .useBlinkPlanner()
            .inStreamingMode()
            .build();
    StreamTableEnvironment blinkStreamTableEnv = StreamTableEnvironment.create(blinkStreamEnv, blinkStreamSettings);

    String ddlSource = "CREATE TABLE user_behavior (\n" +
            "    score numeric(38, 18)\n" +
            ") WITH (\n" +
            "    'connector.type' = 'kafka',\n" +
            "    'connector.version' = '0.11',\n" +
            "    'connector.topic' = 'user_behavior',\n" +
            "    'connector.startup-mode' = 'latest-offset',\n" +
            "    'connector.properties.zookeeper.connect' = 'localhost:2181',\n" +
            "    'connector.properties.bootstrap.servers' = 'localhost:9092',\n" +
            "    'format.type' = 'json'\n" +
            ")";

    String ddlSink = "CREATE TABLE user_behavior_aggregate (\n" +
            "    score numeric(38, 18)\n" +
            ") WITH (\n" +
            "    'connector.type' = 'jdbc',\n" +
            "    'connector.driver' = 'org.postgresql.Driver',\n" +
            "    'connector.url' = 'jdbc:postgresql://localhost:3600/hello_hitch_user',\n" +
            "    'connector.table' = 't_hitch_user_ltv_aggregate', \n" +
            "    'connector.username' = 'hello_hitch_user', \n" +
            "    'connector.password' = 'hello_hitch_user',\n" +
            "    'connector.write.flush.max-rows' = '1' \n" +
            ")";

    String sql = "insert into user_behavior_aggregate select yidun_score from user_behavior";

    blinkStreamTableEnv.sqlUpdate(ddlSource);
    blinkStreamTableEnv.sqlUpdate(ddlSink);
    blinkStreamTableEnv.sqlUpdate(sql);

    blinkStreamTableEnv.execute("Blink Stream SQL demo PG");
}
 