org.apache.flink.table.api.EnvironmentSettings Java Examples

The following examples show how to use org.apache.flink.table.api.EnvironmentSettings. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: BatchSQLTestProgram.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * End-to-end batch test program: runs one SQL statement (supplied on the
 * command line) against two generated source tables and writes the result
 * to a CSV sink at the given path.
 *
 * <p>Required arguments: {@code --outputPath} and {@code --sqlStatement}.
 */
public static void main(String[] args) throws Exception {
	ParameterTool params = ParameterTool.fromArgs(args);
	String outputPath = params.getRequired("outputPath");
	String sqlStatement = params.getRequired("sqlStatement");

	// Blink planner in batch mode.
	TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.newInstance()
		.useBlinkPlanner()
		.inBatchMode()
		.build());

	// Register two generated sources and a two-column (INT, SQL_TIMESTAMP) CSV sink.
	// The internal registration API is used because these are legacy TableSource/TableSink types.
	((TableEnvironmentInternal) tEnv).registerTableSourceInternal("table1", new GeneratorTableSource(10, 100, 60, 0));
	((TableEnvironmentInternal) tEnv).registerTableSourceInternal("table2", new GeneratorTableSource(5, 0.2f, 60, 5));
	((TableEnvironmentInternal) tEnv).registerTableSinkInternal("sinkTable",
		new CsvTableSink(outputPath)
			.configure(new String[]{"f0", "f1"}, new TypeInformation[]{Types.INT, Types.SQL_TIMESTAMP}));

	TableResult result = tEnv.executeSql(sqlStatement);
	// Block until the job finishes so the program does not exit before the sink is written.
	result.getJobClient().get().getJobExecutionResult(Thread.currentThread().getContextClassLoader()).get();
}
 
Example #2
Source File: FlinkStreamPythonUdfSqlJob.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * End-to-end job verifying a Python scalar UDF ({@code add_one}) on the
 * legacy (old) planner in streaming mode: applies the UDF to the values
 * 1, 2, 3 and checks that the collected output equals 2, 3, 4.
 */
public static void main(String[] args) throws Exception {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	env.setParallelism(1);
	StreamTableEnvironment tEnv = StreamTableEnvironment.create(
		env,
		EnvironmentSettings.newInstance().useOldPlanner().inStreamingMode().build());
	// Register the Python function add_one from module add_one as a temporary system function.
	tEnv.executeSql("create temporary system function add_one as 'add_one.add_one' language python");

	tEnv.createTemporaryView("source", tEnv.fromValues(1L, 2L, 3L).as("a"));

	Iterator<Row> result = tEnv.executeSql("select add_one(a) as a from source").collect();

	// Drain the result iterator into a list for comparison.
	List<Long> actual = new ArrayList<>();
	while (result.hasNext()) {
		Row r = result.next();
		actual.add((Long) r.getField(0));
	}

	List<Long> expected = Arrays.asList(2L, 3L, 4L);
	if (!actual.equals(expected)) {
		throw new AssertionError(String.format("The output result: %s is not as expected: %s!", actual, expected));
	}
}
 
Example #3
Source File: SQLExampleWordCount.java    From flink-learning with Apache License 2.0 6 votes vote down vote up
/**
 * Streaming SQL word-count example on the blink planner: reads words from the
 * bundled words.txt via a CSV source, groups by word and prints the retract stream.
 */
public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment blinkStreamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    blinkStreamEnv.setParallelism(1);
    EnvironmentSettings blinkStreamSettings = EnvironmentSettings.newInstance()
            .useBlinkPlanner()
            .inStreamingMode()
            .build();
    StreamTableEnvironment blinkStreamTableEnv = StreamTableEnvironment.create(blinkStreamEnv, blinkStreamSettings);

    // NOTE(review): assumes words.txt is on the classpath; getResource() would
    // return null (and this line would NPE) otherwise.
    String path = SQLExampleWordCount.class.getClassLoader().getResource("words.txt").getPath();

    // Single-column CSV source over the words file.
    CsvTableSource csvTableSource = CsvTableSource.builder()
            .field("word", Types.STRING)
            .path(path)
            .build();
    blinkStreamTableEnv.registerTableSource("zhisheng", csvTableSource);
    Table wordWithCount = blinkStreamTableEnv.sqlQuery("SELECT count(word), word FROM zhisheng GROUP BY word");
    // Print (flag, row) pairs of the retracting aggregation result.
    blinkStreamTableEnv.toRetractStream(wordWithCount, Row.class).print();

    blinkStreamTableEnv.execute("Blink Stream SQL Job");
}
 
Example #4
Source File: JdbcDataTypeTest.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Validates JDBC dialect data-type support: creates a table with the
 * parameterized data type expression and dialect, then queries it.
 * When {@code testItem.expectError} is set, a {@link ValidationException}
 * with exactly that message must be raised.
 */
@Test
public void testDataTypeValidate() {
	String sqlDDL = String.format(DDL_FORMAT, testItem.dataTypeExpr, testItem.dialect);

	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	EnvironmentSettings envSettings = EnvironmentSettings.newInstance()
			.useBlinkPlanner()
			.inStreamingMode()
			.build();
	StreamTableEnvironment tEnv = StreamTableEnvironment.create(env, envSettings);

	tEnv.executeSql(sqlDDL);

	if (testItem.expectError != null) {
		try {
			tEnv.sqlQuery("SELECT * FROM T");
			// Bug fix: previously the test passed silently when no exception was
			// thrown at all; fail explicitly so missing validation is detected.
			Assert.fail("Expected a validation error for data type: " + testItem.dataTypeExpr);
		} catch (Exception ex) {
			Assert.assertTrue(ex.getCause() instanceof ValidationException);
			Assert.assertEquals(testItem.expectError, ex.getCause().getMessage());
		}
	} else {
		tEnv.sqlQuery("SELECT * FROM T");
	}
}
 
Example #5
Source File: ExecutionContext.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Builds a {@link StreamTableEnvironmentImpl} from pre-constructed components,
 * discovering the planner implementation advertised by the given settings.
 */
private static TableEnvironment createStreamTableEnvironment(
		StreamExecutionEnvironment env,
		EnvironmentSettings settings,
		TableConfig config,
		Executor executor,
		CatalogManager catalogManager,
		ModuleManager moduleManager,
		FunctionCatalog functionCatalog) {

	// Look up the planner factory matching the settings' planner properties,
	// then instantiate the planner with the supplied components.
	final Map<String, String> props = settings.toPlannerProperties();
	final PlannerFactory plannerFactory = ComponentFactoryService.find(PlannerFactory.class, props);
	final Planner resolvedPlanner =
		plannerFactory.create(props, executor, config, functionCatalog, catalogManager);

	return new StreamTableEnvironmentImpl(
		catalogManager,
		moduleManager,
		functionCatalog,
		config,
		env,
		resolvedPlanner,
		executor,
		settings.isStreamingMode());
}
 
Example #6
Source File: BlinkStreamPythonUdfSqlJob.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * End-to-end job verifying a Python scalar UDF ({@code add_one}) on the
 * blink planner in streaming mode: applies the UDF to the values 1, 2, 3
 * and checks that the collected output equals 2, 3, 4.
 */
public static void main(String[] args) {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	env.setParallelism(1);
	StreamTableEnvironment tEnv = StreamTableEnvironment.create(
		env,
		EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build());
	// Register the Python function add_one from module add_one as a temporary system function.
	tEnv.executeSql("create temporary system function add_one as 'add_one.add_one' language python");

	tEnv.createTemporaryView("source", tEnv.fromValues(1L, 2L, 3L).as("a"));

	Iterator<Row> result = tEnv.executeSql("select add_one(a) as a from source").collect();

	// Drain the result iterator into a list for comparison.
	List<Long> actual = new ArrayList<>();
	while (result.hasNext()) {
		Row r = result.next();
		actual.add((Long) r.getField(0));
	}

	List<Long> expected = Arrays.asList(2L, 3L, 4L);
	if (!actual.equals(expected)) {
		throw new AssertionError(String.format("The output result: %s is not as expected: %s!", actual, expected));
	}
}
 
Example #7
Source File: SQLExampleWordCount.java    From flink-learning with Apache License 2.0 6 votes vote down vote up
/**
 * Streaming SQL word-count example on the blink planner: reads words from the
 * bundled words.txt via a CSV source, groups by word and prints the retract stream.
 */
public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment blinkStreamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    blinkStreamEnv.setParallelism(1);
    EnvironmentSettings blinkStreamSettings = EnvironmentSettings.newInstance()
            .useBlinkPlanner()
            .inStreamingMode()
            .build();
    StreamTableEnvironment blinkStreamTableEnv = StreamTableEnvironment.create(blinkStreamEnv, blinkStreamSettings);

    // NOTE(review): assumes words.txt is on the classpath; getResource() would
    // return null (and this line would NPE) otherwise.
    String path = SQLExampleWordCount.class.getClassLoader().getResource("words.txt").getPath();

    // Single-column CSV source over the words file.
    CsvTableSource csvTableSource = CsvTableSource.builder()
            .field("word", Types.STRING)
            .path(path)
            .build();
    blinkStreamTableEnv.registerTableSource("zhisheng", csvTableSource);
    Table wordWithCount = blinkStreamTableEnv.sqlQuery("SELECT count(word), word FROM zhisheng GROUP BY word");
    // Print (flag, row) pairs of the retracting aggregation result.
    blinkStreamTableEnv.toRetractStream(wordWithCount, Row.class).print();

    blinkStreamTableEnv.execute("Blink Stream SQL Job");
}
 
Example #8
Source File: BlinkBatchPythonUdfSqlJob.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * End-to-end job verifying a Python scalar UDF ({@code add_one}) on the
 * blink planner in batch mode: applies the UDF to the values 1, 2, 3 and
 * checks that the collected output equals 2, 3, 4.
 */
public static void main(String[] args) {
	TableEnvironment tEnv = TableEnvironment.create(
		EnvironmentSettings.newInstance().useBlinkPlanner().inBatchMode().build());
	// Force parallelism 1 so the output order is deterministic.
	tEnv.getConfig().getConfiguration().set(CoreOptions.DEFAULT_PARALLELISM, 1);
	// Register the Python function add_one from module add_one as a temporary system function.
	tEnv.executeSql("create temporary system function add_one as 'add_one.add_one' language python");

	tEnv.createTemporaryView("source", tEnv.fromValues(1L, 2L, 3L).as("a"));

	Iterator<Row> result = tEnv.executeSql("select add_one(a) as a from source").collect();

	// Drain the result iterator into a list for comparison.
	List<Long> actual = new ArrayList<>();
	while (result.hasNext()) {
		Row r = result.next();
		actual.add((Long) r.getField(0));
	}

	List<Long> expected = Arrays.asList(2L, 3L, 4L);
	if (!actual.equals(expected)) {
		throw new AssertionError(String.format("The output result: %s is not as expected: %s!", actual, expected));
	}
}
 
Example #9
Source File: CatalogStatisticsTest.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that the blink batch planner reads table and column statistics
 * from the catalog: T1 is given far smaller statistics than T2, so the
 * explained join plan is expected to broadcast T1.
 */
@Test
public void testGetStatsFromCatalog() throws Exception {
	EnvironmentSettings settings = EnvironmentSettings.newInstance().useBlinkPlanner().inBatchMode().build();
	TableEnvironment tEnv = TableEnvironment.create(settings);
	tEnv.registerTableSource("T1", new TestTableSource(true, tableSchema));
	tEnv.registerTableSource("T2", new TestTableSource(true, tableSchema));
	Catalog catalog = tEnv.getCatalog(tEnv.getCurrentCatalog()).orElse(null);
	assertNotNull(catalog);

	// T1: 100 rows; T2: 100M rows — a large size asymmetry for the optimizer.
	catalog.alterTableStatistics(ObjectPath.fromString("default_database.T1"),
			new CatalogTableStatistics(100, 10, 1000L, 2000L), true);
	catalog.alterTableStatistics(ObjectPath.fromString("default_database.T2"),
			new CatalogTableStatistics(100000000, 1000, 1000000000L, 2000000000L), true);
	catalog.alterTableColumnStatistics(ObjectPath.fromString("default_database.T1"), createColumnStats(), true);
	catalog.alterTableColumnStatistics(ObjectPath.fromString("default_database.T2"), createColumnStats(), true);

	Table table = tEnv.sqlQuery("select * from T1, T2 where T1.s3 = T2.s3");
	String result = tEnv.explain(table);
	// T1 is broadcast side
	String expected = TableTestUtil.readFromResource("/explain/testGetStatsFromCatalog.out");
	assertEquals(expected, TableTestUtil.replaceStageId(result));
}
 
Example #10
Source File: KafkaSourceMain.java    From flink-learning with Apache License 2.0 6 votes vote down vote up
/**
 * Reads strings from a Kafka 0.11 topic, exposes the stream as a one-column
 * ({@code word}) table, counts occurrences per word and writes the retracting
 * result into a custom retract sink.
 */
public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment blinkStreamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    blinkStreamEnv.setParallelism(1);
    EnvironmentSettings blinkStreamSettings = EnvironmentSettings.newInstance()
            .useBlinkPlanner()
            .inStreamingMode()
            .build();
    StreamTableEnvironment blinkStreamTableEnv = StreamTableEnvironment.create(blinkStreamEnv, blinkStreamSettings);

    // Kafka consumer configured from the shared parameter tool; topic name
    // comes from the "kafka.topic" parameter.
    ParameterTool parameterTool = ExecutionEnvUtil.PARAMETER_TOOL;
    Properties properties = KafkaConfigUtil.buildKafkaProps(parameterTool);
    DataStream<String> dataStream = blinkStreamEnv.addSource(new FlinkKafkaConsumer011<>(parameterTool.get("kafka.topic"), new SimpleStringSchema(), properties));
    Table table = blinkStreamTableEnv.fromDataStream(dataStream, "word");
    blinkStreamTableEnv.registerTable("kafkaDataStream", table);

    // Custom retract sink with columns (_count BIGINT, word STRING).
    RetractStreamTableSink<Row> retractStreamTableSink = new MyRetractStreamTableSink(new String[]{"_count", "word"}, new DataType[]{DataTypes.BIGINT(), DataTypes.STRING()});
    blinkStreamTableEnv.registerTableSink("sinkTable", retractStreamTableSink);

    Table wordCount = blinkStreamTableEnv.sqlQuery("SELECT count(word) AS _count,word FROM kafkaDataStream GROUP BY word");

    wordCount.insertInto("sinkTable");

    blinkStreamTableEnv.execute("Blink Kafka Table Source");
}
 
Example #11
Source File: CatalogAPI.java    From flink-learning with Apache License 2.0 6 votes vote down vote up
/**
 * Demonstrates the catalog API: switching the current catalog and database,
 * scanning a table from a non-current catalog by its fully qualified path,
 * and listing available catalogs, databases and tables.
 */
public static void main(String[] args) {
    StreamExecutionEnvironment blinkStreamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    blinkStreamEnv.setParallelism(1);
    EnvironmentSettings blinkStreamSettings = EnvironmentSettings.newInstance()
            .useBlinkPlanner()
            .inStreamingMode()
            .build();
    StreamTableEnvironment blinkStreamTableEnv = StreamTableEnvironment.create(blinkStreamEnv, blinkStreamSettings);

    //Changing the Current Catalog And Database
    blinkStreamTableEnv.useCatalog("zhisheng");
    blinkStreamTableEnv.useDatabase("zhisheng");

    // Fully qualified scan: (catalog, database, table).
    blinkStreamTableEnv.scan("not_the_current_catalog", "not_the_current_db", "zhisheng");

    //List Available Catalogs/Databases/Tables
    blinkStreamTableEnv.listCatalogs();
    blinkStreamTableEnv.listDatabases();
    blinkStreamTableEnv.listTables();

}
 
Example #12
Source File: CatalogTypes.java    From flink-learning with Apache License 2.0 6 votes vote down vote up
/**
 * Demonstrates registering catalogs: a {@code GenericInMemoryCatalog} is
 * registered under the name "zhisheng"; a {@code HiveCatalog} registration
 * is shown commented out.
 */
public static void main(String[] args) {
        StreamExecutionEnvironment blinkStreamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        blinkStreamEnv.setParallelism(1);
        EnvironmentSettings blinkStreamSettings = EnvironmentSettings.newInstance()
                .useBlinkPlanner()
                .inStreamingMode()
                .build();
        StreamTableEnvironment blinkStreamTableEnv = StreamTableEnvironment.create(blinkStreamEnv, blinkStreamSettings);

        blinkStreamTableEnv.registerCatalog("zhisheng", new GenericInMemoryCatalog("zhisheng"));
        //GenericInMemoryCatalog is the default catalog implementation


        //HiveCatalog: requires the Hive connector and Hive dependencies on the classpath
//        blinkStreamTableEnv.registerCatalog("zhisheng", new HiveCatalog("zhisheng", "zhisheng", "~/zhisheng/hive/conf", "2.3.4"));


    }
 
Example #13
Source File: ExecutionEntry.java    From flink with Apache License 2.0 6 votes vote down vote up
/**
 * Derives {@link EnvironmentSettings} from this execution entry: execution
 * mode (streaming/batch) plus the configured planner, defaulting to the
 * legacy ("old") planner when none is specified.
 */
public EnvironmentSettings getEnvironmentSettings() {
	final EnvironmentSettings.Builder builder = EnvironmentSettings.newInstance();

	// Execution mode: streaming takes precedence over batch.
	if (inStreamingMode()) {
		builder.inStreamingMode();
	} else if (inBatchMode()) {
		builder.inBatchMode();
	}

	// Planner selection falls back to the legacy planner when unset;
	// unknown planner names leave the builder default untouched.
	final String plannerName =
		properties.getOptionalString(EXECUTION_PLANNER).orElse(EXECUTION_PLANNER_VALUE_OLD);
	if (EXECUTION_PLANNER_VALUE_OLD.equals(plannerName)) {
		builder.useOldPlanner();
	} else if (EXECUTION_PLANNER_VALUE_BLINK.equals(plannerName)) {
		builder.useBlinkPlanner();
	}

	return builder.build();
}
 
Example #14
Source File: SqlScriptExecutor.java    From flink-tutorials with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a blink-planner streaming {@link StreamTableEnvironment} on top of
 * a fresh execution environment configured for event time.
 */
public static StreamTableEnvironment createTableEnv() {
	final EnvironmentSettings blinkStreaming =
			EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build();

	final StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
	// Event-time semantics for all time-based operations.
	streamEnv.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

	return StreamTableEnvironment.create(streamEnv, blinkStreaming);
}
 
Example #15
Source File: TableExampleWordCount.java    From flink-learning with Apache License 2.0 5 votes vote down vote up
/**
 * Table-API word count: reads words.txt through the connect/descriptor API
 * (OldCsv format), groups by word and prints the retracting count stream.
 */
public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment blinkStreamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    blinkStreamEnv.setParallelism(1);
    EnvironmentSettings blinkStreamSettings = EnvironmentSettings.newInstance()
            .useBlinkPlanner()
            .inStreamingMode()
            .build();
    StreamTableEnvironment blinkStreamTableEnv = StreamTableEnvironment.create(blinkStreamEnv, blinkStreamSettings);

    String path = TableExampleWordCount.class.getClassLoader().getResource("words.txt").getPath();
    // NOTE(review): "FlieSourceTable" looks like a typo for "FileSourceTable",
    // but it is used consistently below, so behavior is unaffected.
    blinkStreamTableEnv
            .connect(new FileSystem().path(path))
            .withFormat(new OldCsv().field("word", Types.STRING).lineDelimiter("\n"))
            .withSchema(new Schema().field("word", Types.STRING))
            .inAppendMode()
            .registerTableSource("FlieSourceTable");

    Table wordWithCount = blinkStreamTableEnv.scan("FlieSourceTable")
            .groupBy("word")
            .select("word,count(word) as _count");
    blinkStreamTableEnv.toRetractStream(wordWithCount, Row.class).print();

    //The true/false flag in the printed output comes from the retract stream:
    //the sink deletes before inserting — false marks retraction of the previous
    //record, true marks insertion of the new one.

    blinkStreamTableEnv.execute("Blink Stream SQL Job");
}
 
Example #16
Source File: BlinkExecutorFactory.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a corresponding {@link ExecutorBase}.
 *
 * <p>Streaming mode is selected unless the settings explicitly disable
 * {@link EnvironmentSettings#STREAMING_MODE} (defaults to {@code "true"}).
 *
 * @param properties Static properties of the {@link Executor}, the same that were used for factory lookup.
 * @param executionEnvironment a {@link StreamExecutionEnvironment} to use while executing Table programs.
 * @return instance of a {@link Executor}
 */
public Executor create(Map<String, String> properties, StreamExecutionEnvironment executionEnvironment) {
	// Boolean.parseBoolean avoids the needless boxing of Boolean.valueOf.
	if (Boolean.parseBoolean(properties.getOrDefault(EnvironmentSettings.STREAMING_MODE, "true"))) {
		return new StreamExecutor(executionEnvironment);
	} else {
		return new BatchExecutor(executionEnvironment);
	}
}
 
Example #17
Source File: BlinkPlannerFactory.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Creates the blink {@code Planner} matching the requested execution mode:
 * a {@code StreamPlanner} when {@link EnvironmentSettings#STREAMING_MODE}
 * is enabled (the default), otherwise a {@code BatchPlanner}.
 */
@Override
public Planner create(
	Map<String, String> properties,
	Executor executor,
	TableConfig tableConfig,
	FunctionCatalog functionCatalog,
	CatalogManager catalogManager) {
	// Boolean.parseBoolean avoids the needless boxing of Boolean.valueOf.
	if (Boolean.parseBoolean(properties.getOrDefault(EnvironmentSettings.STREAMING_MODE, "true"))) {
		return new StreamPlanner(executor, tableConfig, functionCatalog, catalogManager);
	} else {
		return new BatchPlanner(executor, tableConfig, functionCatalog, catalogManager);
	}
}
 
Example #18
Source File: BlinkExecutorFactory.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a corresponding {@link ExecutorBase}.
 *
 * @param properties Static properties of the {@link Executor}, the same that were used for factory lookup.
 * @param executionEnvironment a {@link StreamExecutionEnvironment} to use while executing Table programs.
 * @return instance of a {@link Executor}
 */
public Executor create(Map<String, String> properties, StreamExecutionEnvironment executionEnvironment) {
	if (Boolean.valueOf(properties.getOrDefault(EnvironmentSettings.STREAMING_MODE, "true"))) {
		return new StreamExecutor(executionEnvironment);
	} else {
		return new BatchExecutor(executionEnvironment);
	}
}
 
Example #19
Source File: StreamTableEnvironmentImpl.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a {@link StreamTableEnvironmentImpl} for streaming use, building
 * the catalog and function catalogs and discovering the executor and planner
 * advertised by the given settings.
 *
 * @throws TableException if the settings are not configured for streaming mode
 */
public static StreamTableEnvironment create(
		StreamExecutionEnvironment executionEnvironment,
		EnvironmentSettings settings,
		TableConfig tableConfig) {

	if (!settings.isStreamingMode()) {
		throw new TableException(
			"StreamTableEnvironment can not run in batch mode for now, please use TableEnvironment.");
	}

	// Catalog manager starts with one in-memory catalog named per the settings.
	CatalogManager catalogManager = new CatalogManager(
		settings.getBuiltInCatalogName(),
		new GenericInMemoryCatalog(settings.getBuiltInCatalogName(), settings.getBuiltInDatabaseName()));

	FunctionCatalog functionCatalog = new FunctionCatalog(catalogManager);

	// Executor and planner are resolved through their factory properties.
	Map<String, String> executorProperties = settings.toExecutorProperties();
	Executor executor = lookupExecutor(executorProperties, executionEnvironment);

	Map<String, String> plannerProperties = settings.toPlannerProperties();
	Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
		.create(plannerProperties, executor, tableConfig, functionCatalog, catalogManager);

	return new StreamTableEnvironmentImpl(
		catalogManager,
		functionCatalog,
		tableConfig,
		executionEnvironment,
		planner,
		executor,
		settings.isStreamingMode()
	);
}
 
Example #20
Source File: MLEnvironmentTest.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * MLEnvironment must hand back the very same stream and table environments
 * it was constructed with.
 */
@Test
public void testConstructWithStreamEnv() {
	StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
	StreamTableEnvironment tableEnv = StreamTableEnvironment.create(
			streamEnv, EnvironmentSettings.newInstance().useOldPlanner().build());

	MLEnvironment mlEnv = new MLEnvironment(streamEnv, tableEnv);

	// Identity (not just equality) is asserted for both environments.
	Assert.assertSame(mlEnv.getStreamExecutionEnvironment(), streamEnv);
	Assert.assertSame(mlEnv.getStreamTableEnvironment(), tableEnv);
}
 
Example #21
Source File: SqlTest.java    From flink-tutorials with Apache License 2.0 5 votes vote down vote up
/**
 * Wraps the given execution environment in a blink-planner streaming
 * {@link StreamTableEnvironment}.
 */
public static StreamTableEnvironment createTableEnv(StreamExecutionEnvironment env) {
	final EnvironmentSettings blinkStreaming =
			EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build();
	return StreamTableEnvironment.create(env, blinkStreaming);
}
 
Example #22
Source File: SqlSubmit.java    From flink-sql-submit with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a blink-planner streaming table environment, reads the SQL script
 * from the workspace, executes each parsed command in order, then submits
 * the job as "SQL Job".
 */
private void run() throws Exception {
    this.tEnv = TableEnvironment.create(
            EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build());

    // Parse the script into individual commands and run them sequentially.
    List<String> scriptLines = Files.readAllLines(Paths.get(workSpace + "/" + sqlFilePath));
    for (SqlCommandCall call : SqlCommandParser.parse(scriptLines)) {
        callCommand(call);
    }
    tEnv.execute("SQL Job");
}
 
Example #23
Source File: FlinkSQLDistinctExample.java    From flink-learning with Apache License 2.0 5 votes vote down vote up
/**
 * Streaming SQL demo: declares a Kafka-backed {@code user_behavior} table via
 * DDL, then runs a per-user COUNT query and a DISTINCT query, printing both
 * retract streams.
 */
public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment blinkStreamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
    blinkStreamEnv.setParallelism(1);
    EnvironmentSettings blinkStreamSettings = EnvironmentSettings.newInstance()
            .useBlinkPlanner()
            .inStreamingMode()
            .build();
    StreamTableEnvironment blinkStreamTableEnv = StreamTableEnvironment.create(blinkStreamEnv, blinkStreamSettings);

    // Kafka 0.11 JSON source starting from the latest offset on localhost.
    String ddlSource = "CREATE TABLE user_behavior (\n" +
            "    user_id BIGINT,\n" +
            "    item_id BIGINT,\n" +
            "    category_id BIGINT,\n" +
            "    behavior STRING,\n" +
            "    ts TIMESTAMP(3)\n" +
            ") WITH (\n" +
            "    'connector.type' = 'kafka',\n" +
            "    'connector.version' = '0.11',\n" +
            "    'connector.topic' = 'user_behavior',\n" +
            "    'connector.startup-mode' = 'latest-offset',\n" +
            "    'connector.properties.zookeeper.connect' = 'localhost:2181',\n" +
            "    'connector.properties.bootstrap.servers' = 'localhost:9092',\n" +
            "    'format.type' = 'json'\n" +
            ")";

    String countSql = "select user_id, count(user_id) from user_behavior group by user_id";

    blinkStreamTableEnv.sqlUpdate(ddlSource);
    Table countTable = blinkStreamTableEnv.sqlQuery(countSql);
    blinkStreamTableEnv.toRetractStream(countTable, Row.class).print();

    // The distinct stream is printed with the "==" prefix to distinguish it.
    String distinctSql = "select distinct(user_id) from user_behavior";
    Table distinctTable = blinkStreamTableEnv.sqlQuery(distinctSql);
    blinkStreamTableEnv.toRetractStream(distinctTable, Row.class).print("==");

    blinkStreamTableEnv.execute("Blink Stream SQL count/distinct demo");
}
 
Example #24
Source File: SpendReportTest.java    From flink-playgrounds with Apache License 2.0 5 votes vote down vote up
/**
 * Batch test for the spend-report walkthrough: feeds ten in-memory
 * transactions through {@code SpendReport.report} and checks the expected
 * per-account totals. While the walkthrough is unimplemented
 * ({@code UnimplementedException}) the test is skipped via an assumption.
 */
@Test
public void testReport() {
    EnvironmentSettings settings = EnvironmentSettings.newInstance().inBatchMode().build();
    TableEnvironment tEnv = TableEnvironment.create(settings);

    // Five accounts, two transactions each, all within the same hour window.
    Table transactions =
            tEnv.fromValues(
                    DataTypes.ROW(
                            DataTypes.FIELD("account_id", DataTypes.BIGINT()),
                            DataTypes.FIELD("amount", DataTypes.BIGINT()),
                            DataTypes.FIELD("transaction_time", DataTypes.TIMESTAMP(3))),
                    Row.of(1, 188, DATE_TIME.plusMinutes(12)),
                    Row.of(2, 374, DATE_TIME.plusMinutes(47)),
                    Row.of(3, 112, DATE_TIME.plusMinutes(36)),
                    Row.of(4, 478, DATE_TIME.plusMinutes(3)),
                    Row.of(5, 208, DATE_TIME.plusMinutes(8)),
                    Row.of(1, 379, DATE_TIME.plusMinutes(53)),
                    Row.of(2, 351, DATE_TIME.plusMinutes(32)),
                    Row.of(3, 320, DATE_TIME.plusMinutes(31)),
                    Row.of(4, 259, DATE_TIME.plusMinutes(19)),
                    Row.of(5, 273, DATE_TIME.plusMinutes(42)));

    try {
        TableResult results = SpendReport.report(transactions).execute();

        // Expected: sum of each account's two amounts, keyed by account and hour.
        MatcherAssert.assertThat(
                materialize(results),
                Matchers.containsInAnyOrder(
                        Row.of(1L, DATE_TIME, 567L),
                        Row.of(2L, DATE_TIME, 725L),
                        Row.of(3L, DATE_TIME, 432L),
                        Row.of(4L, DATE_TIME, 737L),
                        Row.of(5L, DATE_TIME, 481L)));
    } catch (UnimplementedException e) {
        Assume.assumeNoException("The walkthrough has not been implemented", e);
    }
}
 
Example #25
Source File: JdbcDynamicTableSinkITCase.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Batch-mode end-to-end test: creates a JDBC sink table via DDL (with small
 * buffer-flush settings), inserts five literal rows — including a duplicate —
 * and verifies the database contents, duplicates preserved.
 */
@Test
public void testBatchSink() throws Exception {
	EnvironmentSettings bsSettings = EnvironmentSettings.newInstance()
		.useBlinkPlanner().inBatchMode().build();
	TableEnvironment tEnv = TableEnvironment.create(bsSettings);

	// Flush every 2 rows or 300ms, retry up to 4 times on failure.
	tEnv.executeSql(
		"CREATE TABLE USER_RESULT(" +
			"NAME VARCHAR," +
			"SCORE BIGINT" +
			") WITH ( " +
			"'connector' = 'jdbc'," +
			"'url'='" + DB_URL + "'," +
			"'table-name' = '" + OUTPUT_TABLE3 + "'," +
			"'sink.buffer-flush.max-rows' = '2'," +
			"'sink.buffer-flush.interval' = '300ms'," +
			"'sink.max-retries' = '4'" +
			")");

	TableResult tableResult  = tEnv.executeSql("INSERT INTO USER_RESULT\n" +
		"SELECT user_name, score " +
		"FROM (VALUES (1, 'Bob'), (22, 'Tom'), (42, 'Kim'), " +
		"(42, 'Kim'), (1, 'Bob')) " +
		"AS UserCountTable(score, user_name)");
	// Block until the insert job finishes before checking the database.
	tableResult.getJobClient().get().getJobExecutionResult(Thread.currentThread().getContextClassLoader()).get();

	check(new Row[] {
		Row.of("Bob", 1),
		Row.of("Tom", 22),
		Row.of("Kim", 42),
		Row.of("Kim", 42),
		Row.of("Bob", 1)
	}, DB_URL, OUTPUT_TABLE3, new String[]{"NAME", "SCORE"});
}
 
Example #26
Source File: CustomTableSinkMain.java    From flink-learning with Apache License 2.0 5 votes vote down vote up
/**
 * Custom sink demo: reads words from a CSV source, counts occurrences per
 * word, and writes the retracting result into a custom
 * {@code MyRetractStreamTableSink} with columns (c, word).
 */
public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment blinkStreamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        blinkStreamEnv.setParallelism(1);
        EnvironmentSettings blinkStreamSettings = EnvironmentSettings.newInstance()
                .useBlinkPlanner()
                .inStreamingMode()
                .build();
        StreamTableEnvironment blinkStreamTableEnv = StreamTableEnvironment.create(blinkStreamEnv, blinkStreamSettings);

        String path = SQLExampleWordCount.class.getClassLoader().getResource("words.txt").getPath();

        CsvTableSource csvTableSource = CsvTableSource.builder()
                .field("word", Types.STRING)
                .path(path)
                .build();
        blinkStreamTableEnv.registerTableSource("zhisheng", csvTableSource);

        RetractStreamTableSink<Row> retractStreamTableSink = new MyRetractStreamTableSink(new String[]{"c", "word"}, new TypeInformation[]{Types.LONG, Types.STRING});
        // or alternatively, using DataTypes instead of TypeInformation:
//        RetractStreamTableSink<Row> retractStreamTableSink = new MyRetractStreamTableSink(new String[]{"c", "word"}, new DataType[]{DataTypes.BIGINT(), DataTypes.STRING()});
        blinkStreamTableEnv.registerTableSink("sinkTable", retractStreamTableSink);

        Table wordWithCount = blinkStreamTableEnv.sqlQuery("SELECT count(word) AS c, word FROM zhisheng GROUP BY word");

        wordWithCount.insertInto("sinkTable");
        blinkStreamTableEnv.execute("Blink Custom Table Sink");
    }
 
Example #27
Source File: HiveCatalogITCase.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Registers a HiveCatalog, creates filesystem-backed CSV source and sink
 * tables via DDL (the source has a computed ts column with a watermark),
 * runs a 5-second tumbling-window aggregation and checks the CSV output.
 */
@Test
public void testReadWriteCsv() throws Exception {
	// similar to CatalogTableITCase::testReadWriteCsvUsingDDL but uses HiveCatalog
	EnvironmentSettings settings = EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build();
	TableEnvironment tableEnv = TableEnvironment.create(settings);
	tableEnv.getConfig().getConfiguration().setInteger(TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM, 1);

	tableEnv.registerCatalog("myhive", hiveCatalog);
	tableEnv.useCatalog("myhive");

	String srcPath = this.getClass().getResource("/csv/test3.csv").getPath();

	// Source: ts is derived from ts6 and carries the watermark.
	tableEnv.executeSql("CREATE TABLE src (" +
			"price DECIMAL(10, 2),currency STRING,ts6 TIMESTAMP(6),ts AS CAST(ts6 AS TIMESTAMP(3)),WATERMARK FOR ts AS ts) " +
			String.format("WITH ('connector.type' = 'filesystem','connector.path' = 'file://%s','format.type' = 'csv')", srcPath));

	String sinkPath = new File(tempFolder.newFolder(), "csv-order-sink").toURI().toString();

	tableEnv.executeSql("CREATE TABLE sink (" +
			"window_end TIMESTAMP(3),max_ts TIMESTAMP(6),counter BIGINT,total_price DECIMAL(10, 2)) " +
			String.format("WITH ('connector.type' = 'filesystem','connector.path' = '%s','format.type' = 'csv')", sinkPath));

	// Insert blocks until the job completes, then the file can be read back.
	TableEnvUtil.execInsertSqlAndWaitResult(tableEnv, "INSERT INTO sink " +
			"SELECT TUMBLE_END(ts, INTERVAL '5' SECOND),MAX(ts6),COUNT(*),MAX(price) FROM src " +
			"GROUP BY TUMBLE(ts, INTERVAL '5' SECOND)");

	String expected = "2019-12-12 00:00:05.0,2019-12-12 00:00:04.004001,3,50.00\n" +
			"2019-12-12 00:00:10.0,2019-12-12 00:00:06.006001,2,5.33\n";
	assertEquals(expected, FileUtils.readFileUtf8(new File(new URI(sinkPath))));
}
 
Example #28
Source File: PostgresCatalogITCase.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Per-test setup: builds a blink streaming table environment with default
 * parallelism 1, then registers and activates the Postgres catalog under test.
 */
@Before
public void setup() {
	this.tEnv = TableEnvironment.create(
		EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build());
	tEnv.getConfig().getConfiguration().setInteger(TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM.key(), 1);

	// use PG catalog
	tEnv.registerCatalog(TEST_CATALOG_NAME, catalog);
	tEnv.useCatalog(TEST_CATALOG_NAME);
}
 
Example #29
Source File: RedisDescriptorTest.java    From bahir-flink with Apache License 2.0 5 votes vote down vote up
/**
 * Verifies the Redis table descriptor: streams (String, Long) rows into a
 * Redis sink registered through the connect/descriptor API, using the
 * INCRBY_EX command in cluster mode with a TTL.
 */
@Test
public void testRedisDescriptor() throws Exception {
    DataStreamSource<Row> source = (DataStreamSource<Row>) env.addSource(new TestSourceFunctionString())
            .returns(new RowTypeInfo(TypeInformation.of(String.class), TypeInformation.of(Long.class)));

    // Old planner, streaming mode.
    EnvironmentSettings settings = EnvironmentSettings
            .newInstance()
            .useOldPlanner()
            .inStreamingMode()
            .build();
    StreamTableEnvironment tableEnvironment = StreamTableEnvironment.create(env, settings);
    tableEnvironment.registerDataStream("t1", source, "k, v");

    // Redis descriptor: cluster mode, INCRBY_EX with a 100000s TTL.
    Redis redis = new Redis()
            .mode(RedisValidator.REDIS_CLUSTER)
            .command(RedisCommand.INCRBY_EX.name())
            .ttl(100000)
            .property(RedisValidator.REDIS_NODES, REDIS_HOST+ ":" + REDIS_PORT);

    tableEnvironment
            .connect(redis).withSchema(new Schema()
            .field("k", TypeInformation.of(String.class))
            .field("v", TypeInformation.of(Long.class)))
            .registerTableSink("redis");


    tableEnvironment.sqlUpdate("insert into redis select k, v from t1");
    env.execute("Test Redis Table");
}
 
Example #30
Source File: TableEnvFactory.java    From zeppelin with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a blink-planner {@link TableEnvironment} by reflectively
 * instantiating {@code StreamTableEnvironmentImpl}, whose package moved
 * between Flink 1.10 and later versions.
 *
 * @param settings environment settings used to discover executor and planner
 * @return the constructed table environment
 * @throws TableException wrapping any reflective or factory failure
 */
public TableEnvironment createJavaBlinkBatchTableEnvironment(
        EnvironmentSettings settings) {
  try {
    final Map<String, String> executorProperties = settings.toExecutorProperties();
    executor = lookupExecutor(executorProperties, senv.getJavaEnv());
    final Map<String, String> plannerProperties = settings.toPlannerProperties();
    final Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
            .create(plannerProperties, executor, tblConfig, blinkFunctionCatalog, catalogManager);

    // Idiom fix: use wildcard generics instead of raw Class/Constructor types.
    Class<?> clazz = null;
    if (flinkVersion.isFlink110()) {
      clazz = Class
              .forName("org.apache.flink.table.api.java.internal.StreamTableEnvironmentImpl");
    } else {
      clazz = Class
              .forName("org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl");
    }
    Constructor<?> constructor = clazz.getConstructor(
                    CatalogManager.class,
                    ModuleManager.class,
                    FunctionCatalog.class,
                    TableConfig.class,
                    StreamExecutionEnvironment.class,
                    Planner.class,
                    Executor.class,
                    boolean.class);
    return (TableEnvironment) constructor.newInstance(
            catalogManager,
            moduleManager,
            blinkFunctionCatalog,
            tblConfig,
            senv.getJavaEnv(),
            planner,
            executor,
            settings.isStreamingMode());
  } catch (Exception e) {
    LOGGER.info(ExceptionUtils.getStackTrace(e));
    throw new TableException("Fail to createJavaBlinkBatchTableEnvironment", e);
  }
}