org.apache.flink.table.client.gateway.SessionContext Java Examples

The following examples show how to use org.apache.flink.table.client.gateway.SessionContext. They are taken from open source projects; the source project, author, file, and license are listed above each example.
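Before the individual examples, the snippet below sketches the typical SessionContext lifecycle as it appears in the newer apache/flink examples on this page: the context is created with a session name and an Environment, registered with an Executor via openSession, addressed through the returned session id, and released with closeSession. This is only a minimal sketch; the class and method names are illustrative, and how the Executor instance is obtained (the tests below use helpers such as createDefaultExecutor) is left out.

import java.util.List;

import org.apache.flink.table.client.config.Environment;
import org.apache.flink.table.client.gateway.Executor;
import org.apache.flink.table.client.gateway.SessionContext;

public class SessionContextUsageSketch {

	// Opens a fresh session on the given executor, lists its databases, and closes the session again.
	static List<String> listDatabasesInFreshSession(Executor executor) {
		final SessionContext session = new SessionContext("my-session", new Environment());
		final String sessionId = executor.openSession(session); // register the session and obtain its id
		try {
			// all further calls address the session by the returned id
			return executor.listDatabases(sessionId);
		} finally {
			executor.closeSession(sessionId); // release the resources held by the session
		}
	}
}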
Example #1
Source Project: Flink-CEPplus   Author: ljygz   File: LocalExecutor.java    License: Apache License 2.0
@Override
public List<String> completeStatement(SessionContext session, String statement, int position) {
	final TableEnvironment tableEnv = getOrCreateExecutionContext(session)
			.createEnvironmentInstance()
			.getTableEnvironment();

	try {
		return Arrays.asList(tableEnv.getCompletionHints(statement, position));
	} catch (Throwable t) {
		// catch everything such that the query does not crash the executor
		if (LOG.isDebugEnabled()) {
			LOG.debug("Could not complete statement at " + position + ":" + statement, t);
		}
		return Collections.emptyList();
	}
}
 
Example #2
Source Project: flink   Author: flink-tpc-ds   File: LocalExecutorITCase.java    License: Apache License 2.0
private List<String> retrieveChangelogResult(
		Executor executor,
		SessionContext session,
		String resultID) throws InterruptedException {

	final List<String> actualResults = new ArrayList<>();
	while (true) {
		Thread.sleep(50); // slow the processing down
		final TypedResult<List<Tuple2<Boolean, Row>>> result =
				executor.retrieveResultChanges(session, resultID);
		if (result.getType() == TypedResult.ResultType.PAYLOAD) {
			for (Tuple2<Boolean, Row> change : result.getPayload()) {
				actualResults.add(change.toString());
			}
		} else if (result.getType() == TypedResult.ResultType.EOS) {
			break;
		}
	}
	return actualResults;
}
 
Example #3
Source Project: flink   Author: apache   File: LocalExecutorITCase.java    License: Apache License 2.0
@Test
public void testCompleteStatement() throws Exception {
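	// completion should suggest matching table names, SQL keywords, and column names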
	final Executor executor = createDefaultExecutor(clusterClient);
	final SessionContext session = new SessionContext("test-session", new Environment());
	String sessionId = executor.openSession(session);
	assertEquals("test-session", sessionId);

	final List<String> expectedTableHints = Arrays.asList(
		"default_catalog.default_database.TableNumber1",
		"default_catalog.default_database.TableNumber2",
		"default_catalog.default_database.TableSourceSink");
	assertEquals(expectedTableHints, executor.completeStatement(sessionId, "SELECT * FROM Ta", 16));

	final List<String> expectedClause = Collections.singletonList("WHERE");
	assertEquals(expectedClause, executor.completeStatement(sessionId, "SELECT * FROM TableNumber2 WH", 29));

	final List<String> expectedField = Arrays.asList("IntegerField1");
	assertEquals(expectedField, executor.completeStatement(sessionId, "SELECT * FROM TableNumber1 WHERE Inte", 37));
	executor.closeSession(sessionId);
}
 
Example #4
Source Project: Flink-CEPplus   Author: ljygz   File: LocalExecutorITCase.java    License: Apache License 2.0
private List<String> retrieveTableResult(
		Executor executor,
		SessionContext session,
		String resultID) throws InterruptedException {

	final List<String> actualResults = new ArrayList<>();
	while (true) {
		Thread.sleep(50); // slow the processing down
		final TypedResult<Integer> result = executor.snapshotResult(session, resultID, 2);
		if (result.getType() == TypedResult.ResultType.PAYLOAD) {
			actualResults.clear();
			IntStream.rangeClosed(1, result.getPayload()).forEach((page) -> {
				for (Row row : executor.retrieveResultPage(resultID, page)) {
					actualResults.add(row.toString());
				}
			});
		} else if (result.getType() == TypedResult.ResultType.EOS) {
			break;
		}
	}

	return actualResults;
}
 
Example #5
Source Project: flink   Author: apache   File: CliClientTest.java    License: Apache License 2.0
@Test
public void testUseNonExistingDB() throws Exception {
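	// the use-database call is mocked to fail; the CLI must not crash and should have invoked it exactly once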
	TestingExecutor executor = new TestingExecutorBuilder()
		.setUseDatabaseConsumer((ignored1, ignored2) -> {
			throw new SqlExecutionException("mocked exception");
		})
		.build();
	InputStream inputStream = new ByteArrayInputStream("use db;\n".getBytes());
	SessionContext session = new SessionContext("test-session", new Environment());
	String sessionId = executor.openSession(session);

	CliClient cliClient = null;
	try (Terminal terminal = new DumbTerminal(inputStream, new MockOutputStream())) {
		cliClient = new CliClient(terminal, sessionId, executor, File.createTempFile("history", "tmp").toPath());

		cliClient.open();
		assertThat(executor.getNumUseDatabaseCalls(), is(1));
	} finally {
		if (cliClient != null) {
			cliClient.close();
		}
	}
}
 
Example #6
Source Project: flink   Author: flink-tpc-ds   File: CliClientTest.java    License: Apache License 2.0
@Test
public void testUseNonExistingDB() throws Exception {
	Executor executor = mock(Executor.class);
	doThrow(new SqlExecutionException("mocked exception")).when(executor).useDatabase(any(), any());
	InputStream inputStream = new ByteArrayInputStream("use db;\n".getBytes());
	// don't care about the output
	OutputStream outputStream = new OutputStream() {
		@Override
		public void write(int b) throws IOException {
		}
	};
	CliClient cliClient = null;
	try (Terminal terminal = new DumbTerminal(inputStream, outputStream)) {
		cliClient = new CliClient(terminal, new SessionContext("test-session", new Environment()), executor);
		cliClient.open();
		verify(executor).useDatabase(any(), any());
	} finally {
		if (cliClient != null) {
			cliClient.close();
		}
	}
}
 
Example #7
Source Project: flink   Author: flink-tpc-ds   File: LocalExecutor.java    License: Apache License 2.0
@Override
public String explainStatement(SessionContext session, String statement) throws SqlExecutionException {
	final ExecutionContext<?> context = getOrCreateExecutionContext(session);
	final TableEnvironment tableEnv = context
		.createEnvironmentInstance()
		.getTableEnvironment();

	// translate
	try {
		final Table table = createTable(context, tableEnv, statement);
		return context.wrapClassLoader(() -> tableEnv.explain(table));
	} catch (Throwable t) {
		// catch everything such that the query does not crash the executor
		throw new SqlExecutionException("Invalid SQL statement.", t);
	}
}
 
Example #8
Source Project: flink   Author: apache   File: ExecutionContextTest.java    License: Apache License 2.0
@Test
public void testInitCatalogs() throws Exception {
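	// register a catalog of a custom type and verify that the ExecutionContext can still be built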
	final Map<String, String> replaceVars = createDefaultReplaceVars();
	Environment env = EnvironmentFileUtil.parseModified(DEFAULTS_ENVIRONMENT_FILE, replaceVars);

	Map<String, Object> catalogProps = new HashMap<>();
	catalogProps.put("name", "test");
	catalogProps.put("type", "test_cl_catalog");
	env.getCatalogs().clear();
	env.getCatalogs().put("test", CatalogEntry.create(catalogProps));
	Configuration flinkConfig = new Configuration();
	ExecutionContext.builder(env,
			new SessionContext("test-session", new Environment()),
			Collections.emptyList(),
			flinkConfig,
			new DefaultClusterClientServiceLoader(),
			new Options(),
			Collections.singletonList(new DefaultCLI(flinkConfig))).build();
}
 
Example #9
Source Project: flink   Author: apache   File: CliClientTest.java    License: Apache License 2.0
/**
 * Executes a SQL statement and returns the terminal output as a string.
 */
private String testExecuteSql(TestingExecutor executor, String sql) throws IOException {
	InputStream inputStream = new ByteArrayInputStream((sql + "\n").getBytes());
	ByteArrayOutputStream outputStream = new ByteArrayOutputStream(256);
	CliClient cliClient = null;
	SessionContext sessionContext = new SessionContext("test-session", new Environment());
	String sessionId = executor.openSession(sessionContext);

	try (Terminal terminal = new DumbTerminal(inputStream, outputStream)) {
		cliClient = new CliClient(terminal, sessionId, executor, File.createTempFile("history", "tmp").toPath());
		cliClient.open();
		return new String(outputStream.toByteArray());
	} finally {
		if (cliClient != null) {
			cliClient.close();
		}
	}
}
 
Example #10
Source Project: flink   Author: flink-tpc-ds   File: LocalExecutorITCase.java    License: Apache License 2.0
private void executeStreamQueryTable(
		Map<String, String> replaceVars,
		String query,
		List<String> expectedResults) throws Exception {

	final Executor executor = createModifiedExecutor(clusterClient, replaceVars);
	final SessionContext session = new SessionContext("test-session", new Environment());

	try {
		// start job and retrieval
		final ResultDescriptor desc = executor.executeQuery(session, query);

		assertTrue(desc.isMaterialized());

		final List<String> actualResults = retrieveTableResult(executor, session, desc.getResultId());

		TestBaseUtils.compareResultCollections(expectedResults, actualResults, Comparator.naturalOrder());
	} finally {
		executor.stop(session);
	}
}
 
Example #11
Source Project: flink   Author: flink-tpc-ds   File: CliClientTest.java    License: Apache License 2.0
@Test
public void testUseNonExistingCatalog() throws Exception {
	Executor executor = mock(Executor.class);
	doThrow(new SqlExecutionException("mocked exception")).when(executor).useCatalog(any(), any());
	InputStream inputStream = new ByteArrayInputStream("use catalog cat;\n".getBytes());
	// don't care about the output
	OutputStream outputStream = new OutputStream() {
		@Override
		public void write(int b) throws IOException {
		}
	};
	CliClient cliClient = null;
	try (Terminal terminal = new DumbTerminal(inputStream, outputStream)) {
		cliClient = new CliClient(terminal, new SessionContext("test-session", new Environment()), executor);
		cliClient.open();
		verify(executor).useCatalog(any(), any());
	} finally {
		if (cliClient != null) {
			cliClient.close();
		}
	}
}
 
Example #12
Source Project: flink   Author: flink-tpc-ds   File: LocalExecutor.java    License: Apache License 2.0
@Override
public void useCatalog(SessionContext session, String catalogName) throws SqlExecutionException {
	final ExecutionContext<?> context = getOrCreateExecutionContext(session);
	final TableEnvironment tableEnv = context
		.createEnvironmentInstance()
		.getTableEnvironment();

	context.wrapClassLoader(() -> {
		// Rely on TableEnvironment/CatalogManager to validate input
		try {
			tableEnv.useCatalog(catalogName);
		} catch (CatalogException e) {
			throw new SqlExecutionException("Failed to switch to catalog " + catalogName, e);
		}
		session.setCurrentCatalog(catalogName);
		session.setCurrentDatabase(tableEnv.getCurrentDatabase());
		return null;
	});
}
 
Example #13
Source Project: Flink-CEPplus   Author: ljygz   File: SqlClient.java    License: Apache License 2.0
private static void validateEnvironment(SessionContext context, Executor executor) {
	System.out.print("Validating current environment...");
	try {
		executor.validateSession(context);
		System.out.println("done.");
	} catch (SqlExecutionException e) {
		throw new SqlClientException(
			"The configured environment is invalid. Please check your environment files again.", e);
	}
}
 
Example #14
Source Project: flink   Author: flink-tpc-ds   File: LocalExecutor.java    License: Apache License 2.0
@Override
public TypedResult<Integer> snapshotResult(SessionContext session, String resultId, int pageSize) throws SqlExecutionException {
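	// snapshots are only available for materialized (table mode) results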
	final DynamicResult<?> result = resultStore.getResult(resultId);
	if (result == null) {
		throw new SqlExecutionException("Could not find a result with result identifier '" + resultId + "'.");
	}
	if (!result.isMaterialized()) {
		throw new SqlExecutionException("Invalid result retrieval mode.");
	}
	return ((MaterializedResult<?>) result).snapshot(pageSize);
}
 
Example #15
Source Project: Flink-CEPplus   Author: ljygz   File: LocalExecutor.java    License: Apache License 2.0
@Override
public Map<String, String> getSessionProperties(SessionContext session) throws SqlExecutionException {
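	// merge the execution and deployment properties of the session's environment into one flat map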
	final Environment env = getOrCreateExecutionContext(session)
		.getMergedEnvironment();
	final Map<String, String> properties = new HashMap<>();
	properties.putAll(env.getExecution().asTopLevelMap());
	properties.putAll(env.getDeployment().asTopLevelMap());
	return properties;
}
 
Example #16
Source Project: Flink-CEPplus   Author: ljygz   File: LocalExecutor.java    License: Apache License 2.0
@Override
public List<String> listTables(SessionContext session) throws SqlExecutionException {
	final TableEnvironment tableEnv = getOrCreateExecutionContext(session)
		.createEnvironmentInstance()
		.getTableEnvironment();
	return Arrays.asList(tableEnv.listTables());
}
 
Example #17
Source Project: Flink-CEPplus   Author: ljygz   File: LocalExecutor.java    License: Apache License 2.0
@Override
public List<String> listUserDefinedFunctions(SessionContext session) throws SqlExecutionException {
	final TableEnvironment tableEnv = getOrCreateExecutionContext(session)
		.createEnvironmentInstance()
		.getTableEnvironment();
	return Arrays.asList(tableEnv.listUserDefinedFunctions());
}
 
Example #18
Source Project: Flink-CEPplus   Author: ljygz   File: LocalExecutor.java    License: Apache License 2.0
@Override
public TableSchema getTableSchema(SessionContext session, String name) throws SqlExecutionException {
	final TableEnvironment tableEnv = getOrCreateExecutionContext(session)
		.createEnvironmentInstance()
		.getTableEnvironment();
	try {
		return tableEnv.scan(name).getSchema();
	} catch (Throwable t) {
		// catch everything such that the query does not crash the executor
		throw new SqlExecutionException("No table with this name could be found.", t);
	}
}
 
Example #19
Source Project: flink   Author: apache   File: LocalExecutorITCase.java    License: Apache License 2.0
@Test
public void testSetSessionProperties() throws Exception {
	final LocalExecutor executor = createDefaultExecutor(clusterClient);
	String key = OptimizerConfigOptions.TABLE_OPTIMIZER_AGG_PHASE_STRATEGY.key();

	final SessionContext session = new SessionContext("test-session", new Environment());
	String sessionId = executor.openSession(session);
	// check the config in Environment
	assertNull(executor.getSessionProperties(sessionId).get(key));
	// check the config in TableConfig
	assertNull(executor.getExecutionContext(sessionId)
			.getTableEnvironment().getConfig().getConfiguration().getString(key, null));

	// modify config
	executor.setSessionProperty(sessionId, key, "ONE_PHASE");
	// check the config in Environment again
	assertEquals("ONE_PHASE", executor.getSessionProperties(sessionId).get(key));
	// check the config in TableConfig again
	assertEquals("ONE_PHASE",
			executor.getExecutionContext(sessionId)
					.getTableEnvironment().getConfig().getConfiguration().getString(key, null));

	// reset all properties
	executor.resetSessionProperties(sessionId);
	// check the config in Environment
	assertNull(executor.getSessionProperties(sessionId).get(key));
	// check the config in TableConfig
	assertNull(executor.getExecutionContext(sessionId)
			.getTableEnvironment().getConfig().getConfiguration().getString(key, null));
}
 
Example #20
Source Project: flink   Author: flink-tpc-ds   File: CliClientTest.java    License: Apache License 2.0
private void verifySqlCompletion(String statement, int position, List<String> expectedHints, List<String> notExpectedHints) throws IOException {
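	// run the statement through the SqlCompleter against a mock executor and inspect the produced candidates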
	final SessionContext context = new SessionContext("test-session", new Environment());
	final MockExecutor mockExecutor = new MockExecutor();

	final SqlCompleter completer = new SqlCompleter(context, mockExecutor);
	final SqlMultiLineParser parser = new SqlMultiLineParser();

	try (Terminal terminal = TerminalUtils.createDummyTerminal()) {
		final LineReader reader = LineReaderBuilder.builder().terminal(terminal).build();

		final ParsedLine parsedLine = parser.parse(statement, position, Parser.ParseContext.COMPLETE);
		final List<Candidate> candidates = new ArrayList<>();
		final List<String> results = new ArrayList<>();
		completer.complete(reader, parsedLine, candidates);
		candidates.forEach(item -> results.add(item.value()));

		assertTrue(results.containsAll(expectedHints));

		assertEquals(statement, mockExecutor.receivedStatement);
		assertEquals(context, mockExecutor.receivedContext);
		assertEquals(position, mockExecutor.receivedPosition);
		assertTrue(results.contains("HintA"));
		assertTrue(results.contains("Hint B"));

		results.retainAll(notExpectedHints);
		assertEquals(0, results.size());
	}
}
 
Example #21
Source Project: Flink-CEPplus   Author: ljygz   File: LocalExecutor.java    License: Apache License 2.0
@Override
public TypedResult<List<Tuple2<Boolean, Row>>> retrieveResultChanges(SessionContext session,
		String resultId) throws SqlExecutionException {
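	// changelog retrieval is only valid for non-materialized (changelog mode) results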
	final DynamicResult<?> result = resultStore.getResult(resultId);
	if (result == null) {
		throw new SqlExecutionException("Could not find a result with result identifier '" + resultId + "'.");
	}
	if (result.isMaterialized()) {
		throw new SqlExecutionException("Invalid result retrieval mode.");
	}
	return ((ChangelogResult<?>) result).retrieveChanges();
}
 
Example #22
Source Project: flink   Author: apache   File: CliResultViewTest.java    License: Apache License 2.0
private void testResultViewClearResult(TypedResult<?> typedResult, boolean isTableMode, int expectedCancellationCount) throws Exception {
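	// run a result view against a mock executor and count how many times the underlying query gets cancelled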
	final CountDownLatch cancellationCounterLatch = new CountDownLatch(expectedCancellationCount);
	final SessionContext session = new SessionContext("test-session", new Environment());
	final MockExecutor executor = new MockExecutor(typedResult, cancellationCounterLatch);
	String sessionId = executor.openSession(session);
	final ResultDescriptor descriptor = new ResultDescriptor(
			"result-id",
			TableSchema.builder().field("Null Field", Types.STRING()).build(),
			false,
			false);

	Thread resultViewRunner = null;
	CliClient cli = null;
	try {
		cli = new CliClient(
				TerminalUtils.createDummyTerminal(),
				sessionId,
				executor,
				File.createTempFile("history", "tmp").toPath());
		resultViewRunner = new Thread(new TestingCliResultView(cli, descriptor, isTableMode));
		resultViewRunner.start();
	} finally {
		if (resultViewRunner != null && !resultViewRunner.isInterrupted()) {
			resultViewRunner.interrupt();
		}
		if (cli != null) {
			cli.close();
		}
	}

	assertTrue(
		"Invalid number of cancellations.",
		cancellationCounterLatch.await(10, TimeUnit.SECONDS));
}
 
Example #23
Source Project: flink   Author: flink-tpc-ds   File: LocalExecutorITCase.java    License: Apache License 2.0
@Test
public void testListTables() throws Exception {
	final Executor executor = createDefaultExecutor(clusterClient);
	final SessionContext session = new SessionContext("test-session", new Environment());

	final List<String> actualTables = executor.listTables(session);

	final List<String> expectedTables = Arrays.asList(
		"TableNumber1",
		"TableNumber2",
		"TableSourceSink",
		"TestView1",
		"TestView2");
	assertEquals(expectedTables, actualTables);
}
 
Example #24
Source Project: Flink-CEPplus   Author: ljygz   File: LocalExecutor.java    License: Apache License 2.0
@Override
public void stop(SessionContext session) {
	resultStore.getResults().forEach((resultId) -> {
		try {
			cancelQuery(session, resultId);
		} catch (Throwable t) {
			// ignore any throwable to keep the clean up running
		}
	});
}
 
Example #25
Source Project: flink   Author: flink-tpc-ds   File: LocalExecutorITCase.java    License: Apache License 2.0
@Test(timeout = 30_000L)
public void testStreamQueryExecutionChangelog() throws Exception {
	final URL url = getClass().getClassLoader().getResource("test-data.csv");
	Objects.requireNonNull(url);
	final Map<String, String> replaceVars = new HashMap<>();
	replaceVars.put("$VAR_PLANNER", planner);
	replaceVars.put("$VAR_SOURCE_PATH1", url.getPath());
	replaceVars.put("$VAR_EXECUTION_TYPE", "streaming");
	replaceVars.put("$VAR_RESULT_MODE", "changelog");
	replaceVars.put("$VAR_UPDATE_MODE", "update-mode: append");
	replaceVars.put("$VAR_MAX_ROWS", "100");

	final Executor executor = createModifiedExecutor(clusterClient, replaceVars);
	final SessionContext session = new SessionContext("test-session", new Environment());

	try {
		// start job and retrieval
		final ResultDescriptor desc = executor.executeQuery(
			session,
			"SELECT scalarUDF(IntegerField1), StringField1 FROM TableNumber1");

		assertFalse(desc.isMaterialized());

		final List<String> actualResults =
				retrieveChangelogResult(executor, session, desc.getResultId());

		final List<String> expectedResults = new ArrayList<>();
		expectedResults.add("(true,47,Hello World)");
		expectedResults.add("(true,27,Hello World)");
		expectedResults.add("(true,37,Hello World)");
		expectedResults.add("(true,37,Hello World)");
		expectedResults.add("(true,47,Hello World)");
		expectedResults.add("(true,57,Hello World!!!!)");

		TestBaseUtils.compareResultCollections(expectedResults, actualResults, Comparator.naturalOrder());
	} finally {
		executor.stop(session);
	}
}
 
Example #26
Source Project: flink   Author: apache   File: LocalExecutorITCase.java    License: Apache License 2.0
@Test
public void testListDatabases() throws Exception {
	final Executor executor = createDefaultExecutor(clusterClient);
	final SessionContext session = new SessionContext("test-session", new Environment());
	String sessionId = executor.openSession(session);
	assertEquals("test-session", sessionId);

	final List<String> actualDatabases = executor.listDatabases(sessionId);

	final List<String> expectedDatabases = Collections.singletonList("default_database");
	assertEquals(expectedDatabases, actualDatabases);

	executor.closeSession(sessionId);
}
 
Example #27
Source Project: flink   Author: apache   File: DependencyTest.java    License: Apache License 2.0
@Test
public void testSqlParseWithUserClassLoader() throws Exception {
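	// the session's SQL parser should turn a simple SELECT into exactly one Operation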
	final LocalExecutor executor = createExecutor();
	final SessionContext session = new SessionContext("test-session", new Environment());
	String sessionId = executor.openSession(session);
	try {
		final Parser sqlParser = executor.getSqlParser(sessionId);
		List<Operation> operations = sqlParser.parse("SELECT IntegerField1, StringField1 FROM TableNumber1");

		assertTrue(operations != null && operations.size() == 1);
	} finally {
		executor.closeSession(sessionId);
	}
}
 
Example #28
Source Project: Flink-CEPplus   Author: ljygz   File: LocalExecutorITCase.java    License: Apache License 2.0
@Test
public void testListUserDefinedFunctions() throws Exception {
	final Executor executor = createDefaultExecutor(clusterClient);
	final SessionContext session = new SessionContext("test-session", new Environment());

	final List<String> actualTables = executor.listUserDefinedFunctions(session);

	final List<String> expectedTables = Arrays.asList("aggregateUDF", "tableUDF", "scalarUDF");
	assertEquals(expectedTables, actualTables);
}
 
Example #29
Source Project: Flink-CEPplus   Author: ljygz   File: LocalExecutorITCase.java    License: Apache License 2.0
@Test
public void testGetSessionProperties() throws Exception {
	final Executor executor = createDefaultExecutor(clusterClient);
	final SessionContext session = new SessionContext("test-session", new Environment());

	session.setSessionProperty("execution.result-mode", "changelog");

	executor.getSessionProperties(session);

	// modify defaults
	session.setSessionProperty("execution.result-mode", "table");

	final Map<String, String> actualProperties = executor.getSessionProperties(session);

	final Map<String, String> expectedProperties = new HashMap<>();
	expectedProperties.put("execution.type", "batch");
	expectedProperties.put("execution.time-characteristic", "event-time");
	expectedProperties.put("execution.periodic-watermarks-interval", "99");
	expectedProperties.put("execution.parallelism", "1");
	expectedProperties.put("execution.max-parallelism", "16");
	expectedProperties.put("execution.max-idle-state-retention", "0");
	expectedProperties.put("execution.min-idle-state-retention", "0");
	expectedProperties.put("execution.result-mode", "table");
	expectedProperties.put("execution.max-table-result-rows", "100");
	expectedProperties.put("execution.restart-strategy.type", "failure-rate");
	expectedProperties.put("execution.restart-strategy.max-failures-per-interval", "10");
	expectedProperties.put("execution.restart-strategy.failure-rate-interval", "99000");
	expectedProperties.put("execution.restart-strategy.delay", "1000");
	expectedProperties.put("deployment.response-timeout", "5000");

	assertEquals(expectedProperties, actualProperties);
}
 
Example #30
Source Project: flink   Author: apache   File: LocalExecutorITCase.java    License: Apache License 2.0
@Test
public void testCreateDatabase() throws Exception {
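	// a database created via executeUpdate should show up in listDatabases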
	final Executor executor = createDefaultExecutor(clusterClient);
	final SessionContext session = new SessionContext("test-session", new Environment());
	String sessionId = executor.openSession(session);
	assertEquals("test-session", sessionId);

	executor.executeUpdate(sessionId, "create database db1");

	final List<String> actualDatabases = executor.listDatabases(sessionId);
	final List<String> expectedDatabases = Arrays.asList("default_database", "db1");
	assertEquals(expectedDatabases, actualDatabases);

	executor.closeSession(sessionId);
}