org.apache.flink.table.client.gateway.ProgramTargetDescriptor Java Examples

The following examples show how to use org.apache.flink.table.client.gateway.ProgramTargetDescriptor. You can vote up the examples you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage in the sidebar.
Example #1
Source File: LocalExecutor.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
/**
 * Runs an update statement against the session's environment and returns a
 * descriptor of where the submitted program ended up running.
 */
private <C> ProgramTargetDescriptor executeUpdateInternal(ExecutionContext<C> context, String statement) {
	// Materialize an environment instance so the statement runs with the session's settings.
	final ExecutionContext.EnvironmentInstance envInst = context.createEnvironmentInstance();

	applyUpdate(context, envInst.getTableEnvironment(), envInst.getQueryConfig(), statement);

	// The job is named after the session plus the statement it executes.
	final String jobName = context.getSessionContext().getName() + ": " + statement;

	// Translate into a job graph; shield the executor from any translation failure.
	JobGraph jobGraph;
	try {
		jobGraph = envInst.createJobGraph(jobName);
	} catch (Throwable t) {
		// catch everything such that the statement does not crash the executor
		throw new SqlExecutionException("Invalid SQL statement.", t);
	}

	// Deploy synchronously; the deployer fills in cluster details on the result.
	final BasicResult<C> deploymentResult = new BasicResult<>();
	new ProgramDeployer<>(context, jobName, jobGraph, deploymentResult, false).run();

	return ProgramTargetDescriptor.of(
		deploymentResult.getClusterId(),
		jobGraph.getJobID(),
		deploymentResult.getWebInterfaceUrl());
}
 
Example #2
Source File: CliClient.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
/**
 * Submits an INSERT INTO statement through the executor and prints the
 * submission outcome to the terminal.
 *
 * @return true on successful submission, false if execution failed
 */
private boolean callInsertInto(SqlCommandCall cmdCall) {
	printInfo(CliStrings.MESSAGE_SUBMITTING_STATEMENT);

	try {
		final String statement = cmdCall.operands[0];
		final ProgramTargetDescriptor programTarget = executor.executeUpdate(context, statement);
		// Tell the user the statement was accepted and where it is running.
		terminal.writer().println(CliStrings.messageInfo(CliStrings.MESSAGE_STATEMENT_SUBMITTED).toAnsi());
		terminal.writer().println(programTarget.toString());
		terminal.flush();
		return true;
	} catch (SqlExecutionException e) {
		printExecutionException(e);
		return false;
	}
}
 
Example #3
Source File: CliClientTest.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Test double: records the session and statement it was called with, then
 * either fails (when configured) or returns a descriptor with a fresh job id.
 */
@Override
public ProgramTargetDescriptor executeUpdate(String sessionId, String statement) throws SqlExecutionException {
	// Capture the call arguments so the test can assert on them afterwards.
	receivedContext = sessionMap.get(sessionId);
	receivedStatement = statement;
	if (failExecution) {
		throw new SqlExecutionException("Fail execution.");
	}
	return new ProgramTargetDescriptor(JobID.generate());
}
 
Example #4
Source File: CliClientTest.java    From Flink-CEPplus with Apache License 2.0 5 votes vote down vote up
/**
 * Test double: remembers the arguments it received and either simulates a
 * failure or hands back a fixed dummy descriptor.
 */
@Override
public ProgramTargetDescriptor executeUpdate(SessionContext session, String statement) throws SqlExecutionException {
	// Capture the call arguments for later verification by the test.
	receivedStatement = statement;
	receivedContext = session;
	if (failExecution) {
		throw new SqlExecutionException("Fail execution.");
	}
	// Static dummy values; tests only check that a submission took place.
	return new ProgramTargetDescriptor("testClusterId", "testJobId", "http://testcluster:1234");
}
 
Example #5
Source File: LocalExecutor.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Executes an update statement in the given session context and reports the
 * cluster/job the resulting program was deployed to.
 */
private <C> ProgramTargetDescriptor executeUpdateInternal(ExecutionContext<C> context, String statement) {
	// Build an environment instance carrying the session's configuration.
	final ExecutionContext<C>.EnvironmentInstance envInst = context.createEnvironmentInstance();

	applyUpdate(context, envInst.getTableEnvironment(), envInst.getQueryConfig(), statement);

	// Name the job after the session and the statement text.
	final String jobName = context.getSessionContext().getName() + ": " + statement;

	// Translate into a job graph; never let a translation failure kill the executor.
	JobGraph jobGraph;
	try {
		jobGraph = envInst.createJobGraph(jobName);
	} catch (Throwable t) {
		// catch everything such that the statement does not crash the executor
		throw new SqlExecutionException("Invalid SQL statement.", t);
	}

	// Deploy synchronously; the deployer fills in cluster details on the result.
	final BasicResult<C> deploymentResult = new BasicResult<>();
	new ProgramDeployer<>(context, jobName, jobGraph, deploymentResult, false).run();

	return ProgramTargetDescriptor.of(
		deploymentResult.getClusterId(),
		jobGraph.getJobID(),
		deploymentResult.getWebInterfaceUrl());
}
 
Example #6
Source File: CliClient.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Hands an INSERT INTO statement to the executor and echoes the submission
 * result to the CLI terminal.
 *
 * @return true when the statement was submitted, false on execution failure
 */
private boolean callInsertInto(SqlCommandCall cmdCall) {
	printInfo(CliStrings.MESSAGE_SUBMITTING_STATEMENT);

	try {
		final String statement = cmdCall.operands[0];
		final ProgramTargetDescriptor programTarget = executor.executeUpdate(context, statement);
		// Confirm submission and print the target descriptor for the user.
		terminal.writer().println(CliStrings.messageInfo(CliStrings.MESSAGE_STATEMENT_SUBMITTED).toAnsi());
		terminal.writer().println(programTarget.toString());
		terminal.flush();
		return true;
	} catch (SqlExecutionException e) {
		printExecutionException(e);
		return false;
	}
}
 
Example #7
Source File: LocalExecutorITCase.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Submits the given statement, polls the cluster until the job terminates,
 * and verifies the sink output once the job has finished.
 *
 * <p>Fails the test on any terminal status other than FINISHED.
 */
private void executeAndVerifySinkResult(
		Executor executor,
		String sessionId,
		String statement,
		String resultPath) throws Exception {
	final ProgramTargetDescriptor targetDescriptor = executor.executeUpdate(
			sessionId,
			statement);

	// Busy-wait on the job status; 50ms sleep keeps polling from spinning hot.
	while (true) {
		Thread.sleep(50);
		final JobStatus jobStatus = clusterClient.getJobStatus(targetDescriptor.getJobId()).get();
		if (jobStatus == JobStatus.CREATED || jobStatus == JobStatus.RUNNING) {
			// Still in flight — keep polling.
			continue;
		}
		if (jobStatus == JobStatus.FINISHED) {
			verifySinkResult(resultPath);
			return;
		}
		fail("Unexpected job status.");
	}
}
 
Example #8
Source File: CliClient.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Submits an INSERT statement for the current session and reports the
 * outcome on the terminal.
 *
 * @return true on successful submission, false if execution failed
 */
private boolean callInsert(SqlCommandCall cmdCall) {
	printInfo(CliStrings.MESSAGE_SUBMITTING_STATEMENT);

	try {
		final String statement = cmdCall.operands[0];
		final ProgramTargetDescriptor programTarget = executor.executeUpdate(sessionId, statement);
		// Print the confirmation followed by the target descriptor.
		terminal.writer().println(CliStrings.messageInfo(CliStrings.MESSAGE_STATEMENT_SUBMITTED).toAnsi());
		terminal.writer().println(programTarget.toString());
		terminal.flush();
		return true;
	} catch (SqlExecutionException e) {
		printExecutionException(e);
		return false;
	}
}
 
Example #9
Source File: CliClientTest.java    From flink with Apache License 2.0 5 votes vote down vote up
/**
 * Test double: stores the received session and statement, then either
 * simulates a failure or returns a fixed dummy descriptor.
 */
@Override
public ProgramTargetDescriptor executeUpdate(SessionContext session, String statement) throws SqlExecutionException {
	// Record the arguments so the test can verify what was passed in.
	receivedStatement = statement;
	receivedContext = session;
	if (failExecution) {
		throw new SqlExecutionException("Fail execution.");
	}
	// Hard-coded values; only the fact of submission matters to the tests.
	return new ProgramTargetDescriptor("testClusterId", "testJobId", "http://testcluster:1234");
}
 
Example #10
Source File: TestingExecutor.java    From flink with Apache License 2.0 4 votes vote down vote up
// Test stub: update execution is intentionally unsupported by this executor.
@Override
public ProgramTargetDescriptor executeUpdate(String sessionId, String statement) throws SqlExecutionException {
	throw new UnsupportedOperationException("Not implemented.");
}
 
Example #11
Source File: LocalExecutor.java    From Flink-CEPplus with Apache License 2.0 4 votes vote down vote up
/**
 * Resolves (or lazily creates) the execution context for the session and
 * delegates the statement to the internal update path.
 */
@Override
public ProgramTargetDescriptor executeUpdate(SessionContext session, String statement) throws SqlExecutionException {
	return executeUpdateInternal(getOrCreateExecutionContext(session), statement);
}
 
Example #12
Source File: CliResultViewTest.java    From flink with Apache License 2.0 4 votes vote down vote up
// Test stub: result view tests never inspect the returned descriptor,
// so no descriptor is produced.
@Override
public ProgramTargetDescriptor executeUpdate(String sessionId, String statement) throws SqlExecutionException {
	return null;
}
 
Example #13
Source File: LocalExecutor.java    From flink with Apache License 2.0 4 votes vote down vote up
/**
 * Applies an update statement and, for INSERT statements, submits the
 * resulting pipeline to the cluster in detached mode.
 *
 * <p>Returns {@code null} for non-INSERT statements (nothing is deployed);
 * otherwise returns a descriptor carrying the submitted job's id.
 */
private <C> ProgramTargetDescriptor executeUpdateInternal(
		String sessionId,
		ExecutionContext<C> context,
		String statement) {

	applyUpdate(context, statement);

	//Todo: we should refactor following condition after TableEnvironment has support submit job directly.
	if (!INSERT_SQL_PATTERN.matcher(statement.trim()).matches()) {
		return null;
	}

	// create pipeline
	final String jobName = sessionId + ": " + statement;
	final Pipeline pipeline;
	try {
		pipeline = context.createPipeline(jobName);
	} catch (Throwable t) {
		// catch everything such that the statement does not crash the executor
		throw new SqlExecutionException("Invalid SQL statement.", t);
	}

	// create a copy so that we can change settings without affecting the original config
	Configuration configuration = new Configuration(context.getFlinkConfig());
	// for update queries we don't wait for the job result, so run in detached mode
	configuration.set(DeploymentOptions.ATTACHED, false);

	// create execution
	final ProgramDeployer deployer = new ProgramDeployer(configuration, jobName, pipeline);

	// wrap in classloader because CodeGenOperatorFactory#getStreamOperatorClass
	// requires to access UDF in deployer.deploy().
	return context.wrapClassLoader(() -> {
		try {
			// blocking deployment
			JobClient jobClient = deployer.deploy().get();
			return ProgramTargetDescriptor.of(jobClient.getJobID());
		} catch (Exception e) {
			// NOTE(review): wraps in plain RuntimeException rather than
			// SqlExecutionException — presumably intentional; confirm callers.
			throw new RuntimeException("Error running SQL job.", e);
		}
	});
}
 
Example #14
Source File: LocalExecutor.java    From flink with Apache License 2.0 4 votes vote down vote up
/**
 * Looks up the execution context registered for the session and forwards the
 * statement to the internal update path.
 */
@Override
public ProgramTargetDescriptor executeUpdate(String sessionId, String statement) throws SqlExecutionException {
	return executeUpdateInternal(sessionId, getExecutionContext(sessionId), statement);
}
 
Example #15
Source File: CliResultViewTest.java    From flink with Apache License 2.0 4 votes vote down vote up
// Test stub: the result view under test does not use the update path,
// so no descriptor is returned.
@Override
public ProgramTargetDescriptor executeUpdate(SessionContext session, String statement) throws SqlExecutionException {
	return null;
}
 
Example #16
Source File: LocalExecutorITCase.java    From flink with Apache License 2.0 4 votes vote down vote up
// Integration test: streams an INSERT INTO a registered sink, waits for the
// job to finish, then verifies a temporary in-memory sink in another catalog.
@Test(timeout = 30_000L)
public void testStreamQueryExecutionSink() throws Exception {
	final String csvOutputPath = new File(tempFolder.newFolder().getAbsolutePath(), "test-out.csv").toURI().toString();
	final URL url = getClass().getClassLoader().getResource("test-data.csv");
	Objects.requireNonNull(url);
	// Substitution variables for the templated environment file.
	final Map<String, String> replaceVars = new HashMap<>();
	replaceVars.put("$VAR_PLANNER", planner);
	replaceVars.put("$VAR_SOURCE_PATH1", url.getPath());
	replaceVars.put("$VAR_EXECUTION_TYPE", "streaming");
	replaceVars.put("$VAR_SOURCE_SINK_PATH", csvOutputPath);
	replaceVars.put("$VAR_UPDATE_MODE", "update-mode: append");
	replaceVars.put("$VAR_MAX_ROWS", "100");

	final Executor executor = createModifiedExecutor(clusterClient, replaceVars);
	final SessionContext session = new SessionContext("test-session", new Environment());

	try {
		// Case 1: Registered sink
		final ProgramTargetDescriptor targetDescriptor = executor.executeUpdate(
			session,
			"INSERT INTO TableSourceSink SELECT IntegerField1 = 42, StringField1 FROM TableNumber1");

		// wait for job completion and verify result
		boolean isRunning = true;
		while (isRunning) {
			Thread.sleep(50); // slow the processing down
			final JobStatus jobStatus = clusterClient.getJobStatus(JobID.fromHexString(targetDescriptor.getJobId())).get();
			switch (jobStatus) {
				case CREATED:
				case RUNNING:
					continue;
				case FINISHED:
					// Terminal success: stop polling and check the sink file.
					isRunning = false;
					verifySinkResult(csvOutputPath);
					break;
				default:
					// Any other terminal status (FAILED, CANCELED, ...) is a test failure.
					fail("Unexpected job status.");
			}
		}

		// Case 2: Temporary sink
		session.setCurrentCatalog("simple-catalog");
		session.setCurrentDatabase("default_database");
		// all queries are pipelined to an in-memory sink, check it is properly registered
		final ResultDescriptor otherCatalogDesc = executor.executeQuery(session, "SELECT * FROM `test-table`");

		final List<String> otherCatalogResults = retrieveTableResult(
			executor,
			session,
			otherCatalogDesc.getResultId());

		// Order-insensitive comparison against the catalog's known contents.
		TestBaseUtils.compareResultCollections(
			SimpleCatalogFactory.TABLE_CONTENTS.stream().map(Row::toString).collect(Collectors.toList()),
			otherCatalogResults,
			Comparator.naturalOrder());
	} finally {
		// Always release session resources, even when assertions fail.
		executor.stop(session);
	}
}
 
Example #17
Source File: LocalExecutor.java    From flink with Apache License 2.0 4 votes vote down vote up
/**
 * Obtains (creating on first use) the session's execution context and
 * delegates the statement to the internal update path.
 */
@Override
public ProgramTargetDescriptor executeUpdate(SessionContext session, String statement) throws SqlExecutionException {
	return executeUpdateInternal(getOrCreateExecutionContext(session), statement);
}
 
Example #18
Source File: CliResultViewTest.java    From Flink-CEPplus with Apache License 2.0 4 votes vote down vote up
// Test stub: the result view tests do not exercise updates, so nothing
// is executed and no descriptor is returned.
@Override
public ProgramTargetDescriptor executeUpdate(SessionContext session, String statement) throws SqlExecutionException {
	return null;
}
 
Example #19
Source File: LocalExecutorITCase.java    From Flink-CEPplus with Apache License 2.0 4 votes vote down vote up
// Integration test: submits a streaming INSERT INTO a CSV sink, polls the
// cluster until the job finishes, then verifies the sink output.
@Test(timeout = 30_000L)
public void testStreamQueryExecutionSink() throws Exception {
	final String csvOutputPath = new File(tempFolder.newFolder().getAbsolutePath(), "test-out.csv").toURI().toString();
	final URL url = getClass().getClassLoader().getResource("test-data.csv");
	Objects.requireNonNull(url);
	// Substitution variables for the templated environment file.
	final Map<String, String> replaceVars = new HashMap<>();
	replaceVars.put("$VAR_SOURCE_PATH1", url.getPath());
	replaceVars.put("$VAR_EXECUTION_TYPE", "streaming");
	replaceVars.put("$VAR_SOURCE_SINK_PATH", csvOutputPath);
	replaceVars.put("$VAR_UPDATE_MODE", "update-mode: append");
	replaceVars.put("$VAR_MAX_ROWS", "100");

	final Executor executor = createModifiedExecutor(clusterClient, replaceVars);
	final SessionContext session = new SessionContext("test-session", new Environment());

	try {
		// start job
		final ProgramTargetDescriptor targetDescriptor = executor.executeUpdate(
			session,
			"INSERT INTO TableSourceSink SELECT IntegerField1 = 42, StringField1 FROM TableNumber1");

		// wait for job completion and verify result
		boolean isRunning = true;
		while (isRunning) {
			Thread.sleep(50); // slow the processing down
			final JobStatus jobStatus = clusterClient.getJobStatus(JobID.fromHexString(targetDescriptor.getJobId())).get();
			switch (jobStatus) {
				case CREATED:
				case RUNNING:
					continue;
				case FINISHED:
					// Terminal success: stop polling and check the sink file.
					isRunning = false;
					verifySinkResult(csvOutputPath);
					break;
				default:
					// Any other terminal status (FAILED, CANCELED, ...) is a test failure.
					fail("Unexpected job status.");
			}
		}
	} finally {
		// Always release session resources, even when the assertion fails.
		executor.stop(session);
	}
}