Java Code Examples for org.apache.flink.table.api.EnvironmentSettings#toExecutorProperties()

The following examples show how to use org.apache.flink.table.api.EnvironmentSettings#toExecutorProperties(). They are taken from open-source projects; the source file, originating project, and license are noted above each example. In every case the method converts the EnvironmentSettings into a string-to-string property map that is then used to discover and create an Executor.
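Before the individual examples, here is a minimal sketch of the pattern they all share: the property map returned by toExecutorProperties() is handed to ComponentFactoryService, which discovers a matching ExecutorFactory and creates the Executor. The ExecutorPropertiesSketch class name and the import paths are assumptions based on the Flink 1.9/1.10 internal API used in these snippets and may not match newer Flink versions.

import java.util.Map;

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.delegation.Executor;
import org.apache.flink.table.delegation.ExecutorFactory;
import org.apache.flink.table.factories.ComponentFactoryService;

public class ExecutorPropertiesSketch {

	// Translates the settings into executor properties and performs the factory
	// lookup, mirroring the first step of TableEnvironmentImpl#create (Example 1).
	public static Executor createExecutor(EnvironmentSettings settings) {
		Map<String, String> executorProperties = settings.toExecutorProperties();
		return ComponentFactoryService.find(ExecutorFactory.class, executorProperties)
			.create(executorProperties);
	}

	public static void main(String[] args) {
		// Building the settings this way assumes the Blink planner is on the classpath.
		EnvironmentSettings settings = EnvironmentSettings.newInstance()
			.useBlinkPlanner()
			.inStreamingMode()
			.build();
		Executor executor = createExecutor(settings);
		System.out.println("Created executor: " + executor.getClass().getName());
	}
}

The planner side follows the same pattern with toPlannerProperties() and PlannerFactory, as Examples 1, 2, and 11 below show.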
Example 1
Source File: TableEnvironmentImpl.java    From flink with Apache License 2.0
public static TableEnvironmentImpl create(EnvironmentSettings settings) {

	CatalogManager catalogManager = new CatalogManager(
		settings.getBuiltInCatalogName(),
		new GenericInMemoryCatalog(settings.getBuiltInCatalogName(), settings.getBuiltInDatabaseName()));

	FunctionCatalog functionCatalog = new FunctionCatalog(catalogManager);

	Map<String, String> executorProperties = settings.toExecutorProperties();
	Executor executor = ComponentFactoryService.find(ExecutorFactory.class, executorProperties)
		.create(executorProperties);

	TableConfig tableConfig = new TableConfig();
	Map<String, String> plannerProperties = settings.toPlannerProperties();
	Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
		.create(plannerProperties, executor, tableConfig, functionCatalog, catalogManager);

	return new TableEnvironmentImpl(
		catalogManager,
		tableConfig,
		executor,
		functionCatalog,
		planner,
		settings.isStreamingMode()
	);
}
 
Example 2
Source File: StreamTableEnvironmentImpl.java    From flink with Apache License 2.0
public static StreamTableEnvironment create(
		StreamExecutionEnvironment executionEnvironment,
		EnvironmentSettings settings,
		TableConfig tableConfig) {

	if (!settings.isStreamingMode()) {
		throw new TableException(
			"StreamTableEnvironment can not run in batch mode for now, please use TableEnvironment.");
	}

	CatalogManager catalogManager = new CatalogManager(
		settings.getBuiltInCatalogName(),
		new GenericInMemoryCatalog(settings.getBuiltInCatalogName(), settings.getBuiltInDatabaseName()));

	FunctionCatalog functionCatalog = new FunctionCatalog(catalogManager);

	Map<String, String> executorProperties = settings.toExecutorProperties();
	Executor executor = lookupExecutor(executorProperties, executionEnvironment);

	Map<String, String> plannerProperties = settings.toPlannerProperties();
	Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
		.create(plannerProperties, executor, tableConfig, functionCatalog, catalogManager);

	return new StreamTableEnvironmentImpl(
		catalogManager,
		functionCatalog,
		tableConfig,
		executionEnvironment,
		planner,
		executor,
		settings.isStreamingMode()
	);
}
 
Example 3
Source File: TableEnvFactory.java    From zeppelin with Apache License 2.0
public TableEnvironment createJavaBlinkStreamTableEnvironment(EnvironmentSettings settings) {

    try {
      Map<String, String> executorProperties = settings.toExecutorProperties();
      Executor executor = lookupExecutor(executorProperties, senv.getJavaEnv());

      Map<String, String> plannerProperties = settings.toPlannerProperties();
      Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
              .create(plannerProperties, executor, tblConfig, blinkFunctionCatalog, catalogManager);

      Class clazz = null;
      if (flinkVersion.isFlink110()) {
        clazz = Class
                .forName("org.apache.flink.table.api.java.internal.StreamTableEnvironmentImpl");
      } else {
        clazz = Class
                .forName("org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl");
      }
      Constructor constructor = clazz
              .getConstructor(
                      CatalogManager.class,
                      ModuleManager.class,
                      FunctionCatalog.class,
                      TableConfig.class,
                      org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.class,
                      Planner.class,
                      Executor.class,
                      boolean.class);
      return (TableEnvironment) constructor.newInstance(catalogManager,
              moduleManager,
              blinkFunctionCatalog,
              tblConfig,
              senv.getJavaEnv(),
              planner,
              executor,
              settings.isStreamingMode());
    } catch (Exception e) {
      throw new TableException("Fail to createJavaBlinkStreamTableEnvironment", e);
    }
  }
 
Example 4
Source File: TableEnvFactory.java    From zeppelin with Apache License 2.0
public TableEnvironment createJavaBlinkBatchTableEnvironment(
        EnvironmentSettings settings) {
  try {
    final Map<String, String> executorProperties = settings.toExecutorProperties();
    executor = lookupExecutor(executorProperties, senv.getJavaEnv());
    final Map<String, String> plannerProperties = settings.toPlannerProperties();
    final Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
            .create(plannerProperties, executor, tblConfig, blinkFunctionCatalog, catalogManager);

    Class clazz = null;
    if (flinkVersion.isFlink110()) {
      clazz = Class
              .forName("org.apache.flink.table.api.java.internal.StreamTableEnvironmentImpl");
    } else {
      clazz = Class
              .forName("org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl");
    }
    Constructor constructor = clazz.getConstructor(
                    CatalogManager.class,
                    ModuleManager.class,
                    FunctionCatalog.class,
                    TableConfig.class,
                    StreamExecutionEnvironment.class,
                    Planner.class,
                    Executor.class,
                    boolean.class);
    return (TableEnvironment) constructor.newInstance(
            catalogManager,
            moduleManager,
            blinkFunctionCatalog,
            tblConfig,
            senv.getJavaEnv(),
            planner,
            executor,
            settings.isStreamingMode());
  } catch (Exception e) {
    LOGGER.info(ExceptionUtils.getStackTrace(e));
    throw new TableException("Fail to createJavaBlinkBatchTableEnvironment", e);
  }
}
 
Example 5
Source File: TableEnvFactory.java    From zeppelin with Apache License 2.0
public void createPlanner(EnvironmentSettings settings) {
  Map<String, String> executorProperties = settings.toExecutorProperties();
  Executor executor = lookupExecutor(executorProperties, senv.getJavaEnv());

  Map<String, String> plannerProperties = settings.toPlannerProperties();
  ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
          .create(
                  plannerProperties,
                  executor,
                  tblConfig,
                  blinkFunctionCatalog,
                  catalogManager);
}
 
Example 6
Source File: ExecutionContext.java    From flink with Apache License 2.0
private void createTableEnvironment(
		EnvironmentSettings settings,
		TableConfig config,
		CatalogManager catalogManager,
		ModuleManager moduleManager,
		FunctionCatalog functionCatalog) {
	if (environment.getExecution().isStreamingPlanner()) {
		streamExecEnv = createStreamExecutionEnvironment();
		execEnv = null;

		final Map<String, String> executorProperties = settings.toExecutorProperties();
		executor = lookupExecutor(executorProperties, streamExecEnv);
		tableEnv = createStreamTableEnvironment(
				streamExecEnv,
				settings,
				config,
				executor,
				catalogManager,
				moduleManager,
				functionCatalog);
	} else if (environment.getExecution().isBatchPlanner()) {
		streamExecEnv = null;
		execEnv = ExecutionEnvironment.getExecutionEnvironment();
		executor = null;
		tableEnv = new BatchTableEnvironmentImpl(
				execEnv,
				config,
				catalogManager,
				moduleManager);
	} else {
		throw new SqlExecutionException("Unsupported execution type specified.");
	}
}
 
Example 7
Source File: ExecutionContext.java    From flink with Apache License 2.0
private EnvironmentInstance() {
	// create settings
	final EnvironmentSettings settings = mergedEnv.getExecution().getEnvironmentSettings();

	// create environments
	if (mergedEnv.getExecution().isStreamingPlanner()) {
		streamExecEnv = createStreamExecutionEnvironment();
		execEnv = null;

		final Map<String, String> executorProperties = settings.toExecutorProperties();
		executor = lookupExecutor(executorProperties, streamExecEnv);
		tableEnv = createStreamTableEnvironment(streamExecEnv, settings, executor);
	} else if (mergedEnv.getExecution().isBatchPlanner()) {
		streamExecEnv = null;
		execEnv = createExecutionEnvironment();
		executor = null;
		tableEnv = BatchTableEnvironment.create(execEnv);
	} else {
		throw new SqlExecutionException("Unsupported execution type specified.");
	}

	// set table configuration
	mergedEnv.getConfiguration().asMap().forEach((k, v) ->
		tableEnv.getConfig().getConfiguration().setString(k, v));

	// register catalogs
	catalogs.forEach(tableEnv::registerCatalog);

	// create query config
	queryConfig = createQueryConfig();

	// register table sources
	tableSources.forEach(tableEnv::registerTableSource);

	// register table sinks
	tableSinks.forEach(tableEnv::registerTableSink);

	// register user-defined functions
	registerFunctions();

	// register views and temporal tables in specified order
	mergedEnv.getTables().forEach((name, entry) -> {
		// if registering a view fails at this point,
		// it means that it accesses tables that are not available anymore
		if (entry instanceof ViewEntry) {
			final ViewEntry viewEntry = (ViewEntry) entry;
			registerView(viewEntry);
		} else if (entry instanceof TemporalTableEntry) {
			final TemporalTableEntry temporalTableEntry = (TemporalTableEntry) entry;
			registerTemporalTable(temporalTableEntry);
		}
	});

	// set current catalog
	if (sessionContext.getCurrentCatalog().isPresent()) {
		tableEnv.useCatalog(sessionContext.getCurrentCatalog().get());
	} else if (mergedEnv.getExecution().getCurrentCatalog().isPresent()) {
		tableEnv.useCatalog(mergedEnv.getExecution().getCurrentCatalog().get());
	}

	// set current database
	if (sessionContext.getCurrentDatabase().isPresent()) {
		tableEnv.useDatabase(sessionContext.getCurrentDatabase().get());
	} else if (mergedEnv.getExecution().getCurrentDatabase().isPresent()) {
		tableEnv.useDatabase(mergedEnv.getExecution().getCurrentDatabase().get());
	}
}
 
Example 8
Source File: TableEnvFactory.java    From zeppelin with Apache License 2.0
public TableEnvironment createScalaFlinkStreamTableEnvironment(EnvironmentSettings settings) {
  try {
    Map<String, String> executorProperties = settings.toExecutorProperties();
    Executor executor = lookupExecutor(executorProperties, senv.getJavaEnv());

    Map<String, String> plannerProperties = settings.toPlannerProperties();
    Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
            .create(
                    plannerProperties,
                    executor,
                    tblConfig,
                    flinkFunctionCatalog,
                    catalogManager);

    Class clazz = null;
    if (flinkVersion.isFlink110()) {
      clazz = Class
              .forName("org.apache.flink.table.api.scala.internal.StreamTableEnvironmentImpl");
    } else {
      clazz = Class
              .forName("org.apache.flink.table.api.bridge.scala.internal.StreamTableEnvironmentImpl");
    }
    Constructor constructor = clazz
            .getConstructor(
                    CatalogManager.class,
                    ModuleManager.class,
                    FunctionCatalog.class,
                    TableConfig.class,
                    org.apache.flink.streaming.api.scala.StreamExecutionEnvironment.class,
                    Planner.class,
                    Executor.class,
                    boolean.class);
    return (TableEnvironment) constructor.newInstance(catalogManager,
            moduleManager,
            flinkFunctionCatalog,
            tblConfig,
            senv,
            planner,
            executor,
            settings.isStreamingMode());

  } catch (Exception e) {
    throw new TableException("Fail to createScalaFlinkStreamTableEnvironment", e);
  }
}
 
Example 9
Source File: TableEnvFactory.java    From zeppelin with Apache License 2.0
public TableEnvironment createJavaFlinkStreamTableEnvironment(EnvironmentSettings settings) {

    try {
      Map<String, String> executorProperties = settings.toExecutorProperties();
      Executor executor = lookupExecutor(executorProperties, senv.getJavaEnv());

      Map<String, String> plannerProperties = settings.toPlannerProperties();
      Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
              .create(plannerProperties, executor, tblConfig, flinkFunctionCatalog, catalogManager);

      Class clazz = null;
      if (flinkVersion.isFlink110()) {
        clazz = Class
                .forName("org.apache.flink.table.api.java.internal.StreamTableEnvironmentImpl");
      } else {
        clazz = Class
                .forName("org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl");
      }
      Constructor constructor = clazz
              .getConstructor(
                      CatalogManager.class,
                      ModuleManager.class,
                      FunctionCatalog.class,
                      TableConfig.class,
                      org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.class,
                      Planner.class,
                      Executor.class,
                      boolean.class);
      return (TableEnvironment) constructor.newInstance(catalogManager,
              moduleManager,
              flinkFunctionCatalog,
              tblConfig,
              senv.getJavaEnv(),
              planner,
              executor,
              settings.isStreamingMode());

    } catch (Exception e) {
      throw new TableException("Fail to createJavaFlinkStreamTableEnvironment", e);
    }
  }
 
Example 10
Source File: TableEnvFactory.java    From zeppelin with Apache License 2.0
public TableEnvironment createScalaBlinkStreamTableEnvironment(EnvironmentSettings settings) {

    try {
      Map<String, String> executorProperties = settings.toExecutorProperties();
      Executor executor = lookupExecutor(executorProperties, senv.getJavaEnv());

      Map<String, String> plannerProperties = settings.toPlannerProperties();
      Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
              .create(
                      plannerProperties,
                      executor,
                      tblConfig,
                      blinkFunctionCatalog,
                      catalogManager);


      Class clazz = null;
      if (flinkVersion.isFlink110()) {
        clazz = Class
                .forName("org.apache.flink.table.api.scala.internal.StreamTableEnvironmentImpl");
      } else {
        clazz = Class
                .forName("org.apache.flink.table.api.bridge.scala.internal.StreamTableEnvironmentImpl");
      }
      Constructor constructor = clazz
              .getConstructor(
                      CatalogManager.class,
                      ModuleManager.class,
                      FunctionCatalog.class,
                      TableConfig.class,
                      org.apache.flink.streaming.api.scala.StreamExecutionEnvironment.class,
                      Planner.class,
                      Executor.class,
                      boolean.class);
      return (TableEnvironment) constructor.newInstance(catalogManager,
              moduleManager,
              blinkFunctionCatalog,
              tblConfig,
              senv,
              planner,
              executor,
              settings.isStreamingMode());
    } catch (Exception e) {
      throw new TableException("Fail to createScalaBlinkStreamTableEnvironment", e);
    }
  }
 
Example 11
Source File: TableEnvironmentImpl.java    From flink with Apache License 2.0
public static TableEnvironmentImpl create(EnvironmentSettings settings) {

	// temporary solution until FLINK-15635 is fixed
	ClassLoader classLoader = Thread.currentThread().getContextClassLoader();

	TableConfig tableConfig = new TableConfig();

	ModuleManager moduleManager = new ModuleManager();

	CatalogManager catalogManager = CatalogManager.newBuilder()
		.classLoader(classLoader)
		.config(tableConfig.getConfiguration())
		.defaultCatalog(
			settings.getBuiltInCatalogName(),
			new GenericInMemoryCatalog(
				settings.getBuiltInCatalogName(),
				settings.getBuiltInDatabaseName()))
		.build();

	FunctionCatalog functionCatalog = new FunctionCatalog(tableConfig, catalogManager, moduleManager);

	Map<String, String> executorProperties = settings.toExecutorProperties();
	Executor executor = ComponentFactoryService.find(ExecutorFactory.class, executorProperties)
		.create(executorProperties);

	Map<String, String> plannerProperties = settings.toPlannerProperties();
	Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
		.create(
			plannerProperties,
			executor,
			tableConfig,
			functionCatalog,
			catalogManager);

	return new TableEnvironmentImpl(
		catalogManager,
		moduleManager,
		tableConfig,
		executor,
		functionCatalog,
		planner,
		settings.isStreamingMode()
	);
}
 
Example 12
Source File: StreamTableEnvironmentImpl.java    From flink with Apache License 2.0
public static StreamTableEnvironment create(
		StreamExecutionEnvironment executionEnvironment,
		EnvironmentSettings settings,
		TableConfig tableConfig) {

	if (!settings.isStreamingMode()) {
		throw new TableException(
			"StreamTableEnvironment can not run in batch mode for now, please use TableEnvironment.");
	}

	// temporary solution until FLINK-15635 is fixed
	ClassLoader classLoader = Thread.currentThread().getContextClassLoader();

	ModuleManager moduleManager = new ModuleManager();

	CatalogManager catalogManager = CatalogManager.newBuilder()
		.classLoader(classLoader)
		.config(tableConfig.getConfiguration())
		.defaultCatalog(
			settings.getBuiltInCatalogName(),
			new GenericInMemoryCatalog(
				settings.getBuiltInCatalogName(),
				settings.getBuiltInDatabaseName()))
		.executionConfig(executionEnvironment.getConfig())
		.build();

	FunctionCatalog functionCatalog = new FunctionCatalog(tableConfig, catalogManager, moduleManager);

	Map<String, String> executorProperties = settings.toExecutorProperties();
	Executor executor = lookupExecutor(executorProperties, executionEnvironment);

	Map<String, String> plannerProperties = settings.toPlannerProperties();
	Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
		.create(plannerProperties, executor, tableConfig, functionCatalog, catalogManager);

	return new StreamTableEnvironmentImpl(
		catalogManager,
		moduleManager,
		functionCatalog,
		tableConfig,
		executionEnvironment,
		planner,
		executor,
		settings.isStreamingMode()
	);
}