org.apache.flink.configuration.Configuration Java Examples

The following examples show how to use org.apache.flink.configuration.Configuration. Each example is taken from an open-source project; the source file and license are noted above each snippet.
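Before the individual examples, here is a minimal sketch of the core Configuration API that the snippets below rely on. This is illustrative only; the keys used here are hypothetical placeholders, not taken from the original projects.

Configuration config = new Configuration();

// String-keyed accessors, as used in several of the examples below.
config.setString("example.host", "localhost");   // hypothetical key
config.setInteger("example.port", 8081);         // hypothetical key

String host = config.getString("example.host", "fallback-host"); // returns "localhost"
boolean present = config.containsKey("example.port");            // returns true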
Example #1
Source File: QueryableWindowOperator.java    From yahoo-streaming-benchmark with Apache License 2.0
private static void initializeActorSystem(String hostname) throws UnknownHostException {
  synchronized (actorSystemLock) {
    if (actorSystem == null) {
      Configuration config = new Configuration();
      Option<scala.Tuple2<String, Object>> remoting = new Some<>(new scala.Tuple2<String, Object>(hostname, 0));

      Config akkaConfig = AkkaUtils.getAkkaConfig(config, remoting);

      LOG.info("Start actory system.");
      actorSystem = ActorSystem.create("queryableWindow", akkaConfig);
      actorSystemUsers = 1;
    } else {
      LOG.info("Actor system has already been started.");
      actorSystemUsers++;
    }
  }
}
 
Example #2
Source File: FlinkCubingByLayer.java    From kylin with Apache License 2.0
@Override
public void open(Configuration parameters) throws Exception {
    KylinConfig kConfig = AbstractHadoopJob.loadKylinConfigFromHdfs(conf, metaUrl);
    try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset = KylinConfig
            .setAndUnsetThreadLocalConfig(kConfig)) {
        CubeInstance cubeInstance = CubeManager.getInstance(kConfig).getCube(cubeName);
        CubeDesc cubeDesc = cubeInstance.getDescriptor();
        CubeSegment cubeSegment = cubeInstance.getSegmentById(segmentId);
        CubeJoinedFlatTableEnrich interDesc = new CubeJoinedFlatTableEnrich(
                EngineFactory.getJoinedFlatTableDesc(cubeSegment), cubeDesc);
        long baseCuboidId = Cuboid.getBaseCuboidId(cubeDesc);
        Cuboid baseCuboid = Cuboid.findForMandatory(cubeDesc, baseCuboidId);
        baseCuboidBuilder = new BaseCuboidBuilder(kConfig, cubeDesc, cubeSegment, interDesc,
                AbstractRowKeyEncoder.createInstance(cubeSegment, baseCuboid),
                MeasureIngester.create(cubeDesc.getMeasures()), cubeSegment.buildDictionaryMap());
    }
}
 
Example #3
Source File: PythonProgramOptionsTest.java    From flink with Apache License 2.0
@Test
public void testCreateProgramOptionsWithLongOptions() throws CliArgsException {
	String[] args = {
		"--python", "xxx.py",
		"--pyModule", "xxx",
		"--pyFiles", "/absolute/a.py,relative/b.py,relative/c.py",
		"--pyRequirements", "d.txt#e_dir",
		"--pyExecutable", "/usr/bin/python",
		"--pyArchives", "g.zip,h.zip#data,h.zip#data2",
		"userarg1", "userarg2"
	};

	CommandLine line = CliFrontendParser.parse(options, args, false);
	PythonProgramOptions programOptions = (PythonProgramOptions) ProgramOptions.create(line);
	Configuration config = new Configuration();
	programOptions.applyToConfiguration(config);
	assertEquals("/absolute/a.py,relative/b.py,relative/c.py", config.get(PythonOptions.PYTHON_FILES));
	assertEquals("d.txt#e_dir", config.get(PYTHON_REQUIREMENTS));
	assertEquals("g.zip,h.zip#data,h.zip#data2", config.get(PythonOptions.PYTHON_ARCHIVES));
	assertEquals("/usr/bin/python", config.get(PYTHON_EXECUTABLE));
	assertArrayEquals(
		new String[] {"--python", "xxx.py", "--pyModule", "xxx", "userarg1", "userarg2"},
		programOptions.getProgramArgs());
}
 
Example #4
Source File: RestClientTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testConnectionTimeout() throws Exception {
	final Configuration config = new Configuration();
	config.setLong(RestOptions.CONNECTION_TIMEOUT, 1);
	try (final RestClient restClient = new RestClient(RestClientConfiguration.fromConfiguration(config), Executors.directExecutor())) {
		restClient.sendRequest(
			unroutableIp,
			80,
			new TestMessageHeaders(),
			EmptyMessageParameters.getInstance(),
			EmptyRequestBody.getInstance())
			.get(60, TimeUnit.SECONDS);
	} catch (final ExecutionException e) {
		final Throwable throwable = ExceptionUtils.stripExecutionException(e);
		assertThat(throwable, instanceOf(ConnectTimeoutException.class));
		assertThat(throwable.getMessage(), containsString(unroutableIp));
	}
}
 
Example #5
Source File: RescalingITCase.java    From flink with Apache License 2.0
@Before
public void setup() throws Exception {
	// detect parameter change
	if (currentBackend != backend) {
		shutDownExistingCluster();

		currentBackend = backend;

		Configuration config = new Configuration();

		final File checkpointDir = temporaryFolder.newFolder();
		final File savepointDir = temporaryFolder.newFolder();

		config.setString(CheckpointingOptions.STATE_BACKEND, currentBackend);
		config.setString(CheckpointingOptions.CHECKPOINTS_DIRECTORY, checkpointDir.toURI().toString());
		config.setString(CheckpointingOptions.SAVEPOINT_DIRECTORY, savepointDir.toURI().toString());

		cluster = new MiniClusterWithClientResource(
			new MiniClusterResourceConfiguration.Builder()
				.setConfiguration(config)
				.setNumberTaskManagers(numTaskManagers)
				.setNumberSlotsPerTaskManager(numSlots)
				.build());
		cluster.before();
	}
}
 
Example #6
Source File: BootstrapTools.java    From Flink-CEPplus with Apache License 2.0
/**
* Sets the value of a new config key to the value of a deprecated config key. Taking into
* account the changed prefix.
* @param config Config to write
* @param deprecatedPrefix Old prefix of key
* @param designatedPrefix New prefix of key
*/
public static void substituteDeprecatedConfigPrefix(
		Configuration config,
		String deprecatedPrefix,
		String designatedPrefix) {

	// set the designated key only if it is not set already
	final int prefixLen = deprecatedPrefix.length();

	Configuration replacement = new Configuration();

	for (String key : config.keySet()) {
		if (key.startsWith(deprecatedPrefix)) {
			String newKey = designatedPrefix + key.substring(prefixLen);
			if (!config.containsKey(newKey)) {
				replacement.setString(newKey, config.getString(key, null));
			}
		}
	}

	config.addAll(replacement);
}
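A minimal, hedged usage sketch of the helper above; the prefixes and the key are hypothetical and only illustrate the rewriting behavior.

Configuration config = new Configuration();
config.setString("old.prefix.port", "6123");   // hypothetical deprecated key

BootstrapTools.substituteDeprecatedConfigPrefix(config, "old.prefix.", "new.prefix.");

// The configuration now also contains "new.prefix.port" -> "6123";
// the deprecated key itself is left in place, since only addAll(replacement) is called.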
 
Example #7
Source File: GenericCsvInputFormatTest.java    From flink with Apache License 2.0
@Test
public void testReadTooShortInputLenient() throws IOException {
	try {
		final String fileContent = "666|777|888|999|555\n111|222|333|444\n666|777|888|999|555";
		final FileInputSplit split = createTempFile(fileContent);	
	
		final Configuration parameters = new Configuration();
		format.setFieldDelimiter("|");
		format.setFieldTypesGeneric(IntValue.class, IntValue.class, IntValue.class, IntValue.class, IntValue.class);
		format.setLenient(true);
		
		format.configure(parameters);
		format.open(split);
		
		Value[] values = createIntValues(5);
		
		assertNotNull(format.nextRecord(values));	// line okay
		assertNull(format.nextRecord(values));	// line too short
		assertNotNull(format.nextRecord(values));	// line okay
	}
	catch (Exception ex) {
		fail("Test failed due to a " + ex.getClass().getSimpleName() + ": " + ex.getMessage());
	}
}
 
Example #8
Source File: ClientTest.java    From flink with Apache License 2.0
private void launchMultiExecuteJob(final boolean enforceSingleJobExecution) throws ProgramInvocationException {
	try (final ClusterClient<?> clusterClient =
				new MiniClusterClient(new Configuration(), MINI_CLUSTER_RESOURCE.getMiniCluster())) {

		final PackagedProgram program = PackagedProgram.newBuilder()
				.setEntryPointClassName(TestMultiExecute.class.getName())
				.build();

		final Configuration configuration = fromPackagedProgram(program, 1, false);

		ClientUtils.executeProgram(
				new TestExecutorServiceLoader(clusterClient, plan),
				configuration,
				program,
				enforceSingleJobExecution,
				false);
	}
}
 
Example #9
Source File: SlsRecordReader.java    From alibaba-flink-connectors with Apache License 2.0
public SlsRecordReader(
		String endPoint,
		Configuration properties,
		String project,
		String logStore,
		int startInSec,
		int stopInSec,
		int maxRetryTime,
		int batchGetSize,
		List<Shard> initShardList,
		String consumerGroup) {
	this.endPoint = endPoint;
	this.project = project;
	this.logStore = logStore;
	this.startInSec = startInSec;
	this.stopInSec = stopInSec;
	this.maxRetryTime = maxRetryTime;
	this.batchGetSize = batchGetSize;
	setInitPartitionCount(null == initShardList ? 0 : initShardList.size());
	this.properties = properties;
	this.consumerGroup = consumerGroup;
}
 
Example #10
Source File: BlobServerSSLTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testFailedToInitWithInvalidSslKeystoreConfigured() {
	final Configuration config = new Configuration();

	config.setBoolean(SecurityOptions.SSL_INTERNAL_ENABLED, true);
	config.setString(SecurityOptions.SSL_KEYSTORE, "invalid.keystore");
	config.setString(SecurityOptions.SSL_KEYSTORE_PASSWORD, "password");
	config.setString(SecurityOptions.SSL_KEY_PASSWORD, "password");
	config.setString(SecurityOptions.SSL_TRUSTSTORE, "invalid.keystore");
	config.setString(SecurityOptions.SSL_TRUSTSTORE_PASSWORD, "password");

	try (final BlobServer ignored = new BlobServer(config, new VoidBlobStore())) {
		fail();
	} catch (Exception e) {
		findThrowable(e, IOException.class);
		findThrowableWithMessage(e, "Failed to initialize SSL for the blob server");
	}
}
 
Example #11
Source File: UnaryOperatorTestBase.java    From flink with Apache License 2.0
protected UnaryOperatorTestBase(ExecutionConfig executionConfig, long memory, int maxNumSorters, long perSortMemory) {
	if (memory < 0 || maxNumSorters < 0 || perSortMemory < 0) {
		throw new IllegalArgumentException();
	}
	
	final long totalMem = Math.max(memory, 0) + (Math.max(maxNumSorters, 0) * perSortMemory);
	
	this.perSortMem = perSortMemory;
	this.perSortFractionMem = (double)perSortMemory/totalMem;
	this.ioManager = new IOManagerAsync();
	this.memManager = totalMem > 0 ? MemoryManagerBuilder.newBuilder().setMemorySize(totalMem).build() : null;
	this.owner = new DummyInvokable();

	Configuration config = new Configuration();
	this.taskConfig = new TaskConfig(config);

	this.executionConfig = executionConfig;
	this.comparators = new ArrayList<TypeComparator<IN>>(2);

	this.taskManageInfo = new TestingTaskManagerRuntimeInfo();
}
 
Example #12
Source File: TaskExecutorSubmissionTest.java    From flink with Apache License 2.0
private TaskDeploymentDescriptor createTestTaskDeploymentDescriptor(
	String taskName,
	ExecutionAttemptID eid,
	Class<? extends AbstractInvokable> abstractInvokable,
	int maxNumberOfSubtasks,
	Collection<ResultPartitionDeploymentDescriptor> producedPartitions,
	Collection<InputGateDeploymentDescriptor> inputGates
) throws IOException {
	Preconditions.checkNotNull(producedPartitions);
	Preconditions.checkNotNull(inputGates);
	return createTaskDeploymentDescriptor(
		jobId, testName.getMethodName(), eid,
		new SerializedValue<>(new ExecutionConfig()), taskName, maxNumberOfSubtasks, 0, 1, 0,
		new Configuration(), new Configuration(), abstractInvokable.getName(),
		producedPartitions,
		inputGates,
		Collections.emptyList(),
		Collections.emptyList(),
		0);
}
 
Example #13
Source File: StreamingCustomInputSplitProgram.java    From flink with Apache License 2.0
public static void main(String[] args) throws Exception {
	Configuration config = new Configuration();

	config.setString(AkkaOptions.ASK_TIMEOUT, "5 s");

	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

	DataStream<Integer> data = env.createInput(new CustomInputFormat());

	data.map(new MapFunction<Integer, Tuple2<Integer, Double>>() {
		@Override
		public Tuple2<Integer, Double> map(Integer value) throws Exception {
			return new Tuple2<Integer, Double>(value, value * 0.5);
		}
	}).addSink(new DiscardingSink<>());

	env.execute();
}
 
Example #14
Source File: TaskManagerRunnerConfigurationTest.java    From flink with Apache License 2.0
@Test
public void testDefaultFsParameterLoading() throws Exception {
	try {
		final File tmpDir = temporaryFolder.newFolder();
		final File confFile = new File(tmpDir, GlobalConfiguration.FLINK_CONF_FILENAME);

		final URI defaultFS = new URI("otherFS", null, "localhost", 1234, null, null, null);

		final PrintWriter pw1 = new PrintWriter(confFile);
		pw1.println("fs.default-scheme: " + defaultFS);
		pw1.close();

		String[] args = new String[] {"--configDir", tmpDir.toString()};
		Configuration configuration = TaskManagerRunner.loadConfiguration(args);
		FileSystem.initialize(configuration);

		assertEquals(defaultFS, FileSystem.getDefaultFsUri());
	}
	finally {
		// reset FS settings
		FileSystem.initialize(new Configuration());
	}
}
 
Example #15
Source File: ClusterEntrypoint.java    From flink with Apache License 2.0
protected static Configuration loadConfiguration(EntrypointClusterConfiguration entrypointClusterConfiguration) {
	final Configuration dynamicProperties = ConfigurationUtils.createConfiguration(entrypointClusterConfiguration.getDynamicProperties());
	final Configuration configuration = GlobalConfiguration.loadConfiguration(entrypointClusterConfiguration.getConfigDir(), dynamicProperties);

	final int restPort = entrypointClusterConfiguration.getRestPort();

	if (restPort >= 0) {
		configuration.setInteger(RestOptions.PORT, restPort);
	}

	final String hostname = entrypointClusterConfiguration.getHostname();

	if (hostname != null) {
		configuration.setString(JobManagerOptions.ADDRESS, hostname);
	}

	return configuration;
}
 
Example #16
Source File: SortUtilsNext.java    From Alink with Apache License 2.0
@Override
public void open(Configuration parameters) throws Exception {
	super.open(parameters);

	this.taskId = getRuntimeContext().getIndexOfThisSubtask();

	LOG.info("{} open.", getRuntimeContext().getTaskName());

	List<Tuple2<Integer, Long>> allCnt = getRuntimeContext().getBroadcastVariable("cnt");

	for (Tuple2<Integer, Long> localCnt : allCnt) {
		if (localCnt.f0 == taskId) {
			cnt = localCnt.f1.intValue();
			break;
		}
	}
}
 
Example #17
Source File: ZooKeeperUtilityFactory.java    From flink with Apache License 2.0
public ZooKeeperUtilityFactory(Configuration configuration, String path) throws Exception {
	Preconditions.checkNotNull(path, "path");

	root = ZooKeeperUtils.startCuratorFramework(configuration);

	root.newNamespaceAwareEnsurePath(path).ensure(root.getZookeeperClient());
	facade = root.usingNamespace(ZooKeeperUtils.generateZookeeperPath(root.getNamespace(), path));
}
 
Example #18
Source File: StreamCheckpointingITCase.java    From flink with Apache License 2.0
@Override
public void open(Configuration parameters) throws IOException {
	step = getRuntimeContext().getNumberOfParallelSubtasks();
	if (index == 0) {
		index = getRuntimeContext().getIndexOfThisSubtask();
	}
}
 
Example #19
Source File: ZooKeeperHaServices.java    From flink with Apache License 2.0
public ZooKeeperHaServices(
		CuratorFramework client,
		Executor executor,
		Configuration configuration,
		BlobStoreService blobStoreService) {
	this.client = checkNotNull(client);
	this.executor = checkNotNull(executor);
	this.configuration = checkNotNull(configuration);
	this.runningJobsRegistry = new ZooKeeperRunningJobsRegistry(client, configuration);

	this.blobStoreService = checkNotNull(blobStoreService);
}
 
Example #20
Source File: ParallelVB.java    From toolbox with Apache License 2.0
@Override
public void open(Configuration parameters) throws Exception {
    super.open(parameters);
    svb = Serialization.deserializeObject(parameters.getBytes(SVB, null));
    svb.initLearning();
    latentVariables = Serialization.deserializeObject(parameters.getBytes(LATENT_VARS, null));
}
 
Example #21
Source File: KubernetesSessionCliTest.java    From flink with Apache License 2.0
private KubernetesSessionCli createFlinkKubernetesCustomCliWithJmAndTmTotalMemory(int totalMemory) {
	Configuration configuration = new Configuration();
	configuration.set(JobManagerOptions.TOTAL_PROCESS_MEMORY, MemorySize.ofMebiBytes(totalMemory));
	configuration.set(TaskManagerOptions.TOTAL_PROCESS_MEMORY, MemorySize.ofMebiBytes(totalMemory));
	return new KubernetesSessionCli(
			configuration,
			tmp.getRoot().getAbsolutePath());
}
 
Example #22
Source File: FailoverStrategyLoader.java    From Flink-CEPplus with Apache License 2.0
/**
 * Loads a FailoverStrategy Factory from the given configuration.
 */
public static FailoverStrategy.Factory loadFailoverStrategy(Configuration config, @Nullable Logger logger) {
	final String strategyParam = config.getString(JobManagerOptions.EXECUTION_FAILOVER_STRATEGY);

	if (StringUtils.isNullOrWhitespaceOnly(strategyParam)) {
		if (logger != null) {
			logger.warn("Null config value for {} ; using default failover strategy (full restarts).",
					JobManagerOptions.EXECUTION_FAILOVER_STRATEGY.key());
		}

		return new RestartAllStrategy.Factory();
	}
	else {
		switch (strategyParam.toLowerCase()) {
			case FULL_RESTART_STRATEGY_NAME:
				return new RestartAllStrategy.Factory();

			case PIPELINED_REGION_RESTART_STRATEGY_NAME:
				return new RestartPipelinedRegionStrategy.Factory();

			case INDIVIDUAL_RESTART_STRATEGY_NAME:
				return new RestartIndividualStrategy.Factory();

			default:
				// we could interpret the parameter as a factory class name and instantiate that
				// for now we simply do not support this
				throw new IllegalConfigurationException("Unknown failover strategy: " + strategyParam);
		}
	}
}
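A hedged sketch of calling this loader; LOG is assumed to be an available org.slf4j.Logger, and the factory actually returned depends on the value (or default) of JobManagerOptions.EXECUTION_FAILOVER_STRATEGY.

Configuration config = new Configuration();

// Which factory comes back is decided by the configured failover strategy name,
// e.g. a RestartAllStrategy.Factory for the full-restart strategy.
FailoverStrategy.Factory factory = FailoverStrategyLoader.loadFailoverStrategy(config, LOG);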
 
Example #23
Source File: AbstractYarnClusterDescriptor.java    From flink with Apache License 2.0
private static void throwIfUserTriesToDisableUserJarInclusionInSystemClassPath(final Configuration config) {
	final String userJarInclusion = config.getString(YarnConfigOptions.CLASSPATH_INCLUDE_USER_JAR);
	if ("DISABLED".equalsIgnoreCase(userJarInclusion)) {
		throw new IllegalArgumentException(String.format("Config option %s cannot be set to DISABLED anymore (see FLINK-11781)",
			YarnConfigOptions.CLASSPATH_INCLUDE_USER_JAR.key()));
	}
}
 
Example #24
Source File: AdvertisingTopologyNative.java    From yahoo-streaming-benchmark with Apache License 2.0
@Override
public void open(Configuration parameters) {
  LOG.info("Opening connection with Jedis to {}", config.redisHost);

  this.campaignProcessorCommon = new CampaignProcessorCommon(config.redisHost);
  this.campaignProcessorCommon.prepare();
}
 
Example #25
Source File: YarnClusterDescriptorTest.java    From flink with Apache License 2.0
/**
 * @see <a href="https://issues.apache.org/jira/browse/FLINK-11781">FLINK-11781</a>
 */
@Test
public void testThrowsExceptionIfUserTriesToDisableUserJarInclusionInSystemClassPath() {
	final Configuration configuration = new Configuration();
	configuration.setString(YarnConfigOptions.CLASSPATH_INCLUDE_USER_JAR, "DISABLED");

	try {
		createYarnClusterDescriptor(configuration);
		fail("Expected exception not thrown");
	} catch (final IllegalArgumentException e) {
		assertThat(e.getMessage(), containsString("cannot be set to DISABLED anymore"));
	}
}
 
Example #26
Source File: FilterITCase.java    From flink with Apache License 2.0
@Override
public void open(Configuration config) {
	Collection<Integer> ints = this.getRuntimeContext().getBroadcastVariable("ints");
	for (int i: ints) {
		literal = literal < i ? i : literal;
	}
}
 
Example #27
Source File: SSLUtilsTest.java    From Flink-CEPplus with Apache License 2.0
/**
 * Tests that REST Server SSL Engine is created given a valid SSL configuration.
 */
@Test
public void testRESTServerSSL() throws Exception {
	Configuration serverConfig = createRestSslConfigWithKeyStore();

	SSLHandlerFactory ssl = SSLUtils.createRestServerSSLEngineFactory(serverConfig);
	assertNotNull(ssl);
}
 
Example #28
Source File: ScopeFormats.java    From flink with Apache License 2.0
/**
 * Creates the scope formats as defined in the given configuration.
 *
 * @param config The configuration that defines the formats
 * @return The ScopeFormats parsed from the configuration
 */
public static ScopeFormats fromConfig(Configuration config) {
	String jmFormat = config.getString(MetricOptions.SCOPE_NAMING_JM);
	String jmJobFormat = config.getString(MetricOptions.SCOPE_NAMING_JM_JOB);
	String tmFormat = config.getString(MetricOptions.SCOPE_NAMING_TM);
	String tmJobFormat = config.getString(MetricOptions.SCOPE_NAMING_TM_JOB);
	String taskFormat = config.getString(MetricOptions.SCOPE_NAMING_TASK);
	String operatorFormat = config.getString(MetricOptions.SCOPE_NAMING_OPERATOR);

	return new ScopeFormats(jmFormat, jmJobFormat, tmFormat, tmJobFormat, taskFormat, operatorFormat);
}
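A hedged usage sketch, assuming the MetricOptions scope options shown above; the custom task manager format string is illustrative only.

Configuration config = new Configuration();
config.setString(MetricOptions.SCOPE_NAMING_TM, "<host>.taskmanager.<tm_id>"); // illustrative format

ScopeFormats formats = ScopeFormats.fromConfig(config);
// Options that are not set explicitly fall back to the defaults of the respective MetricOptions entries.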
 
Example #29
Source File: MiniClusterConfiguration.java    From flink with Apache License 2.0
public MiniClusterConfiguration(
		Configuration configuration,
		int numTaskManagers,
		RpcServiceSharing rpcServiceSharing,
		@Nullable String commonBindAddress) {

	this.numTaskManagers = numTaskManagers;
	this.configuration = generateConfiguration(Preconditions.checkNotNull(configuration));
	this.rpcServiceSharing = Preconditions.checkNotNull(rpcServiceSharing);
	this.commonBindAddress = commonBindAddress;
}
 
Example #30
Source File: StandaloneJobClusterEntryPointTest.java    From flink with Apache License 2.0
@Test
public void jobIDdefaultsToRandomJobIDWithoutHA() {
	Optional<JobID> jobID = Optional.empty();

	Configuration globalConfiguration = new Configuration();

	JobID jobIdForCluster = StandaloneJobClusterEntryPoint.resolveJobIdForCluster(
		jobID,
		globalConfiguration);

	assertThat(jobIdForCluster, is(not(ZERO_JOB_ID)));
}