org.apache.flink.runtime.util.EnvironmentInformation Java Examples

The following examples show how to use org.apache.flink.runtime.util.EnvironmentInformation. Each example is taken from an open-source project; the source file, project, and license are noted above each listing.
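Before the individual examples, here is a minimal, self-contained sketch of the calls that recur throughout them: logging the environment at startup, reading the Flink version and Git revision, and querying the open-file-handle limit. The MyEntrypoint class name and the logger setup are illustrative placeholders, not taken from any example below.

import org.apache.flink.runtime.util.EnvironmentInformation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class MyEntrypoint {

	private static final Logger LOG = LoggerFactory.getLogger(MyEntrypoint.class);

	public static void main(String[] args) {
		// log JVM, memory, and command-line information at startup, as the cluster entrypoints below do
		EnvironmentInformation.logEnvironmentInfo(LOG, "MyEntrypoint", args);

		// read the Flink version and Git revision, e.g. for user agents or dashboard metadata
		String version = EnvironmentInformation.getVersion();
		EnvironmentInformation.RevisionInformation revision = EnvironmentInformation.getRevisionInformation();
		String commit = revision != null ? revision.commitId : "unknown revision";
		LOG.info("Running Flink {} (commit {})", version, commit);

		// query the open file handle limit; returns -1 when it cannot be determined
		long maxOpenFileHandles = EnvironmentInformation.getOpenFileHandlesLimit();
		if (maxOpenFileHandles != -1L) {
			LOG.info("Maximum number of open file descriptors is {}.", maxOpenFileHandles);
		}
	}
}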
Example #1
Source File: MesosSessionClusterEntrypoint.java    From flink with Apache License 2.0
public static void main(String[] args) {
	// startup checks and logging
	EnvironmentInformation.logEnvironmentInfo(LOG, MesosSessionClusterEntrypoint.class.getSimpleName(), args);
	SignalHandler.register(LOG);
	JvmShutdownSafeguard.installAsShutdownHook(LOG);

	// load configuration incl. dynamic properties
	CommandLineParser parser = new PosixParser();
	CommandLine cmd;
	try {
		cmd = parser.parse(ALL_OPTIONS, args);
	}
	catch (Exception e){
		LOG.error("Could not parse the command-line options.", e);
		System.exit(STARTUP_FAILURE_RETURN_CODE);
		return;
	}

	Configuration dynamicProperties = BootstrapTools.parseDynamicProperties(cmd);
	Configuration configuration = MesosUtils.loadConfiguration(dynamicProperties, LOG);

	MesosSessionClusterEntrypoint clusterEntrypoint = new MesosSessionClusterEntrypoint(configuration);

	ClusterEntrypoint.runClusterEntrypoint(clusterEntrypoint);
}
 
Example #2
Source File: DynamoDBStreamsProxy.java    From flink with Apache License 2.0
/**
 * Creates an AmazonDynamoDBStreamsAdapterClient.
 * The adapter is used as the internal client for interacting with DynamoDB Streams.
 *
 * @param configProps configuration properties
 * @return an AWS DynamoDB streams adapter client
 */
@Override
protected AmazonKinesis createKinesisClient(Properties configProps) {
	ClientConfiguration awsClientConfig = new ClientConfigurationFactory().getConfig();
	setAwsClientConfigProperties(awsClientConfig, configProps);

	AWSCredentialsProvider credentials = getCredentialsProvider(configProps);
	awsClientConfig.setUserAgentPrefix(
			String.format(
					USER_AGENT_FORMAT,
					EnvironmentInformation.getVersion(),
					EnvironmentInformation.getRevisionInformation().commitId));

	AmazonDynamoDBStreamsAdapterClient adapterClient =
			new AmazonDynamoDBStreamsAdapterClient(credentials, awsClientConfig);

	if (configProps.containsKey(AWS_ENDPOINT)) {
		adapterClient.setEndpoint(configProps.getProperty(AWS_ENDPOINT));
	} else {
		adapterClient.setRegion(Region.getRegion(
				Regions.fromName(configProps.getProperty(AWS_REGION))));
	}

	return adapterClient;
}
 
Example #3
Source File: AWSUtil.java    From flink with Apache License 2.0
/**
 * Creates an Amazon Kinesis Client.
 * @param configProps configuration properties containing the access key, secret key, and region
 * @param awsClientConfig preconfigured AWS SDK client configuration
 * @return a new Amazon Kinesis Client
 */
public static AmazonKinesis createKinesisClient(Properties configProps, ClientConfiguration awsClientConfig) {
	// set a Flink-specific user agent
	awsClientConfig.setUserAgentPrefix(String.format(USER_AGENT_FORMAT,
			EnvironmentInformation.getVersion(),
			EnvironmentInformation.getRevisionInformation().commitId));

	// rely on automatic credential refreshing by passing the AWSCredentialsProvider directly
	AmazonKinesisClientBuilder builder = AmazonKinesisClientBuilder.standard()
			.withCredentials(AWSUtil.getCredentialsProvider(configProps))
			.withClientConfiguration(awsClientConfig);

	if (configProps.containsKey(AWSConfigConstants.AWS_ENDPOINT)) {
		// Set signingRegion as null, to facilitate mocking Kinesis for local tests
		builder.withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(
												configProps.getProperty(AWSConfigConstants.AWS_ENDPOINT),
												null));
	} else {
		builder.withRegion(Regions.fromName(configProps.getProperty(AWSConfigConstants.AWS_REGION)));
	}
	return builder.build();
}
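For context, a hedged usage sketch of the method above; the region value is a placeholder, and the snippet assumes the flink-connector-kinesis classes (AWSUtil, AWSConfigConstants) and the AWS SDK are on the classpath.

	Properties configProps = new Properties();
	configProps.setProperty(AWSConfigConstants.AWS_REGION, "us-east-1"); // placeholder region
	ClientConfiguration awsClientConfig = new ClientConfigurationFactory().getConfig();
	AmazonKinesis kinesis = AWSUtil.createKinesisClient(configProps, awsClientConfig);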
 
Example #4
Source File: MesosJobClusterEntrypoint.java    From flink with Apache License 2.0
public static void main(String[] args) {
	// startup checks and logging
	EnvironmentInformation.logEnvironmentInfo(LOG, MesosJobClusterEntrypoint.class.getSimpleName(), args);
	SignalHandler.register(LOG);
	JvmShutdownSafeguard.installAsShutdownHook(LOG);

	// load configuration incl. dynamic properties
	CommandLineParser parser = new PosixParser();
	CommandLine cmd;
	try {
		cmd = parser.parse(ALL_OPTIONS, args);
	}
	catch (Exception e){
		LOG.error("Could not parse the command-line options.", e);
		System.exit(STARTUP_FAILURE_RETURN_CODE);
		return;
	}

	Configuration dynamicProperties = BootstrapTools.parseDynamicProperties(cmd);
	Configuration configuration = MesosEntrypointUtils.loadConfiguration(dynamicProperties, LOG);

	MesosJobClusterEntrypoint clusterEntrypoint = new MesosJobClusterEntrypoint(configuration, dynamicProperties);

	ClusterEntrypoint.runClusterEntrypoint(clusterEntrypoint);
}
 
Example #5
Source File: FlinkZooKeeperQuorumPeer.java    From flink with Apache License 2.0
public static void main(String[] args) {
	try {
		// startup checks and logging
		EnvironmentInformation.logEnvironmentInfo(LOG, "ZooKeeper Quorum Peer", args);
		
		final ParameterTool params = ParameterTool.fromArgs(args);
		final String zkConfigFile = params.getRequired("zkConfigFile");
		final int peerId = params.getInt("peerId");

		// Run quorum peer
		runFlinkZkQuorumPeer(zkConfigFile, peerId);
	}
	catch (Throwable t) {
		LOG.error("Error running ZooKeeper quorum peer: " + t.getMessage(), t);
		System.exit(-1);
	}
}
 
Example #6
Source File: DashboardConfiguration.java    From flink with Apache License 2.0
public static DashboardConfiguration from(long refreshInterval, ZonedDateTime zonedDateTime) {

	final String flinkVersion = EnvironmentInformation.getVersion();

	final EnvironmentInformation.RevisionInformation revision = EnvironmentInformation.getRevisionInformation();
	final String flinkRevision;

	if (revision != null) {
		flinkRevision = revision.commitId + " @ " + revision.commitDate;
	} else {
		flinkRevision = "unknown revision";
	}

	return new DashboardConfiguration(
		refreshInterval,
		zonedDateTime.getZone().getDisplayName(TextStyle.FULL, Locale.getDefault()),
		// convert the zoned date-time to an offset so that daylight-saving adjustments do not affect it
		zonedDateTime.toOffsetDateTime().getOffset().getTotalSeconds() * 1000,
		flinkVersion,
		flinkRevision);
}
 
Example #7
Source File: StandaloneSessionClusterEntrypoint.java    From flink with Apache License 2.0
public static void main(String[] args) {
	// startup checks and logging
	EnvironmentInformation.logEnvironmentInfo(LOG, StandaloneSessionClusterEntrypoint.class.getSimpleName(), args);
	SignalHandler.register(LOG);
	JvmShutdownSafeguard.installAsShutdownHook(LOG);

	EntrypointClusterConfiguration entrypointClusterConfiguration = null;
	final CommandLineParser<EntrypointClusterConfiguration> commandLineParser = new CommandLineParser<>(new EntrypointClusterConfigurationParserFactory());

	try {
		entrypointClusterConfiguration = commandLineParser.parse(args);
	} catch (FlinkParseException e) {
		LOG.error("Could not parse command line arguments {}.", args, e);
		commandLineParser.printHelp(StandaloneSessionClusterEntrypoint.class.getSimpleName());
		System.exit(1);
	}

	Configuration configuration = loadConfiguration(entrypointClusterConfiguration);

	StandaloneSessionClusterEntrypoint entrypoint = new StandaloneSessionClusterEntrypoint(configuration);

	ClusterEntrypoint.runClusterEntrypoint(entrypoint);
}
 
Example #8
Source File: DynamoDBStreamsProxy.java    From flink with Apache License 2.0
/**
 * Creates an AmazonDynamoDBStreamsAdapterClient.
 * The adapter is used as the internal client for interacting with DynamoDB Streams.
 *
 * @param configProps configuration properties
 * @return an AWS DynamoDB streams adapter client
 */
@Override
protected AmazonKinesis createKinesisClient(Properties configProps) {
	ClientConfiguration awsClientConfig = new ClientConfigurationFactory().getConfig();
	setAwsClientConfigProperties(awsClientConfig, configProps);

	AWSCredentialsProvider credentials = getCredentialsProvider(configProps);
	awsClientConfig.setUserAgentPrefix(
			String.format(
					USER_AGENT_FORMAT,
					EnvironmentInformation.getVersion(),
					EnvironmentInformation.getRevisionInformation().commitId));

	AmazonDynamoDBStreamsAdapterClient adapterClient =
			new AmazonDynamoDBStreamsAdapterClient(credentials, awsClientConfig);

	if (configProps.containsKey(AWS_ENDPOINT)) {
		adapterClient.setEndpoint(configProps.getProperty(AWS_ENDPOINT));
	} else {
		adapterClient.setRegion(Region.getRegion(
				Regions.fromName(configProps.getProperty(AWS_REGION))));
	}

	return adapterClient;
}
 
Example #9
Source File: AWSUtil.java    From flink with Apache License 2.0
/**
 * Creates an Amazon Kinesis Client.
 * @param configProps configuration properties containing the access key, secret key, and region
 * @param awsClientConfig preconfigured AWS SDK client configuration
 * @return a new Amazon Kinesis Client
 */
public static AmazonKinesis createKinesisClient(Properties configProps, ClientConfiguration awsClientConfig) {
	// set a Flink-specific user agent
	awsClientConfig.setUserAgentPrefix(String.format(USER_AGENT_FORMAT,
			EnvironmentInformation.getVersion(),
			EnvironmentInformation.getRevisionInformation().commitId));

	// rely on automatic credential refreshing by passing the AWSCredentialsProvider directly
	AmazonKinesisClientBuilder builder = AmazonKinesisClientBuilder.standard()
			.withCredentials(AWSUtil.getCredentialsProvider(configProps))
			.withClientConfiguration(awsClientConfig);

	if (configProps.containsKey(AWSConfigConstants.AWS_ENDPOINT)) {
		// If an endpoint is specified, we give preference to using an endpoint and use the region property to
		// sign the request.
		builder.withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(
			configProps.getProperty(AWSConfigConstants.AWS_ENDPOINT),
			configProps.getProperty(AWSConfigConstants.AWS_REGION)));
	} else {
		builder.withRegion(Regions.fromName(configProps.getProperty(AWSConfigConstants.AWS_REGION)));
	}
	return builder.build();
}
 
Example #10
Source File: AWSUtil.java    From Flink-CEPplus with Apache License 2.0
/**
 * Creates an Amazon Kinesis Client.
 * @param configProps configuration properties containing the access key, secret key, and region
 * @param awsClientConfig preconfigured AWS SDK client configuration
 * @return a new Amazon Kinesis Client
 */
public static AmazonKinesis createKinesisClient(Properties configProps, ClientConfiguration awsClientConfig) {
	// set a Flink-specific user agent
	awsClientConfig.setUserAgentPrefix(String.format(USER_AGENT_FORMAT,
			EnvironmentInformation.getVersion(),
			EnvironmentInformation.getRevisionInformation().commitId));

	// rely on automatic credential refreshing by passing the AWSCredentialsProvider directly
	AmazonKinesisClientBuilder builder = AmazonKinesisClientBuilder.standard()
			.withCredentials(AWSUtil.getCredentialsProvider(configProps))
			.withClientConfiguration(awsClientConfig);

	if (configProps.containsKey(AWSConfigConstants.AWS_ENDPOINT)) {
		// Set signingRegion as null, to facilitate mocking Kinesis for local tests
		builder.withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(
												configProps.getProperty(AWSConfigConstants.AWS_ENDPOINT),
												null));
	} else {
		builder.withRegion(Regions.fromName(configProps.getProperty(AWSConfigConstants.AWS_REGION)));
	}
	return builder.build();
}
 
Example #11
Source File: MesosJobClusterEntrypoint.java    From flink with Apache License 2.0
public static void main(String[] args) {
	// startup checks and logging
	EnvironmentInformation.logEnvironmentInfo(LOG, MesosJobClusterEntrypoint.class.getSimpleName(), args);
	SignalHandler.register(LOG);
	JvmShutdownSafeguard.installAsShutdownHook(LOG);

	// load configuration incl. dynamic properties
	CommandLineParser parser = new PosixParser();
	CommandLine cmd;
	try {
		cmd = parser.parse(ALL_OPTIONS, args);
	}
	catch (Exception e){
		LOG.error("Could not parse the command-line options.", e);
		System.exit(STARTUP_FAILURE_RETURN_CODE);
		return;
	}

	Configuration dynamicProperties = BootstrapTools.parseDynamicProperties(cmd);
	Configuration configuration = MesosUtils.loadConfiguration(dynamicProperties, LOG);

	MesosJobClusterEntrypoint clusterEntrypoint = new MesosJobClusterEntrypoint(configuration);

	ClusterEntrypoint.runClusterEntrypoint(clusterEntrypoint);
}
 
Example #12
Source File: YarnSessionClusterEntrypoint.java    From flink with Apache License 2.0
public static void main(String[] args) {
	// startup checks and logging
	EnvironmentInformation.logEnvironmentInfo(LOG, YarnSessionClusterEntrypoint.class.getSimpleName(), args);
	SignalHandler.register(LOG);
	JvmShutdownSafeguard.installAsShutdownHook(LOG);

	Map<String, String> env = System.getenv();

	final String workingDirectory = env.get(ApplicationConstants.Environment.PWD.key());
	Preconditions.checkArgument(
		workingDirectory != null,
		"Working directory variable (%s) not set",
		ApplicationConstants.Environment.PWD.key());

	try {
		YarnEntrypointUtils.logYarnEnvironmentInformation(env, LOG);
	} catch (IOException e) {
		LOG.warn("Could not log YARN environment information.", e);
	}

	Configuration configuration = YarnEntrypointUtils.loadConfiguration(workingDirectory, env);

	YarnSessionClusterEntrypoint yarnSessionClusterEntrypoint = new YarnSessionClusterEntrypoint(configuration);

	ClusterEntrypoint.runClusterEntrypoint(yarnSessionClusterEntrypoint);
}
 
Example #13
Source File: YarnJobClusterEntrypoint.java    From flink with Apache License 2.0
public static void main(String[] args) {
	// startup checks and logging
	EnvironmentInformation.logEnvironmentInfo(LOG, YarnJobClusterEntrypoint.class.getSimpleName(), args);
	SignalHandler.register(LOG);
	JvmShutdownSafeguard.installAsShutdownHook(LOG);

	Map<String, String> env = System.getenv();

	final String workingDirectory = env.get(ApplicationConstants.Environment.PWD.key());
	Preconditions.checkArgument(
		workingDirectory != null,
		"Working directory variable (%s) not set",
		ApplicationConstants.Environment.PWD.key());

	try {
		YarnEntrypointUtils.logYarnEnvironmentInformation(env, LOG);
	} catch (IOException e) {
		LOG.warn("Could not log YARN environment information.", e);
	}

	Configuration configuration = YarnEntrypointUtils.loadConfiguration(workingDirectory, env);

	YarnJobClusterEntrypoint yarnJobClusterEntrypoint = new YarnJobClusterEntrypoint(configuration);

	ClusterEntrypoint.runClusterEntrypoint(yarnJobClusterEntrypoint);
}
 
Example #14
Source File: TaskManagerRunner.java    From flink with Apache License 2.0
public static void main(String[] args) throws Exception {
	// startup checks and logging
	EnvironmentInformation.logEnvironmentInfo(LOG, "TaskManager", args);
	SignalHandler.register(LOG);
	JvmShutdownSafeguard.installAsShutdownHook(LOG);

	long maxOpenFileHandles = EnvironmentInformation.getOpenFileHandlesLimit();

	if (maxOpenFileHandles != -1L) {
		LOG.info("Maximum number of open file descriptors is {}.", maxOpenFileHandles);
	} else {
		LOG.info("Cannot determine the maximum number of open file descriptors");
	}

	runTaskManagerSecurely(args);
}
 
Example #15
Source File: FlinkZooKeeperQuorumPeer.java    From flink with Apache License 2.0
public static void main(String[] args) {
	try {
		// startup checks and logging
		EnvironmentInformation.logEnvironmentInfo(LOG, "ZooKeeper Quorum Peer", args);
		
		final ParameterTool params = ParameterTool.fromArgs(args);
		final String zkConfigFile = params.getRequired("zkConfigFile");
		final int peerId = params.getInt("peerId");

		// Run quorum peer
		runFlinkZkQuorumPeer(zkConfigFile, peerId);
	}
	catch (Throwable t) {
		LOG.error("Error running ZooKeeper quorum peer: " + t.getMessage(), t);
		System.exit(-1);
	}
}
 
Example #16
Source File: DashboardConfiguration.java    From flink with Apache License 2.0
public static DashboardConfiguration from(long refreshInterval, ZonedDateTime zonedDateTime, boolean webSubmitEnabled) {

	final String flinkVersion = EnvironmentInformation.getVersion();

	final EnvironmentInformation.RevisionInformation revision = EnvironmentInformation.getRevisionInformation();
	final String flinkRevision;

	if (revision != null) {
		flinkRevision = revision.commitId + " @ " + revision.commitDate;
	} else {
		flinkRevision = "unknown revision";
	}

	return new DashboardConfiguration(
		refreshInterval,
		zonedDateTime.getZone().getDisplayName(TextStyle.FULL, Locale.getDefault()),
		// convert the zoned date-time to an offset so that daylight-saving adjustments do not affect it
		zonedDateTime.toOffsetDateTime().getOffset().getTotalSeconds() * 1000,
		flinkVersion,
		flinkRevision,
		new Features(webSubmitEnabled));
}
 
Example #17
Source File: StandaloneSessionClusterEntrypoint.java    From flink with Apache License 2.0
public static void main(String[] args) {
	// startup checks and logging
	EnvironmentInformation.logEnvironmentInfo(LOG, StandaloneSessionClusterEntrypoint.class.getSimpleName(), args);
	SignalHandler.register(LOG);
	JvmShutdownSafeguard.installAsShutdownHook(LOG);

	EntrypointClusterConfiguration entrypointClusterConfiguration = null;
	final CommandLineParser<EntrypointClusterConfiguration> commandLineParser = new CommandLineParser<>(new EntrypointClusterConfigurationParserFactory());

	try {
		entrypointClusterConfiguration = commandLineParser.parse(args);
	} catch (FlinkParseException e) {
		LOG.error("Could not parse command line arguments {}.", args, e);
		commandLineParser.printHelp(StandaloneSessionClusterEntrypoint.class.getSimpleName());
		System.exit(1);
	}

	Configuration configuration = loadConfiguration(entrypointClusterConfiguration);

	StandaloneSessionClusterEntrypoint entrypoint = new StandaloneSessionClusterEntrypoint(configuration);

	ClusterEntrypoint.runClusterEntrypoint(entrypoint);
}
 
Example #18
Source File: FlinkZooKeeperQuorumPeer.java    From Flink-CEPplus with Apache License 2.0
public static void main(String[] args) {
	try {
		// startup checks and logging
		EnvironmentInformation.logEnvironmentInfo(LOG, "ZooKeeper Quorum Peer", args);
		
		final ParameterTool params = ParameterTool.fromArgs(args);
		final String zkConfigFile = params.getRequired("zkConfigFile");
		final int peerId = params.getInt("peerId");

		// Run quorum peer
		runFlinkZkQuorumPeer(zkConfigFile, peerId);
	}
	catch (Throwable t) {
		LOG.error("Error running ZooKeeper quorum peer: " + t.getMessage(), t);
		System.exit(-1);
	}
}
 
Example #19
Source File: DynamoDBStreamsProxy.java    From Flink-CEPplus with Apache License 2.0
/**
 * Creates an AmazonDynamoDBStreamsAdapterClient.
 * The adapter is used as the internal client for interacting with DynamoDB Streams.
 *
 * @param configProps configuration properties
 * @return an AWS DynamoDB streams adapter client
 */
@Override
protected AmazonKinesis createKinesisClient(Properties configProps) {
	ClientConfiguration awsClientConfig = new ClientConfigurationFactory().getConfig();
	setAwsClientConfigProperties(awsClientConfig, configProps);

	AWSCredentialsProvider credentials = getCredentialsProvider(configProps);
	awsClientConfig.setUserAgentPrefix(
			String.format(
					USER_AGENT_FORMAT,
					EnvironmentInformation.getVersion(),
					EnvironmentInformation.getRevisionInformation().commitId));

	AmazonDynamoDBStreamsAdapterClient adapterClient =
			new AmazonDynamoDBStreamsAdapterClient(credentials, awsClientConfig);

	if (configProps.containsKey(AWS_ENDPOINT)) {
		adapterClient.setEndpoint(configProps.getProperty(AWS_ENDPOINT));
	} else {
		adapterClient.setRegion(Region.getRegion(
				Regions.fromName(configProps.getProperty(AWS_REGION))));
	}

	return adapterClient;
}
 
Example #20
Source File: MesosSessionClusterEntrypoint.java    From Flink-CEPplus with Apache License 2.0
public static void main(String[] args) {
	// startup checks and logging
	EnvironmentInformation.logEnvironmentInfo(LOG, MesosSessionClusterEntrypoint.class.getSimpleName(), args);
	SignalHandler.register(LOG);
	JvmShutdownSafeguard.installAsShutdownHook(LOG);

	// load configuration incl. dynamic properties
	CommandLineParser parser = new PosixParser();
	CommandLine cmd;
	try {
		cmd = parser.parse(ALL_OPTIONS, args);
	}
	catch (Exception e){
		LOG.error("Could not parse the command-line options.", e);
		System.exit(STARTUP_FAILURE_RETURN_CODE);
		return;
	}

	Configuration dynamicProperties = BootstrapTools.parseDynamicProperties(cmd);
	Configuration configuration = MesosEntrypointUtils.loadConfiguration(dynamicProperties, LOG);

	MesosSessionClusterEntrypoint clusterEntrypoint = new MesosSessionClusterEntrypoint(configuration, dynamicProperties);

	ClusterEntrypoint.runClusterEntrypoint(clusterEntrypoint);
}
 
Example #21
Source File: MesosJobClusterEntrypoint.java    From Flink-CEPplus with Apache License 2.0
public static void main(String[] args) {
	// startup checks and logging
	EnvironmentInformation.logEnvironmentInfo(LOG, MesosJobClusterEntrypoint.class.getSimpleName(), args);
	SignalHandler.register(LOG);
	JvmShutdownSafeguard.installAsShutdownHook(LOG);

	// load configuration incl. dynamic properties
	CommandLineParser parser = new PosixParser();
	CommandLine cmd;
	try {
		cmd = parser.parse(ALL_OPTIONS, args);
	}
	catch (Exception e){
		LOG.error("Could not parse the command-line options.", e);
		System.exit(STARTUP_FAILURE_RETURN_CODE);
		return;
	}

	Configuration dynamicProperties = BootstrapTools.parseDynamicProperties(cmd);
	Configuration configuration = MesosEntrypointUtils.loadConfiguration(dynamicProperties, LOG);

	MesosJobClusterEntrypoint clusterEntrypoint = new MesosJobClusterEntrypoint(configuration, dynamicProperties);

	ClusterEntrypoint.runClusterEntrypoint(clusterEntrypoint);
}
 
Example #22
Source File: DashboardConfiguration.java    From Flink-CEPplus with Apache License 2.0
public static DashboardConfiguration from(long refreshInterval, ZonedDateTime zonedDateTime) {

	final String flinkVersion = EnvironmentInformation.getVersion();

	final EnvironmentInformation.RevisionInformation revision = EnvironmentInformation.getRevisionInformation();
	final String flinkRevision;

	if (revision != null) {
		flinkRevision = revision.commitId + " @ " + revision.commitDate;
	} else {
		flinkRevision = "unknown revision";
	}

	return new DashboardConfiguration(
		refreshInterval,
		zonedDateTime.getZone().getDisplayName(TextStyle.FULL, Locale.getDefault()),
		// convert the zoned date-time to an offset so that daylight-saving adjustments do not affect it
		zonedDateTime.toOffsetDateTime().getOffset().getTotalSeconds() * 1000,
		flinkVersion,
		flinkRevision);
}
 
Example #23
Source File: StandaloneSessionClusterEntrypoint.java    From Flink-CEPplus with Apache License 2.0
public static void main(String[] args) {
	// startup checks and logging
	EnvironmentInformation.logEnvironmentInfo(LOG, StandaloneSessionClusterEntrypoint.class.getSimpleName(), args);
	SignalHandler.register(LOG);
	JvmShutdownSafeguard.installAsShutdownHook(LOG);

	EntrypointClusterConfiguration entrypointClusterConfiguration = null;
	final CommandLineParser<EntrypointClusterConfiguration> commandLineParser = new CommandLineParser<>(new EntrypointClusterConfigurationParserFactory());

	try {
		entrypointClusterConfiguration = commandLineParser.parse(args);
	} catch (FlinkParseException e) {
		LOG.error("Could not parse command line arguments {}.", args, e);
		commandLineParser.printHelp(StandaloneSessionClusterEntrypoint.class.getSimpleName());
		System.exit(1);
	}

	Configuration configuration = loadConfiguration(entrypointClusterConfiguration);

	StandaloneSessionClusterEntrypoint entrypoint = new StandaloneSessionClusterEntrypoint(configuration);

	ClusterEntrypoint.runClusterEntrypoint(entrypoint);
}
 
Example #24
Source File: HistoryServer.java    From flink with Apache License 2.0
public static void main(String[] args) throws Exception {
	EnvironmentInformation.logEnvironmentInfo(LOG, "HistoryServer", args);

	ParameterTool pt = ParameterTool.fromArgs(args);
	String configDir = pt.getRequired("configDir");

	LOG.info("Loading configuration from {}", configDir);
	final Configuration flinkConfig = GlobalConfiguration.loadConfiguration(configDir);

	FileSystem.initialize(flinkConfig, PluginUtils.createPluginManagerFromRootFolder(flinkConfig));

	// run the history server
	SecurityUtils.install(new SecurityConfiguration(flinkConfig));

	try {
		SecurityUtils.getInstalledContext().runSecured(new Callable<Integer>() {
			@Override
			public Integer call() throws Exception {
				HistoryServer hs = new HistoryServer(flinkConfig);
				hs.run();
				return 0;
			}
		});
		System.exit(0);
	} catch (Throwable t) {
		final Throwable strippedThrowable = ExceptionUtils.stripException(t, UndeclaredThrowableException.class);
		LOG.error("Failed to run HistoryServer.", strippedThrowable);
		strippedThrowable.printStackTrace();
		System.exit(1);
	}
}
 
Example #25
Source File: SerializingLongReceiver.java    From Flink-CEPplus with Apache License 2.0
@SuppressWarnings("WeakerAccess")
public SerializingLongReceiver(InputGate inputGate, int expectedRepetitionsOfExpectedRecord) {
	super(expectedRepetitionsOfExpectedRecord);
	this.reader = new MutableRecordReader<>(
		inputGate,
		new String[]{
			EnvironmentInformation.getTemporaryFileDirectory()
		});
}
 
Example #26
Source File: ShuffleCompressionITCase.java    From flink with Apache License 2.0
@Override
public void invoke() throws Exception {
	MutableRecordReader<LongValue> reader = new MutableRecordReader<>(
		getEnvironment().getInputGate(0),
		new String[]{EnvironmentInformation.getTemporaryFileDirectory()});

	LongValue value = new LongValue();
	for (int i = 0; i < PARALLELISM * NUM_RECORDS_TO_SEND; ++i) {
		reader.next(value);
		assertEquals(RECORD_TO_SEND.getValue(), value.getValue());
	}
}
 
Example #27
Source File: KubernetesTaskExecutorRunner.java    From flink with Apache License 2.0
public static void main(String[] args) {
	EnvironmentInformation.logEnvironmentInfo(LOG, "Kubernetes TaskExecutor runner", args);
	SignalHandler.register(LOG);
	JvmShutdownSafeguard.installAsShutdownHook(LOG);

	TaskManagerRunner.runTaskManagerSecurely(args);
}
 
Example #28
Source File: KubernetesSessionClusterEntrypoint.java    From flink with Apache License 2.0
public static void main(String[] args) {
	// startup checks and logging
	EnvironmentInformation.logEnvironmentInfo(LOG, KubernetesSessionClusterEntrypoint.class.getSimpleName(), args);
	SignalHandler.register(LOG);
	JvmShutdownSafeguard.installAsShutdownHook(LOG);

	final ClusterEntrypoint entrypoint = new KubernetesSessionClusterEntrypoint(
		KubernetesEntrypointUtils.loadConfiguration());
	ClusterEntrypoint.runClusterEntrypoint(entrypoint);
}
 
Example #29
Source File: TaskManagerRunner.java    From Flink-CEPplus with Apache License 2.0
public static void main(String[] args) throws Exception {
	// startup checks and logging
	EnvironmentInformation.logEnvironmentInfo(LOG, "TaskManager", args);
	SignalHandler.register(LOG);
	JvmShutdownSafeguard.installAsShutdownHook(LOG);

	long maxOpenFileHandles = EnvironmentInformation.getOpenFileHandlesLimit();

	if (maxOpenFileHandles != -1L) {
		LOG.info("Maximum number of open file descriptors is {}.", maxOpenFileHandles);
	} else {
		LOG.info("Cannot determine the maximum number of open file descriptors");
	}

	final Configuration configuration = loadConfiguration(args);

	try {
		FileSystem.initialize(configuration);
	} catch (IOException e) {
		throw new IOException("Error while setting the default " +
			"filesystem scheme from configuration.", e);
	}

	SecurityUtils.install(new SecurityConfiguration(configuration));

	try {
		SecurityUtils.getInstalledContext().runSecured(new Callable<Void>() {
			@Override
			public Void call() throws Exception {
				runTaskManager(configuration, ResourceID.generate());
				return null;
			}
		});
	} catch (Throwable t) {
		final Throwable strippedThrowable = ExceptionUtils.stripException(t, UndeclaredThrowableException.class);
		LOG.error("TaskManager initialization failed.", strippedThrowable);
		System.exit(STARTUP_FAILURE_RETURN_CODE);
	}
}
 
Example #30
Source File: YarnJobClusterEntrypoint.java    From Flink-CEPplus with Apache License 2.0
public static void main(String[] args) {
	// startup checks and logging
	EnvironmentInformation.logEnvironmentInfo(LOG, YarnJobClusterEntrypoint.class.getSimpleName(), args);
	SignalHandler.register(LOG);
	JvmShutdownSafeguard.installAsShutdownHook(LOG);

	Map<String, String> env = System.getenv();

	final String workingDirectory = env.get(ApplicationConstants.Environment.PWD.key());
	Preconditions.checkArgument(
		workingDirectory != null,
		"Working directory variable (%s) not set",
		ApplicationConstants.Environment.PWD.key());

	try {
		YarnEntrypointUtils.logYarnEnvironmentInformation(env, LOG);
	} catch (IOException e) {
		LOG.warn("Could not log YARN environment information.", e);
	}

	Configuration configuration = YarnEntrypointUtils.loadConfiguration(workingDirectory, env, LOG);

	YarnJobClusterEntrypoint yarnJobClusterEntrypoint = new YarnJobClusterEntrypoint(
		configuration,
		workingDirectory);

	ClusterEntrypoint.runClusterEntrypoint(yarnJobClusterEntrypoint);
}