Java Code Examples for org.apache.flink.configuration.ConfigConstants

The following examples show how to use org.apache.flink.configuration.ConfigConstants. They are extracted from open source projects.
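Most of the examples below rely on ConfigConstants.DEFAULT_CHARSET, Flink's fixed UTF-8 charset for converting between strings and bytes. A minimal sketch of that pattern (the class name here is illustrative, not taken from any of the projects below):

import org.apache.flink.configuration.ConfigConstants;

public class DefaultCharsetDemo {
	public static void main(String[] args) {
		// DEFAULT_CHARSET is UTF-8, so the encoding does not depend on the
		// JVM's platform-default charset (unlike the bare String.getBytes()).
		byte[] bytes = "flink".getBytes(ConfigConstants.DEFAULT_CHARSET);
		String roundTripped = new String(bytes, ConfigConstants.DEFAULT_CHARSET);
		System.out.println(roundTripped); // prints "flink"
	}
}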
Example 1
Source Project: flink   Source File: NiFiSinkTopologyExample.java    License: Apache License 2.0
public static void main(String[] args) throws Exception {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

	SiteToSiteClientConfig clientConfig = new SiteToSiteClient.Builder()
			.url("http://localhost:8080/nifi")
			.portName("Data from Flink")
			.buildConfig();

	DataStreamSink<String> dataStream = env.fromElements("one", "two", "three", "four", "five", "q")
			.addSink(new NiFiSink<>(clientConfig, new NiFiDataPacketBuilder<String>() {
				@Override
				public NiFiDataPacket createNiFiDataPacket(String s, RuntimeContext ctx) {
					return new StandardNiFiDataPacket(s.getBytes(ConfigConstants.DEFAULT_CHARSET),
						new HashMap<String, String>());
				}
			}));

	env.execute();
}
 
Example 2
Source Project: flink   Source File: CliFrontend.java    License: Apache License 2.0
public static String getConfigurationDirectoryFromEnv() {
	String location = System.getenv(ConfigConstants.ENV_FLINK_CONF_DIR);

	if (location != null) {
		if (new File(location).exists()) {
			return location;
		}
		else {
			throw new RuntimeException("The configuration directory '" + location + "', specified in the '" +
				ConfigConstants.ENV_FLINK_CONF_DIR + "' environment variable, does not exist.");
		}
	}
	else if (new File(CONFIG_DIRECTORY_FALLBACK_1).exists()) {
		location = CONFIG_DIRECTORY_FALLBACK_1;
	}
	else if (new File(CONFIG_DIRECTORY_FALLBACK_2).exists()) {
		location = CONFIG_DIRECTORY_FALLBACK_2;
	}
	else {
		throw new RuntimeException("The configuration directory was not specified. " +
				"Please specify the directory containing the configuration file through the '" +
			ConfigConstants.ENV_FLINK_CONF_DIR + "' environment variable.");
	}
	return location;
}
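The lookup order above is: the FLINK_CONF_DIR environment variable first, then the two built-in fallback directories. A hedged usage sketch, assuming a standard Flink distribution where GlobalConfiguration.loadConfiguration(String) is available:

	// Resolve the configuration directory the way the CLI does,
	// then load flink-conf.yaml from it.
	String confDir = CliFrontend.getConfigurationDirectoryFromEnv();
	Configuration flinkConf = GlobalConfiguration.loadConfiguration(confDir);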
 
Example 3
Source Project: Flink-CEPplus   Source File: ZookeeperOffsetHandler.java    License: Apache License 2.0
public static Long getOffsetFromZooKeeper(CuratorFramework curatorClient, String groupId, String topic, int partition) throws Exception {
	ZKGroupTopicDirs topicDirs = new ZKGroupTopicDirs(groupId, topic);
	String path = topicDirs.consumerOffsetDir() + "/" + partition;
	curatorClient.newNamespaceAwareEnsurePath(path).ensure(curatorClient.getZookeeperClient());

	byte[] data = curatorClient.getData().forPath(path);

	if (data == null) {
		return null;
	} else {
		String asString = new String(data, ConfigConstants.DEFAULT_CHARSET);
		if (asString.length() == 0) {
			return null;
		} else {
			try {
				return Long.valueOf(asString);
			}
			catch (NumberFormatException e) {
				LOG.error(
						"The offset in ZooKeeper for group '{}', topic '{}', partition {} is a malformed string: {}",
					groupId, topic, partition, asString);
				return null;
			}
		}
	}
}
 
Example 4
Source Project: cascading-flink   Source File: FlinkPlanner.java    License: Apache License 2.0
public FlinkPlanner(List<String> classPath) {
	super();
	this.classPath = classPath;

	env.getConfig().disableSysoutLogging();
	if (env.getParallelism() <= 0) {
		// load the default parallelism from config
		GlobalConfiguration.loadConfiguration(new File(CliFrontend.getConfigurationDirectoryFromEnv()).getAbsolutePath());
		org.apache.flink.configuration.Configuration configuration = GlobalConfiguration.getConfiguration();
		int parallelism = configuration.getInteger(ConfigConstants.DEFAULT_PARALLELISM_KEY, -1);
		if (parallelism <= 0) {
			throw new RuntimeException("Please set the default parallelism via the -p command-line flag");
		} else {
			env.setParallelism(parallelism);
		}
	}

}
 
Example 5
Source Project: flink   Source File: LocalExecutor.java    License: Apache License 2.0
private JobExecutorService createJobExecutorService(Configuration configuration) throws Exception {
	if (!configuration.contains(RestOptions.BIND_PORT)) {
		configuration.setString(RestOptions.BIND_PORT, "0");
	}

	final MiniClusterConfiguration miniClusterConfiguration = new MiniClusterConfiguration.Builder()
		.setConfiguration(configuration)
		.setNumTaskManagers(
			configuration.getInteger(
				ConfigConstants.LOCAL_NUMBER_TASK_MANAGER,
				ConfigConstants.DEFAULT_LOCAL_NUMBER_TASK_MANAGER))
		.setRpcServiceSharing(RpcServiceSharing.SHARED)
		.setNumSlotsPerTaskManager(
			configuration.getInteger(
				TaskManagerOptions.NUM_TASK_SLOTS, 1))
		.build();

	final MiniCluster miniCluster = new MiniCluster(miniClusterConfiguration);
	miniCluster.start();

	configuration.setInteger(RestOptions.PORT, miniCluster.getRestAddress().get().getPort());

	return miniCluster;
}
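The same pair of settings can also be passed explicitly when building a local environment. A small sketch, with the task manager count and parallelism chosen arbitrarily:

	Configuration conf = new Configuration();
	conf.setInteger(ConfigConstants.LOCAL_NUMBER_TASK_MANAGER, 2); // default is 1
	conf.setInteger(TaskManagerOptions.NUM_TASK_SLOTS, 4);
	StreamExecutionEnvironment env =
		StreamExecutionEnvironment.createLocalEnvironment(8, conf);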
 
Example 6
Source Project: Flink-CEPplus   Source File: RocksDBOperationUtils.java    License: Apache License 2.0
/**
 * Creates a column descriptor for the state column family.
 *
 * <p>Sets TTL compaction filter if {@code ttlCompactFiltersManager} is not {@code null}.
 */
public static ColumnFamilyDescriptor createColumnFamilyDescriptor(
	RegisteredStateMetaInfoBase metaInfoBase,
	Function<String, ColumnFamilyOptions> columnFamilyOptionsFactory,
	@Nullable RocksDbTtlCompactFiltersManager ttlCompactFiltersManager) {

	ColumnFamilyOptions options = createColumnFamilyOptions(columnFamilyOptionsFactory, metaInfoBase.getName());
	if (ttlCompactFiltersManager != null) {
		ttlCompactFiltersManager.setAndRegisterCompactFilterIfStateTtl(metaInfoBase, options);
	}
	byte[] nameBytes = metaInfoBase.getName().getBytes(ConfigConstants.DEFAULT_CHARSET);
	Preconditions.checkState(!Arrays.equals(RocksDB.DEFAULT_COLUMN_FAMILY, nameBytes),
		"The chosen state name 'default' collides with the name of the default column family!");

	return new ColumnFamilyDescriptor(nameBytes, options);
}
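Note: RocksDB.DEFAULT_COLUMN_FAMILY is the byte sequence of the name "default", which RocksDB reserves for its built-in column family; the precondition above rejects a user state with that name before it can silently collide.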
 
Example 7
Source Project: flink   Source File: BlobsCleanupITCase.java    License: Apache License 2.0
@BeforeClass
public static void setup() throws Exception {
	blobBaseDir = TEMPORARY_FOLDER.newFolder();

	Configuration cfg = new Configuration();
	cfg.setString(BlobServerOptions.STORAGE_DIRECTORY, blobBaseDir.getAbsolutePath());
	cfg.setString(ConfigConstants.RESTART_STRATEGY, "fixeddelay");
	cfg.setInteger(ConfigConstants.RESTART_STRATEGY_FIXED_DELAY_ATTEMPTS, 1);
	// BLOBs are deleted from BlobCache between 1s and 2s after last reference
	// -> the BlobCache may still have the BLOB or not (let's test both cases randomly)
	cfg.setLong(BlobServerOptions.CLEANUP_INTERVAL, 1L);

	configuration = new UnmodifiableConfiguration(cfg);

	miniClusterResource = new MiniClusterResource(new MiniClusterResourceConfiguration.Builder()
		.setNumberSlotsPerTaskManager(2)
		.setNumberTaskManagers(1)
		.setConfiguration(configuration)
		.build());

	miniClusterResource.before();
}
 
Example 8
/**
 * Fill the file with content and put the content in the {@code hdPathContents} list.
 * */
private Tuple2<Path, String> fillWithData(
	String base, String fileName, int fileIdx, String sampleLine) throws IOException, InterruptedException {

	assert (localFs != null);

	org.apache.hadoop.fs.Path tmp =
		new org.apache.hadoop.fs.Path(base + "/." + fileName + fileIdx);

	FSDataOutputStream stream = localFs.create(tmp);
	StringBuilder str = new StringBuilder();
	for (int i = 0; i < LINES_PER_FILE; i++) {
		String line = fileIdx + ": " + sampleLine + " " + i + "\n";
		str.append(line);
		stream.write(line.getBytes(ConfigConstants.DEFAULT_CHARSET));
	}
	stream.close();
	return new Tuple2<>(tmp, str.toString());
}
 
Example 9
Source Project: flink   Source File: RocksDBOperationUtils.java    License: Apache License 2.0
/**
 * Creates a column descriptor for the state column family.
 *
 * <p>Sets TTL compaction filter if {@code ttlCompactFiltersManager} is not {@code null}.
 */
public static ColumnFamilyDescriptor createColumnFamilyDescriptor(
	RegisteredStateMetaInfoBase metaInfoBase,
	Function<String, ColumnFamilyOptions> columnFamilyOptionsFactory,
	@Nullable RocksDbTtlCompactFiltersManager ttlCompactFiltersManager) {

	ColumnFamilyOptions options = createColumnFamilyOptions(columnFamilyOptionsFactory, metaInfoBase.getName());
	if (ttlCompactFiltersManager != null) {
		ttlCompactFiltersManager.setAndRegisterCompactFilterIfStateTtl(metaInfoBase, options);
	}
	byte[] nameBytes = metaInfoBase.getName().getBytes(ConfigConstants.DEFAULT_CHARSET);
	Preconditions.checkState(!Arrays.equals(RocksDB.DEFAULT_COLUMN_FAMILY, nameBytes),
		"The chosen state name 'default' collides with the name of the default column family!");

	return new ColumnFamilyDescriptor(nameBytes, options);
}
 
Example 10
Source Project: Flink-CEPplus   Source File: SavepointMigrationTestBase.java    License: Apache License 2.0
private Configuration getConfiguration() throws Exception {
	// Flink configuration
	final Configuration config = new Configuration();

	config.setInteger(ConfigConstants.LOCAL_NUMBER_TASK_MANAGER, 1);
	config.setInteger(TaskManagerOptions.NUM_TASK_SLOTS, DEFAULT_PARALLELISM);

	UUID id = UUID.randomUUID();
	final File checkpointDir = TEMP_FOLDER.newFolder("checkpoints_" + id).getAbsoluteFile();
	final File savepointDir = TEMP_FOLDER.newFolder("savepoints_" + id).getAbsoluteFile();

	if (!checkpointDir.exists() || !savepointDir.exists()) {
		throw new Exception("Test setup failed: failed to create (temporary) directories.");
	}

	LOG.info("Created temporary checkpoint directory: " + checkpointDir + ".");
	LOG.info("Created savepoint directory: " + savepointDir + ".");

	config.setString(CheckpointingOptions.STATE_BACKEND, "memory");
	config.setString(CheckpointingOptions.CHECKPOINTS_DIRECTORY, checkpointDir.toURI().toString());
	config.setInteger(CheckpointingOptions.FS_SMALL_FILE_THRESHOLD, 0);
	config.setString(CheckpointingOptions.SAVEPOINT_DIRECTORY, savepointDir.toURI().toString());

	return config;
}
 
Example 11
Source Project: flink   Source File: ReporterSetupTest.java    License: Apache License 2.0
/**
 * Verifies that factory/reflection approaches can be mixed freely.
 */
@Test
public void testMixedSetupsFactoryParsing() throws Exception {
	final Configuration config = new Configuration();
	config.setString(ConfigConstants.METRICS_REPORTER_PREFIX + "test1." + ConfigConstants.METRICS_REPORTER_FACTORY_CLASS_SUFFIX, InstantiationTypeTrackingTestReporterFactory.class.getName());
	config.setString(ConfigConstants.METRICS_REPORTER_PREFIX + "test2." + ConfigConstants.METRICS_REPORTER_CLASS_SUFFIX, InstantiationTypeTrackingTestReporter.class.getName());

	final List<ReporterSetup> reporterSetups = ReporterSetup.fromConfiguration(config);

	assertEquals(2, reporterSetups.size());

	final ReporterSetup reporterSetup1 = reporterSetups.get(0);
	final ReporterSetup reporterSetup2 = reporterSetups.get(1);

	final InstantiationTypeTrackingTestReporter metricReporter1 = (InstantiationTypeTrackingTestReporter) reporterSetup1.getReporter();
	final InstantiationTypeTrackingTestReporter metricReporter2 = (InstantiationTypeTrackingTestReporter) reporterSetup2.getReporter();

	assertTrue(metricReporter1.createdByFactory ^ metricReporter2.createdByFactory);
}
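The keys assembled in this test are plain dotted strings. Assuming the constant values in this Flink version (METRICS_REPORTER_PREFIX is "metrics.reporter.", the suffixes are "factory.class" and "class"), the two keys expand as sketched here:

	// metrics.reporter.test1.factory.class
	// metrics.reporter.test2.class
	String factoryKey = ConfigConstants.METRICS_REPORTER_PREFIX
		+ "test1." + ConfigConstants.METRICS_REPORTER_FACTORY_CLASS_SUFFIX;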
 
Example 12
Source Project: Flink-CEPplus   Source File: ZooKeeperMesosWorkerStore.java    License: Apache License 2.0
/**
 * Get the persisted framework ID.
 * @return the current ID or empty if none is yet persisted.
 * @throws Exception on ZK failures, interruptions.
 */
@Override
public Option<Protos.FrameworkID> getFrameworkID() throws Exception {
	synchronized (startStopLock) {
		verifyIsRunning();

		Option<Protos.FrameworkID> frameworkID;
		byte[] value = frameworkIdInZooKeeper.getValue();
		if (value.length == 0) {
			frameworkID = Option.empty();
		} else {
			frameworkID = Option.apply(Protos.FrameworkID.newBuilder().setValue(new String(value,
				ConfigConstants.DEFAULT_CHARSET)).build());
		}

		return frameworkID;
	}
}
 
Example 13
Source Project: flink   Source File: ReporterSetupTest.java    License: Apache License 2.0
/**
 * Verifies that multiple reporters are instantiated correctly.
 */
@Test
public void testMultipleReporterInstantiation() throws Exception {
	Configuration config = new Configuration();

	config.setString(ConfigConstants.METRICS_REPORTER_PREFIX + "test1." + ConfigConstants.METRICS_REPORTER_CLASS_SUFFIX, TestReporter11.class.getName());
	config.setString(ConfigConstants.METRICS_REPORTER_PREFIX + "test2." + ConfigConstants.METRICS_REPORTER_CLASS_SUFFIX, TestReporter12.class.getName());
	config.setString(ConfigConstants.METRICS_REPORTER_PREFIX + "test3." + ConfigConstants.METRICS_REPORTER_CLASS_SUFFIX, TestReporter13.class.getName());

	List<ReporterSetup> reporterSetups = ReporterSetup.fromConfiguration(config);

	assertEquals(3, reporterSetups.size());

	Assert.assertTrue(TestReporter11.wasOpened);
	Assert.assertTrue(TestReporter12.wasOpened);
	Assert.assertTrue(TestReporter13.wasOpened);
}
 
Example 14
Source Project: flink   Source File: FakeKinesisBehavioursFactory.java    License: Apache License 2.0
@Override
public GetRecordsResult getRecords(String shardIterator, int maxRecordsToGet) {
	BlockingQueue<String> queue = Preconditions.checkNotNull(this.shardIteratorToQueueMap.get(shardIterator),
		"no queue for iterator %s", shardIterator);
	List<Record> records = Collections.emptyList();
	try {
		String data = queue.take();
		Record record = new Record()
			.withData(
				ByteBuffer.wrap(data.getBytes(ConfigConstants.DEFAULT_CHARSET)))
			.withPartitionKey(UUID.randomUUID().toString())
			.withApproximateArrivalTimestamp(new Date(System.currentTimeMillis()))
			.withSequenceNumber(String.valueOf(0));
		records = Collections.singletonList(record);
	} catch (InterruptedException e) {
		shardIterator = null;
	}
	return new GetRecordsResult()
		.withRecords(records)
		.withMillisBehindLatest(0L)
		.withNextShardIterator(shardIterator);
}
 
Example 15
Source Project: flink   Source File: GPUDriver.java    License: Apache License 2.0
GPUDriver(Configuration config) throws Exception {
	final String discoveryScriptPathStr = config.getString(DISCOVERY_SCRIPT_PATH);
	if (StringUtils.isNullOrWhitespaceOnly(discoveryScriptPathStr)) {
		throw new IllegalConfigurationException(
			String.format("GPU discovery script ('%s') is not configured.", ExternalResourceOptions.genericKeyWithSuffix(DISCOVERY_SCRIPT_PATH.key())));
	}

	Path discoveryScriptPath = Paths.get(discoveryScriptPathStr);
	if (!discoveryScriptPath.isAbsolute()) {
		discoveryScriptPath = Paths.get(System.getenv().getOrDefault(ConfigConstants.ENV_FLINK_HOME_DIR, "."), discoveryScriptPathStr);
	}
	discoveryScriptFile = discoveryScriptPath.toFile();

	if (!discoveryScriptFile.exists()) {
		throw new FileNotFoundException(String.format("The gpu discovery script does not exist in path %s.", discoveryScriptFile.getAbsolutePath()));
	}
	if (!discoveryScriptFile.canExecute()) {
		throw new FlinkException(String.format("The discovery script %s is not executable.", discoveryScriptFile.getAbsolutePath()));
	}

	args = config.getString(DISCOVERY_SCRIPT_ARG);
}
 
Example 16
Source Project: Flink-CEPplus   Source File: SqlDateParser.java    License: Apache License 2.0
@Override
public int parseField(byte[] bytes, int startPos, int limit, byte[] delimiter, Date reusable) {
	final int endPos = nextStringEndPos(bytes, startPos, limit, delimiter);
	if (endPos < 0) {
		return -1;
	}

	if (endPos > startPos &&
			(Character.isWhitespace(bytes[startPos]) || Character.isWhitespace(bytes[(endPos - 1)]))) {
		setErrorState(ParseErrorState.NUMERIC_VALUE_ILLEGAL_CHARACTER);
		return -1;
	}

	String str = new String(bytes, startPos, endPos - startPos, ConfigConstants.DEFAULT_CHARSET);
	try {
		this.result = Date.valueOf(str);
		return (endPos == limit) ? limit : endPos + delimiter.length;
	} catch (IllegalArgumentException e) {
		setErrorState(ParseErrorState.NUMERIC_VALUE_FORMAT_ERROR);
		return -1;
	}
}
 
Example 17
Source Project: flink   Source File: ContinuousFileProcessingITCase.java    License: Apache License 2.0
/** Create a file and fill it with content. */
private Tuple2<org.apache.hadoop.fs.Path, String> fillWithData(
	String base, String fileName, int fileIdx, String sampleLine) throws IOException, InterruptedException {

	assert (hdfs != null);

	org.apache.hadoop.fs.Path tmp =
		new org.apache.hadoop.fs.Path(base + "/." + fileName + fileIdx);

	FSDataOutputStream stream = hdfs.create(tmp);
	StringBuilder str = new StringBuilder();
	for (int i = 0; i < LINES_PER_FILE; i++) {
		String line = fileIdx + ": " + sampleLine + " " + i + "\n";
		str.append(line);
		stream.write(line.getBytes(ConfigConstants.DEFAULT_CHARSET));
	}
	stream.close();
	return new Tuple2<>(tmp, str.toString());
}
 
Example 18
Source Project: Flink-CEPplus   Source File: KvStateRequest.java    License: Apache License 2.0
@Override
public byte[] serialize() {

	byte[] serializedStateName = stateName.getBytes(ConfigConstants.DEFAULT_CHARSET);

	// JobID + stateName + sizeOf(stateName) + hashCode + keyAndNamespace + sizeOf(keyAndNamespace)
	final int size =
			JobID.SIZE +
			serializedStateName.length + Integer.BYTES +
			Integer.BYTES +
			serializedKeyAndNamespace.length + Integer.BYTES;

	return ByteBuffer.allocate(size)
			.putLong(jobId.getLowerPart())
			.putLong(jobId.getUpperPart())
			.putInt(serializedStateName.length)
			.put(serializedStateName)
			.putInt(keyHashCode)
			.putInt(serializedKeyAndNamespace.length)
			.put(serializedKeyAndNamespace)
			.array();
}
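The buffer layout written above can be read back in the same order. A minimal sketch of a matching reader, given to illustrate the byte layout rather than as Flink's actual deserializer (the local variable serialized is assumed to hold the output of serialize()):

	ByteBuffer buf = ByteBuffer.wrap(serialized);
	JobID jobId = new JobID(buf.getLong(), buf.getLong()); // lower part, then upper part
	byte[] nameBytes = new byte[buf.getInt()];
	buf.get(nameBytes);
	String stateName = new String(nameBytes, ConfigConstants.DEFAULT_CHARSET);
	int keyHashCode = buf.getInt();
	byte[] keyAndNamespace = new byte[buf.getInt()];
	buf.get(keyAndNamespace);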
 
Example 19
Source Project: Flink-CEPplus   Source File: DoubleValueParser.java    License: Apache License 2.0
@Override
public int parseField(byte[] bytes, int startPos, int limit, byte[] delimiter, DoubleValue reusable) {
	final int endPos = nextStringEndPos(bytes, startPos, limit, delimiter);
	if (endPos < 0) {
		return -1;
	}

	if (endPos > startPos &&
			(Character.isWhitespace(bytes[startPos]) || Character.isWhitespace(bytes[(endPos - 1)]))) {
		setErrorState(ParseErrorState.NUMERIC_VALUE_ILLEGAL_CHARACTER);
		return -1;
	}

	String str = new String(bytes, startPos, endPos - startPos, ConfigConstants.DEFAULT_CHARSET);
	try {
		double value = Double.parseDouble(str);
		reusable.setValue(value);
		this.result = reusable;
		return (endPos == limit) ? limit : endPos + delimiter.length;
	}
	catch (NumberFormatException e) {
		setErrorState(ParseErrorState.NUMERIC_VALUE_FORMAT_ERROR);
		return -1;
	}
}
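A short driver for the parser above makes the return-value contract concrete; the input bytes here are chosen arbitrarily:

	DoubleValueParser parser = new DoubleValueParser();
	byte[] input = "3.25|7.5".getBytes(ConfigConstants.DEFAULT_CHARSET);
	DoubleValue holder = new DoubleValue();
	int next = parser.parseField(input, 0, input.length, new byte[] {'|'}, holder);
	// next == 5 (one position past the delimiter); holder.getValue() == 3.25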
 
Example 20
Source Project: Flink-CEPplus   Source File: DataSourceTaskTest.java    License: Apache License 2.0
@Override
public Record readRecord(Record target, byte[] record, int offset, int numBytes) {
	
	if(this.cnt == 10) {
		throw new RuntimeException("Excpected Test Exception.");
	}
	
	this.cnt++;
	
	String line = new String(record, offset, numBytes, ConfigConstants.DEFAULT_CHARSET);
	
	try {
		this.key.setValue(Integer.parseInt(line.substring(0,line.indexOf("_"))));
		this.value.setValue(Integer.parseInt(line.substring(line.indexOf("_")+1,line.length())));
	}
	catch(RuntimeException re) {
		return null;
	}
	
	target.setField(0, this.key);
	target.setField(1, this.value);
	return target;
}
 
Example 21
Source Project: flink   Source File: MetricGroupTest.java    License: Apache License 2.0
/**
 * Verifies that calling {@link AbstractMetricGroup#getLogicalScope(CharacterFilter, char, int)} on {@link GenericValueMetricGroup}
 * should ignore value as well.
 */
@Test
public void testLogicalScopeShouldIgnoreValueGroupName() throws Exception {
	Configuration config = new Configuration();
	config.setString(ConfigConstants.METRICS_REPORTER_PREFIX + "test." + ConfigConstants.METRICS_REPORTER_CLASS_SUFFIX, TestReporter.class.getName());

	MetricRegistryImpl registry = new MetricRegistryImpl(MetricRegistryConfiguration.fromConfiguration(config));
	try {
		GenericMetricGroup root = new GenericMetricGroup(registry, new DummyAbstractMetricGroup(registry), "root");

		String key = "key";
		String value = "value";

		MetricGroup group = root.addGroup(key, value);

		String logicalScope = ((AbstractMetricGroup) group)
			.getLogicalScope(new DummyCharacterFilter(), registry.getDelimiter(), 0);
		assertThat("Key is missing from logical scope.", logicalScope, containsString(key));
		assertThat("Value is present in logical scope.", logicalScope, not(containsString(value)));
	} finally {
		registry.shutdown().get();
	}
}
 
Example 22
Source Project: flink   Source File: FileInputFormatTest.java    License: Apache License 2.0
@Test
public void testReadMultiplePatterns() throws Exception {
	final String contents = "CONTENTS";

	// create some accepted, some ignored files

	File child1 = temporaryFolder.newFile("dataFile1.txt");
	File child2 = temporaryFolder.newFile("another_file.bin");
	createTempFiles(contents.getBytes(ConfigConstants.DEFAULT_CHARSET), child1, child2);

	// test that only the valid files are accepted

	Configuration configuration = new Configuration();

	final DummyFileInputFormat format = new DummyFileInputFormat();
	format.setFilePath(temporaryFolder.getRoot().toURI().toString());
	format.configure(configuration);
	format.setFilesFilter(new GlobFilePathFilter(
		Collections.singletonList("**"),
		Arrays.asList("**/another_file.bin", "**/dataFile1.txt")
	));
	FileInputSplit[] splits = format.createInputSplits(1);

	Assert.assertEquals(0, splits.length);
}
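The assertion holds because GlobFilePathFilter only accepts a path that matches an include pattern and no exclude pattern: both files match "**" but are also excluded, so no input splits remain.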
 
Example 23
/**
 * This tests that per default the off heap memory is set to what the network buffers require.
 */
@Test
public void testOffHeapMemoryWithDefaultConfiguration() {
	Configuration conf = new Configuration();

	ContaineredTaskManagerParameters params =
		ContaineredTaskManagerParameters.create(conf, CONTAINER_MEMORY, 1);

	final float memoryCutoffRatio = conf.getFloat(
		ConfigConstants.CONTAINERIZED_HEAP_CUTOFF_RATIO,
		ConfigConstants.DEFAULT_YARN_HEAP_CUTOFF_RATIO);
	final int minCutoff = conf.getInteger(
		ConfigConstants.CONTAINERIZED_HEAP_CUTOFF_MIN,
		ConfigConstants.DEFAULT_YARN_HEAP_CUTOFF);

	long cutoff = Math.max((long) (CONTAINER_MEMORY * memoryCutoffRatio), minCutoff);
	final long networkBufMB =
		calculateNetworkBufferMemory(
			(CONTAINER_MEMORY - cutoff) << 20, // megabytes to bytes
			conf) >> 20; // bytes to megabytes
	assertEquals(networkBufMB + cutoff, params.taskManagerDirectMemoryLimitMB());
}
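For a concrete reading, with the defaults in this Flink version (cutoff ratio 0.25, minimum cutoff 600 MB), a container memory of, say, 1024 MB yields cutoff = max(1024 * 0.25, 600) = 600 MB, and the asserted direct-memory limit is the computed network buffer size plus that cutoff.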
 
Example 24
Source Project: flink   Source File: PackagedProgramTest.java    License: Apache License 2.0
@Test
public void testExtractContainedLibraries() throws Exception {
	String s = "testExtractContainedLibraries";
	byte[] nestedJarContent = s.getBytes(ConfigConstants.DEFAULT_CHARSET);
	File fakeJar = temporaryFolder.newFile("test.jar");
	try (ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(fakeJar))) {
		ZipEntry entry = new ZipEntry("lib/internalTest.jar");
		zos.putNextEntry(entry);
		zos.write(nestedJarContent);
		zos.closeEntry();
	}

	final List<File> files = PackagedProgram.extractContainedLibraries(fakeJar.toURI().toURL());
	Assert.assertEquals(1, files.size());
	Assert.assertArrayEquals(nestedJarContent, Files.readAllBytes(files.iterator().next().toPath()));
}
 
Example 25
Source Project: flink   Source File: DataSourceTaskTest.java    License: Apache License 2.0
@Override
public Record readRecord(Record target, byte[] record, int offset, int numBytes) {
	
	if(this.cnt == 10) {
		throw new RuntimeException("Excpected Test Exception.");
	}
	
	this.cnt++;
	
	String line = new String(record, offset, numBytes, ConfigConstants.DEFAULT_CHARSET);
	
	try {
		this.key.setValue(Integer.parseInt(line.substring(0,line.indexOf("_"))));
		this.value.setValue(Integer.parseInt(line.substring(line.indexOf("_")+1,line.length())));
	}
	catch(RuntimeException re) {
		return null;
	}
	
	target.setField(0, this.key);
	target.setField(1, this.value);
	return target;
}
 
Example 26
Source Project: Flink-CEPplus   Source File: VarLengthStringParserTest.java    License: Apache License 2.0
@Test
public void testParseInvalidQuotedStrings() {

	this.parser = new StringValueParser();
	this.parser.enableQuotedStringParsing((byte)'"');

	// check invalid quoted strings: a closing quote followed by a non-delimiter character, and an unterminated quoted field
	byte[] recBytes = "\"abcdefgh\"-|\"jklmno  ".getBytes(ConfigConstants.DEFAULT_CHARSET);
	StringValue s = new StringValue();

	int startPos = 0;
	startPos = parser.parseField(recBytes, startPos, recBytes.length, new byte[] {'|'}, s);
	assertTrue(startPos < 0);

	startPos = 12;
	startPos = parser.parseField(recBytes, startPos, recBytes.length, new byte[] {'|'}, s);
	assertTrue(startPos < 0);
}
 
Example 27
Source Project: Flink-CEPplus   Source File: VarLengthStringParserTest.java    License: Apache License 2.0
@Test
public void testParseValidMixedStringsWithCharset() {

	Charset charset = StandardCharsets.US_ASCII;
	this.parser = new StringValueParser();
	this.parser.enableQuotedStringParsing((byte) '@');

	// check valid strings without whitespace and trailing delimiter
	byte[] recBytes = "@abcde|gh@|@i@|jklmnopq|@rs@|tuv".getBytes(ConfigConstants.DEFAULT_CHARSET);
	StringValue s = new StringValue();

	int startPos = 0;
	parser.setCharset(charset);
	startPos = parser.parseField(recBytes, startPos, recBytes.length, new byte[]{'|'}, s);
	assertEquals(11, startPos);
	assertEquals("abcde|gh", s.getValue());
}
 
Example 28
Source Project: flink   Source File: FakeKinesisBehavioursFactory.java    License: Apache License 2.0
public static List<Record> createRecordBatchWithRange(int min, int max) {
	List<Record> batch = new LinkedList<>();
	long sumRecordBatchBytes = 0L;
	// Create record data of size 10 KB
	String data = createDataSize(10 * 1024L);

	for (int i = min; i < max; i++) {
		Record record = new Record()
						.withData(
								ByteBuffer.wrap(String.valueOf(data).getBytes(ConfigConstants.DEFAULT_CHARSET)))
						.withPartitionKey(UUID.randomUUID().toString())
						.withApproximateArrivalTimestamp(new Date(System.currentTimeMillis()))
						.withSequenceNumber(String.valueOf(i));
		batch.add(record);
		sumRecordBatchBytes += record.getData().remaining();

	}
	if (batch.size() != 0) {
		averageRecordSizeBytes = sumRecordBatchBytes / batch.size();
	}

	return batch;
}