org.apache.flink.configuration.ConfigConstants Java Examples

The following examples show how to use org.apache.flink.configuration.ConfigConstants. The examples are drawn from open-source projects; each one notes its source file and originating project.
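
Nearly all of the examples below rely on ConfigConstants.DEFAULT_CHARSET, Flink's fixed UTF-8 charset, to decouple byte/string conversions from the JVM's platform-default encoding. A minimal, self-contained sketch of that pattern:

import org.apache.flink.configuration.ConfigConstants;

public class CharsetRoundTrip {
	public static void main(String[] args) {
		// encode and decode with Flink's fixed UTF-8 charset instead of the
		// platform default, so the bytes are identical on every JVM
		byte[] bytes = "hello flink".getBytes(ConfigConstants.DEFAULT_CHARSET);
		String restored = new String(bytes, ConfigConstants.DEFAULT_CHARSET);
		System.out.println(restored); // prints: hello flink
	}
}
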
Example #1
Source File: NiFiSinkTopologyExample.java    From flink with Apache License 2.0
public static void main(String[] args) throws Exception {
	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

	SiteToSiteClientConfig clientConfig = new SiteToSiteClient.Builder()
			.url("http://localhost:8080/nifi")
			.portName("Data from Flink")
			.buildConfig();

	DataStreamSink<String> dataStream = env.fromElements("one", "two", "three", "four", "five", "q")
			.addSink(new NiFiSink<>(clientConfig, new NiFiDataPacketBuilder<String>() {
				@Override
				public NiFiDataPacket createNiFiDataPacket(String s, RuntimeContext ctx) {
					return new StandardNiFiDataPacket(s.getBytes(ConfigConstants.DEFAULT_CHARSET),
						new HashMap<String, String>());
				}
			}));

	env.execute();
}
 
Example #2
Source File: DataSourceTaskTest.java    From Flink-CEPplus with Apache License 2.0
@Override
public Record readRecord(Record target, byte[] record, int offset, int numBytes) {
	
	if(this.cnt == 10) {
		throw new RuntimeException("Excpected Test Exception.");
	}
	
	this.cnt++;
	
	String line = new String(record, offset, numBytes, ConfigConstants.DEFAULT_CHARSET);
	
	try {
		this.key.setValue(Integer.parseInt(line.substring(0,line.indexOf("_"))));
		this.value.setValue(Integer.parseInt(line.substring(line.indexOf("_")+1,line.length())));
	}
	catch(RuntimeException re) {
		return null;
	}
	
	target.setField(0, this.key);
	target.setField(1, this.value);
	return target;
}
 
Example #3
Source File: BlobsCleanupITCase.java    From flink with Apache License 2.0
@BeforeClass
public static void setup() throws Exception {
	blobBaseDir = TEMPORARY_FOLDER.newFolder();

	Configuration cfg = new Configuration();
	cfg.setString(BlobServerOptions.STORAGE_DIRECTORY, blobBaseDir.getAbsolutePath());
	cfg.setString(ConfigConstants.RESTART_STRATEGY, "fixeddelay");
	cfg.setInteger(ConfigConstants.RESTART_STRATEGY_FIXED_DELAY_ATTEMPTS, 1);
	// BLOBs are deleted from BlobCache between 1s and 2s after last reference
	// -> the BlobCache may still have the BLOB or not (let's test both cases randomly)
	cfg.setLong(BlobServerOptions.CLEANUP_INTERVAL, 1L);

	configuration = new UnmodifiableConfiguration(cfg);

	miniClusterResource = new MiniClusterResource(new MiniClusterResourceConfiguration.Builder()
		.setNumberSlotsPerTaskManager(2)
		.setNumberTaskManagers(1)
		.setConfiguration(configuration)
		.build());

	miniClusterResource.before();
}
 
Example #4
Source File: FlinkPlanner.java    From cascading-flink with Apache License 2.0
public FlinkPlanner(List<String> classPath) {
	super();
	this.classPath = classPath;

	env.getConfig().disableSysoutLogging();
	if (env.getParallelism() <= 0) {
		// load the default parallelism from config
		GlobalConfiguration.loadConfiguration(new File(CliFrontend.getConfigurationDirectoryFromEnv()).getAbsolutePath());
		org.apache.flink.configuration.Configuration configuration = GlobalConfiguration.getConfiguration();
		int parallelism = configuration.getInteger(ConfigConstants.DEFAULT_PARALLELISM_KEY, -1);
		if (parallelism <= 0) {
			throw new RuntimeException("Please set the default parallelism via the -p command-line flag");
		} else {
			env.setParallelism(parallelism);
		}
	}

}
 
Example #5
Source File: RocksDBOperationUtils.java    From flink with Apache License 2.0
/**
 * Creates a column descriptor for a state column family.
 *
 * <p>Sets TTL compaction filter if {@code ttlCompactFiltersManager} is not {@code null}.
 */
public static ColumnFamilyDescriptor createColumnFamilyDescriptor(
	RegisteredStateMetaInfoBase metaInfoBase,
	Function<String, ColumnFamilyOptions> columnFamilyOptionsFactory,
	@Nullable RocksDbTtlCompactFiltersManager ttlCompactFiltersManager) {

	ColumnFamilyOptions options = createColumnFamilyOptions(columnFamilyOptionsFactory, metaInfoBase.getName());
	if (ttlCompactFiltersManager != null) {
		ttlCompactFiltersManager.setAndRegisterCompactFilterIfStateTtl(metaInfoBase, options);
	}
	byte[] nameBytes = metaInfoBase.getName().getBytes(ConfigConstants.DEFAULT_CHARSET);
	Preconditions.checkState(!Arrays.equals(RocksDB.DEFAULT_COLUMN_FAMILY, nameBytes),
		"The chosen state name 'default' collides with the name of the default column family!");

	return new ColumnFamilyDescriptor(nameBytes, options);
}
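
The precondition above guards against a name collision with RocksDB's built-in column family. A small sketch of the check it performs (RocksDB.DEFAULT_COLUMN_FAMILY is the byte[] name "default"):

import java.util.Arrays;

import org.apache.flink.configuration.ConfigConstants;

import org.rocksdb.RocksDB;

public class DefaultColumnFamilyCheck {
	public static void main(String[] args) {
		// a state named "default" encodes to the same bytes as RocksDB's
		// pre-existing default column family, hence the precondition above
		byte[] nameBytes = "default".getBytes(ConfigConstants.DEFAULT_CHARSET);
		System.out.println(Arrays.equals(RocksDB.DEFAULT_COLUMN_FAMILY, nameBytes)); // true
	}
}
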
 
Example #6
Source File: SavepointMigrationTestBase.java    From Flink-CEPplus with Apache License 2.0
private Configuration getConfiguration() throws Exception {
	// Flink configuration
	final Configuration config = new Configuration();

	config.setInteger(ConfigConstants.LOCAL_NUMBER_TASK_MANAGER, 1);
	config.setInteger(TaskManagerOptions.NUM_TASK_SLOTS, DEFAULT_PARALLELISM);

	UUID id = UUID.randomUUID();
	final File checkpointDir = TEMP_FOLDER.newFolder("checkpoints_" + id).getAbsoluteFile();
	final File savepointDir = TEMP_FOLDER.newFolder("savepoints_" + id).getAbsoluteFile();

	if (!checkpointDir.exists() || !savepointDir.exists()) {
		throw new Exception("Test setup failed: failed to create (temporary) directories.");
	}

	LOG.info("Created temporary checkpoint directory: " + checkpointDir + ".");
	LOG.info("Created savepoint directory: " + savepointDir + ".");

	config.setString(CheckpointingOptions.STATE_BACKEND, "memory");
	config.setString(CheckpointingOptions.CHECKPOINTS_DIRECTORY, checkpointDir.toURI().toString());
	config.setInteger(CheckpointingOptions.FS_SMALL_FILE_THRESHOLD, 0);
	config.setString(CheckpointingOptions.SAVEPOINT_DIRECTORY, savepointDir.toURI().toString());

	return config;
}
 
Example #7
Source File: ContinuousFileProcessingCheckpointITCase.java    From flink with Apache License 2.0
/**
 * Fill the file with content and put the content in the {@code hdPathContents} list.
 */
private Tuple2<Path, String> fillWithData(
	String base, String fileName, int fileIdx, String sampleLine) throws IOException, InterruptedException {

	assert (localFs != null);

	org.apache.hadoop.fs.Path tmp =
		new org.apache.hadoop.fs.Path(base + "/." + fileName + fileIdx);

	FSDataOutputStream stream = localFs.create(tmp);
	StringBuilder str = new StringBuilder();
	for (int i = 0; i < LINES_PER_FILE; i++) {
		String line = fileIdx + ": " + sampleLine + " " + i + "\n";
		str.append(line);
		stream.write(line.getBytes(ConfigConstants.DEFAULT_CHARSET));
	}
	stream.close();
	return new Tuple2<>(tmp, str.toString());
}
 
Example #8
Source File: ReporterSetupTest.java    From flink with Apache License 2.0
/**
 * Verifies that factory/reflection approaches can be mixed freely.
 */
@Test
public void testMixedSetupsFactoryParsing() throws Exception {
	final Configuration config = new Configuration();
	config.setString(ConfigConstants.METRICS_REPORTER_PREFIX + "test1." + ConfigConstants.METRICS_REPORTER_FACTORY_CLASS_SUFFIX, InstantiationTypeTrackingTestReporterFactory.class.getName());
	config.setString(ConfigConstants.METRICS_REPORTER_PREFIX + "test2." + ConfigConstants.METRICS_REPORTER_CLASS_SUFFIX, InstantiationTypeTrackingTestReporter.class.getName());

	final List<ReporterSetup> reporterSetups = ReporterSetup.fromConfiguration(config);

	assertEquals(2, reporterSetups.size());

	final ReporterSetup reporterSetup1 = reporterSetups.get(0);
	final ReporterSetup reporterSetup2 = reporterSetups.get(1);

	final InstantiationTypeTrackingTestReporter metricReporter1 = (InstantiationTypeTrackingTestReporter) reporterSetup1.getReporter();
	final InstantiationTypeTrackingTestReporter metricReporter2 = (InstantiationTypeTrackingTestReporter) reporterSetup2.getReporter();

	assertTrue(metricReporter1.createdByFactory ^ metricReporter2.createdByFactory);
}
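
Both configuration keys are assembled from ConfigConstants fragments: a shared prefix, the reporter's name, and a per-property suffix. A minimal sketch of the key assembly (the printed values are indicative; check the constants in your Flink version):

import org.apache.flink.configuration.ConfigConstants;

public class ReporterKeyAssembly {
	public static void main(String[] args) {
		// the reporter name sits between the shared prefix and the property suffix
		String byClass = ConfigConstants.METRICS_REPORTER_PREFIX
			+ "test1." + ConfigConstants.METRICS_REPORTER_CLASS_SUFFIX;
		String byFactory = ConfigConstants.METRICS_REPORTER_PREFIX
			+ "test2." + ConfigConstants.METRICS_REPORTER_FACTORY_CLASS_SUFFIX;
		System.out.println(byClass);   // expected: metrics.reporter.test1.class
		System.out.println(byFactory); // expected: metrics.reporter.test2.factory.class
	}
}
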
 
Example #9
Source File: ZooKeeperMesosWorkerStore.java    From Flink-CEPplus with Apache License 2.0
/**
 * Get the persisted framework ID.
 * @return the current ID or empty if none is yet persisted.
 * @throws Exception on ZK failures, interruptions.
 */
@Override
public Option<Protos.FrameworkID> getFrameworkID() throws Exception {
	synchronized (startStopLock) {
		verifyIsRunning();

		Option<Protos.FrameworkID> frameworkID;
		byte[] value = frameworkIdInZooKeeper.getValue();
		if (value.length == 0) {
			frameworkID = Option.empty();
		} else {
			frameworkID = Option.apply(Protos.FrameworkID.newBuilder().setValue(new String(value,
				ConfigConstants.DEFAULT_CHARSET)).build());
		}

		return frameworkID;
	}
}
 
Example #10
Source File: ReporterSetupTest.java    From flink with Apache License 2.0
/**
 * Verifies that multiple reporters are instantiated correctly.
 */
@Test
public void testMultipleReporterInstantiation() throws Exception {
	Configuration config = new Configuration();

	config.setString(ConfigConstants.METRICS_REPORTER_PREFIX + "test1." + ConfigConstants.METRICS_REPORTER_CLASS_SUFFIX, TestReporter11.class.getName());
	config.setString(ConfigConstants.METRICS_REPORTER_PREFIX + "test2." + ConfigConstants.METRICS_REPORTER_CLASS_SUFFIX, TestReporter12.class.getName());
	config.setString(ConfigConstants.METRICS_REPORTER_PREFIX + "test3." + ConfigConstants.METRICS_REPORTER_CLASS_SUFFIX, TestReporter13.class.getName());

	List<ReporterSetup> reporterSetups = ReporterSetup.fromConfiguration(config);

	assertEquals(3, reporterSetups.size());

	Assert.assertTrue(TestReporter11.wasOpened);
	Assert.assertTrue(TestReporter12.wasOpened);
	Assert.assertTrue(TestReporter13.wasOpened);
}
 
Example #11
Source File: FakeKinesisBehavioursFactory.java    From flink with Apache License 2.0
@Override
public GetRecordsResult getRecords(String shardIterator, int maxRecordsToGet) {
	BlockingQueue<String> queue = Preconditions.checkNotNull(this.shardIteratorToQueueMap.get(shardIterator),
	"no queue for iterator %s", shardIterator);
	List<Record> records = Collections.emptyList();
	try {
		String data = queue.take();
		Record record = new Record()
			.withData(
				ByteBuffer.wrap(data.getBytes(ConfigConstants.DEFAULT_CHARSET)))
			.withPartitionKey(UUID.randomUUID().toString())
			.withApproximateArrivalTimestamp(new Date(System.currentTimeMillis()))
			.withSequenceNumber(String.valueOf(0));
		records = Collections.singletonList(record);
	} catch (InterruptedException e) {
		shardIterator = null;
	}
	return new GetRecordsResult()
		.withRecords(records)
		.withMillisBehindLatest(0L)
		.withNextShardIterator(shardIterator);
}
 
Example #12
Source File: GPUDriver.java    From flink with Apache License 2.0
GPUDriver(Configuration config) throws Exception {
	final String discoveryScriptPathStr = config.getString(DISCOVERY_SCRIPT_PATH);
	if (StringUtils.isNullOrWhitespaceOnly(discoveryScriptPathStr)) {
		throw new IllegalConfigurationException(
			String.format("GPU discovery script ('%s') is not configured.", ExternalResourceOptions.genericKeyWithSuffix(DISCOVERY_SCRIPT_PATH.key())));
	}

	Path discoveryScriptPath = Paths.get(discoveryScriptPathStr);
	if (!discoveryScriptPath.isAbsolute()) {
		discoveryScriptPath = Paths.get(System.getenv().getOrDefault(ConfigConstants.ENV_FLINK_HOME_DIR, "."), discoveryScriptPathStr);
	}
	discoveryScriptFile = discoveryScriptPath.toFile();

	if (!discoveryScriptFile.exists()) {
		throw new FileNotFoundException(String.format("The gpu discovery script does not exist in path %s.", discoveryScriptFile.getAbsolutePath()));
	}
	if (!discoveryScriptFile.canExecute()) {
		throw new FlinkException(String.format("The discovery script %s is not executable.", discoveryScriptFile.getAbsolutePath()));
	}

	args = config.getString(DISCOVERY_SCRIPT_ARG);
}
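
A relative discovery-script path is resolved against the FLINK_HOME environment variable, falling back to the working directory. A stripped-down sketch of that resolution, with a hypothetical script path:

import java.nio.file.Path;
import java.nio.file.Paths;

import org.apache.flink.configuration.ConfigConstants;

public class ScriptPathResolution {
	public static void main(String[] args) {
		String configured = "plugins/gpu/discover-gpus.sh"; // hypothetical relative path
		Path path = Paths.get(configured);
		if (!path.isAbsolute()) {
			// resolve against FLINK_HOME, or the working directory if unset
			path = Paths.get(System.getenv().getOrDefault(ConfigConstants.ENV_FLINK_HOME_DIR, "."), configured);
		}
		System.out.println(path);
	}
}
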
 
Example #13
Source File: CliFrontend.java    From flink with Apache License 2.0
public static String getConfigurationDirectoryFromEnv() {
	String location = System.getenv(ConfigConstants.ENV_FLINK_CONF_DIR);

	if (location != null) {
		if (new File(location).exists()) {
			return location;
		}
		else {
			throw new RuntimeException("The configuration directory '" + location + "', specified in the '" +
				ConfigConstants.ENV_FLINK_CONF_DIR + "' environment variable, does not exist.");
		}
	}
	else if (new File(CONFIG_DIRECTORY_FALLBACK_1).exists()) {
		location = CONFIG_DIRECTORY_FALLBACK_1;
	}
	else if (new File(CONFIG_DIRECTORY_FALLBACK_2).exists()) {
		location = CONFIG_DIRECTORY_FALLBACK_2;
	}
	else {
		throw new RuntimeException("The configuration directory was not specified. " +
				"Please specify the directory containing the configuration file through the '" +
			ConfigConstants.ENV_FLINK_CONF_DIR + "' environment variable.");
	}
	return location;
}
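
Outside the CLI, the same environment variable can be inspected directly. A trivial sketch, assuming only that ConfigConstants.ENV_FLINK_CONF_DIR names the FLINK_CONF_DIR variable (as the code above shows):

import org.apache.flink.configuration.ConfigConstants;

public class ConfDirLookup {
	public static void main(String[] args) {
		// ENV_FLINK_CONF_DIR names the environment variable, not the directory itself
		String confDir = System.getenv(ConfigConstants.ENV_FLINK_CONF_DIR);
		System.out.println(confDir != null ? confDir : "<FLINK_CONF_DIR is not set>");
	}
}
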
 
Example #14
Source File: ContinuousFileProcessingITCase.java    From flink with Apache License 2.0
/** Create a file and fill it with content. */
private Tuple2<org.apache.hadoop.fs.Path, String> fillWithData(
	String base, String fileName, int fileIdx, String sampleLine) throws IOException, InterruptedException {

	assert (hdfs != null);

	org.apache.hadoop.fs.Path tmp =
		new org.apache.hadoop.fs.Path(base + "/." + fileName + fileIdx);

	FSDataOutputStream stream = hdfs.create(tmp);
	StringBuilder str = new StringBuilder();
	for (int i = 0; i < LINES_PER_FILE; i++) {
		String line = fileIdx + ": " + sampleLine + " " + i + "\n";
		str.append(line);
		stream.write(line.getBytes(ConfigConstants.DEFAULT_CHARSET));
	}
	stream.close();
	return new Tuple2<>(tmp, str.toString());
}
 
Example #15
Source File: KvStateRequest.java    From Flink-CEPplus with Apache License 2.0
@Override
public byte[] serialize() {

	byte[] serializedStateName = stateName.getBytes(ConfigConstants.DEFAULT_CHARSET);

	// JobID + stateName + sizeOf(stateName) + hashCode + keyAndNamespace + sizeOf(keyAndNamespace)
	final int size =
			JobID.SIZE +
			serializedStateName.length + Integer.BYTES +
			Integer.BYTES +
			serializedKeyAndNamespace.length + Integer.BYTES;

	return ByteBuffer.allocate(size)
			.putLong(jobId.getLowerPart())
			.putLong(jobId.getUpperPart())
			.putInt(serializedStateName.length)
			.put(serializedStateName)
			.putInt(keyHashCode)
			.putInt(serializedKeyAndNamespace.length)
			.put(serializedKeyAndNamespace)
			.array();
}
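
The comment documents the wire layout, so the buffer can be read back in the same order. A hypothetical round-trip sketch with made-up job-id, hash, and payload values:

import java.nio.ByteBuffer;

import org.apache.flink.configuration.ConfigConstants;

public class KvStateRequestLayout {
	public static void main(String[] args) {
		byte[] stateName = "myState".getBytes(ConfigConstants.DEFAULT_CHARSET);
		byte[] keyAndNamespace = {1, 2, 3}; // made-up payload
		ByteBuffer buf = ByteBuffer
			.allocate(16 + Integer.BYTES + stateName.length + Integer.BYTES + Integer.BYTES + keyAndNamespace.length)
			.putLong(42L).putLong(7L)                      // made-up JobID halves
			.putInt(stateName.length).put(stateName)       // sizeOf(stateName) + stateName
			.putInt(1234)                                  // made-up key hash code
			.putInt(keyAndNamespace.length).put(keyAndNamespace);
		buf.flip();
		buf.getLong(); buf.getLong();                      // skip the JobID
		byte[] nameBytes = new byte[buf.getInt()];
		buf.get(nameBytes);
		System.out.println(new String(nameBytes, ConfigConstants.DEFAULT_CHARSET)); // myState
	}
}
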
 
Example #16
Source File: DoubleValueParser.java    From Flink-CEPplus with Apache License 2.0
@Override
public int parseField(byte[] bytes, int startPos, int limit, byte[] delimiter, DoubleValue reusable) {
	final int endPos = nextStringEndPos(bytes, startPos, limit, delimiter);
	if (endPos < 0) {
		return -1;
	}

	if (endPos > startPos &&
			(Character.isWhitespace(bytes[startPos]) || Character.isWhitespace(bytes[(endPos - 1)]))) {
		setErrorState(ParseErrorState.NUMERIC_VALUE_ILLEGAL_CHARACTER);
		return -1;
	}

	String str = new String(bytes, startPos, endPos - startPos, ConfigConstants.DEFAULT_CHARSET);
	try {
		double value = Double.parseDouble(str);
		reusable.setValue(value);
		this.result = reusable;
		return (endPos == limit) ? limit : endPos + delimiter.length;
	}
	catch (NumberFormatException e) {
		setErrorState(ParseErrorState.NUMERIC_VALUE_FORMAT_ERROR);
		return -1;
	}
}
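
parseField returns the offset of the next field (or -1 on error), which is what allows parsers to be chained across a delimited record. A small usage sketch, assuming DoubleValueParser's public no-arg constructor:

import org.apache.flink.configuration.ConfigConstants;
import org.apache.flink.types.DoubleValue;
import org.apache.flink.types.parser.DoubleValueParser;

public class DoubleParserUsage {
	public static void main(String[] args) {
		byte[] record = "3.14|2.72".getBytes(ConfigConstants.DEFAULT_CHARSET);
		DoubleValueParser parser = new DoubleValueParser();
		DoubleValue reuse = new DoubleValue();
		// returns the start offset of the next field, or -1 on a parse error
		int next = parser.parseField(record, 0, record.length, new byte[]{'|'}, reuse);
		System.out.println(reuse.getValue() + " / next offset: " + next); // 3.14 / next offset: 5
	}
}
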
 
Example #17
Source File: SqlDateParser.java    From Flink-CEPplus with Apache License 2.0
@Override
public int parseField(byte[] bytes, int startPos, int limit, byte[] delimiter, Date reusable) {
	final int endPos = nextStringEndPos(bytes, startPos, limit, delimiter);
	if (endPos < 0) {
		return -1;
	}

	if (endPos > startPos &&
			(Character.isWhitespace(bytes[startPos]) || Character.isWhitespace(bytes[(endPos - 1)]))) {
		setErrorState(ParseErrorState.NUMERIC_VALUE_ILLEGAL_CHARACTER);
		return -1;
	}

	String str = new String(bytes, startPos, endPos - startPos, ConfigConstants.DEFAULT_CHARSET);
	try {
		this.result = Date.valueOf(str);
		return (endPos == limit) ? limit : endPos + delimiter.length;
	} catch (IllegalArgumentException e) {
		setErrorState(ParseErrorState.NUMERIC_VALUE_FORMAT_ERROR);
		return -1;
	}
}
 
Example #18
Source File: MetricGroupTest.java    From flink with Apache License 2.0
/**
 * Verifies that calling {@link AbstractMetricGroup#getLogicalScope(CharacterFilter, char, int)} on a {@link GenericValueMetricGroup}
 * ignores the value group's name as well.
 */
@Test
public void testLogicalScopeShouldIgnoreValueGroupName() throws Exception {
	Configuration config = new Configuration();
	config.setString(ConfigConstants.METRICS_REPORTER_PREFIX + "test." + ConfigConstants.METRICS_REPORTER_CLASS_SUFFIX, TestReporter.class.getName());

	MetricRegistryImpl registry = new MetricRegistryImpl(MetricRegistryConfiguration.fromConfiguration(config));
	try {
		GenericMetricGroup root = new GenericMetricGroup(registry, new DummyAbstractMetricGroup(registry), "root");

		String key = "key";
		String value = "value";

		MetricGroup group = root.addGroup(key, value);

		String logicalScope = ((AbstractMetricGroup) group)
			.getLogicalScope(new DummyCharacterFilter(), registry.getDelimiter(), 0);
		assertThat("Key is missing from logical scope.", logicalScope, containsString(key));
		assertThat("Value is present in logical scope.", logicalScope, not(containsString(value)));
	} finally {
		registry.shutdown().get();
	}
}
 
Example #19
Source File: FileInputFormatTest.java    From flink with Apache License 2.0
@Test
public void testReadMultiplePatterns() throws Exception {
	final String contents = "CONTENTS";

	// create some accepted, some ignored files

	File child1 = temporaryFolder.newFile("dataFile1.txt");
	File child2 = temporaryFolder.newFile("another_file.bin");
	createTempFiles(contents.getBytes(ConfigConstants.DEFAULT_CHARSET), child1, child2);

	// both files match an exclusion pattern, so no input splits should be created

	Configuration configuration = new Configuration();

	final DummyFileInputFormat format = new DummyFileInputFormat();
	format.setFilePath(temporaryFolder.getRoot().toURI().toString());
	format.configure(configuration);
	format.setFilesFilter(new GlobFilePathFilter(
		Collections.singletonList("**"),
		Arrays.asList("**/another_file.bin", "**/dataFile1.txt")
	));
	FileInputSplit[] splits = format.createInputSplits(1);

	Assert.assertEquals(0, splits.length);
}
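
Both files match an exclusion pattern, so the format produces no input splits. A small sketch of the filter's semantics in isolation (filterPath returning true means the path is skipped, per FilePathFilter's contract):

import java.util.Arrays;
import java.util.Collections;

import org.apache.flink.api.common.io.GlobFilePathFilter;
import org.apache.flink.core.fs.Path;

public class GlobFilterSemantics {
	public static void main(String[] args) {
		GlobFilePathFilter filter = new GlobFilePathFilter(
			Collections.singletonList("**"),
			Arrays.asList("**/another_file.bin", "**/dataFile1.txt"));
		// true -> the path is filtered out; false -> it is kept
		System.out.println(filter.filterPath(new Path("/data/dataFile1.txt"))); // true
		System.out.println(filter.filterPath(new Path("/data/kept.txt")));      // false
	}
}
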
 
Example #20
Source File: ContinuousFileProcessingITCase.java    From Flink-CEPplus with Apache License 2.0
/** Create a file and fill it with content. */
private Tuple2<org.apache.hadoop.fs.Path, String> fillWithData(
	String base, String fileName, int fileIdx, String sampleLine) throws IOException, InterruptedException {

	assert (hdfs != null);

	org.apache.hadoop.fs.Path tmp =
		new org.apache.hadoop.fs.Path(base + "/." + fileName + fileIdx);

	FSDataOutputStream stream = hdfs.create(tmp);
	StringBuilder str = new StringBuilder();
	for (int i = 0; i < LINES_PER_FILE; i++) {
		String line = fileIdx + ": " + sampleLine + " " + i + "\n";
		str.append(line);
		stream.write(line.getBytes(ConfigConstants.DEFAULT_CHARSET));
	}
	stream.close();
	return new Tuple2<>(tmp, str.toString());
}
 
Example #21
Source File: ContaineredTaskManagerParametersTest.java    From Flink-CEPplus with Apache License 2.0
/**
 * This tests that per default the off heap memory is set to what the network buffers require.
 */
@Test
public void testOffHeapMemoryWithDefaultConfiguration() {
	Configuration conf = new Configuration();

	ContaineredTaskManagerParameters params =
		ContaineredTaskManagerParameters.create(conf, CONTAINER_MEMORY, 1);

	final float memoryCutoffRatio = conf.getFloat(
		ConfigConstants.CONTAINERIZED_HEAP_CUTOFF_RATIO,
		ConfigConstants.DEFAULT_YARN_HEAP_CUTOFF_RATIO);
	final int minCutoff = conf.getInteger(
		ConfigConstants.CONTAINERIZED_HEAP_CUTOFF_MIN,
		ConfigConstants.DEFAULT_YARN_HEAP_CUTOFF);

	long cutoff = Math.max((long) (CONTAINER_MEMORY * memoryCutoffRatio), minCutoff);
	final long networkBufMB =
		calculateNetworkBufferMemory(
			(CONTAINER_MEMORY - cutoff) << 20, // megabytes to bytes
			conf) >> 20; // bytes to megabytes
	assertEquals(networkBufMB + cutoff, params.taskManagerDirectMemoryLimitMB());
}
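
The cutoff is the larger of a fixed ratio of the container memory and a configured minimum. A worked computation, assuming the historical defaults of a 0.25 ratio and a 600 MB minimum (both are assumptions; check the defaults in your Flink version):

public class HeapCutoffArithmetic {
	public static void main(String[] args) {
		long containerMemoryMB = 1024L;
		float cutoffRatio = 0.25f; // assumed default of CONTAINERIZED_HEAP_CUTOFF_RATIO
		int minCutoffMB = 600;     // assumed default of CONTAINERIZED_HEAP_CUTOFF_MIN
		// the minimum dominates until the ratio share exceeds it (here: containers > 2400 MB)
		long cutoff = Math.max((long) (containerMemoryMB * cutoffRatio), minCutoffMB);
		System.out.println(cutoff + " MB"); // 600 MB
	}
}
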
 
Example #22
Source File: PackagedProgramTest.java    From flink with Apache License 2.0
@Test
public void testExtractContainedLibraries() throws Exception {
	String s = "testExtractContainedLibraries";
	byte[] nestedJarContent = s.getBytes(ConfigConstants.DEFAULT_CHARSET);
	File fakeJar = temporaryFolder.newFile("test.jar");
	try (ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(fakeJar))) {
		ZipEntry entry = new ZipEntry("lib/internalTest.jar");
		zos.putNextEntry(entry);
		zos.write(nestedJarContent);
		zos.closeEntry();
	}

	final List<File> files = PackagedProgram.extractContainedLibraries(fakeJar.toURI().toURL());
	Assert.assertEquals(1, files.size());
	Assert.assertArrayEquals(nestedJarContent, Files.readAllBytes(files.iterator().next().toPath()));
}
 
Example #23
Source File: DataSourceTaskTest.java    From flink with Apache License 2.0
@Override
public Record readRecord(Record target, byte[] record, int offset, int numBytes) {
	
	if(this.cnt == 10) {
		throw new RuntimeException("Excpected Test Exception.");
	}
	
	this.cnt++;
	
	String line = new String(record, offset, numBytes, ConfigConstants.DEFAULT_CHARSET);
	
	try {
		this.key.setValue(Integer.parseInt(line.substring(0,line.indexOf("_"))));
		this.value.setValue(Integer.parseInt(line.substring(line.indexOf("_")+1,line.length())));
	}
	catch(RuntimeException re) {
		return null;
	}
	
	target.setField(0, this.key);
	target.setField(1, this.value);
	return target;
}
 
Example #24
Source File: VarLengthStringParserTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testParseInvalidQuotedStrings() {

	this.parser = new StringValueParser();
	this.parser.enableQuotedStringParsing((byte)'"');

	// check invalid quoted strings: unquoted characters after the closing quote, and an unterminated quote
	byte[] recBytes = "\"abcdefgh\"-|\"jklmno  ".getBytes(ConfigConstants.DEFAULT_CHARSET);
	StringValue s = new StringValue();

	int startPos = 0;
	startPos = parser.parseField(recBytes, startPos, recBytes.length, new byte[] {'|'}, s);
	assertTrue(startPos < 0);

	startPos = 12;
	startPos = parser.parseField(recBytes, startPos, recBytes.length, new byte[] {'|'}, s);
	assertTrue(startPos < 0);
}
 
Example #25
Source File: VarLengthStringParserTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testParseValidMixedStringsWithCharset() {

	Charset charset = StandardCharsets.US_ASCII;
	this.parser = new StringValueParser();
	this.parser.enableQuotedStringParsing((byte) '@');

	// check valid strings without whitespaces and trailing delimiter
	byte[] recBytes = "@abcde|gh@|@i@|jklmnopq|@rs@|tuv".getBytes(ConfigConstants.DEFAULT_CHARSET);
	StringValue s = new StringValue();

	int startPos = 0;
	parser.setCharset(charset);
	startPos = parser.parseField(recBytes, startPos, recBytes.length, new byte[]{'|'}, s);
	assertEquals(11, startPos);
	assertEquals("abcde|gh", s.getValue());
}
 
Example #26
Source File: FakeKinesisBehavioursFactory.java    From flink with Apache License 2.0
public static List<Record> createRecordBatchWithRange(int min, int max) {
	List<Record> batch = new LinkedList<>();
	long sumRecordBatchBytes = 0L;
	// create a 10 KB payload that is reused for every record
	String data = createDataSize(10 * 1024L);

	for (int i = min; i < max; i++) {
		Record record = new Record()
						.withData(
								ByteBuffer.wrap(data.getBytes(ConfigConstants.DEFAULT_CHARSET)))
						.withPartitionKey(UUID.randomUUID().toString())
						.withApproximateArrivalTimestamp(new Date(System.currentTimeMillis()))
						.withSequenceNumber(String.valueOf(i));
		batch.add(record);
		sumRecordBatchBytes += record.getData().remaining();
	}
	if (batch.size() != 0) {
		averageRecordSizeBytes = sumRecordBatchBytes / batch.size();
	}

	return batch;
}
 
Example #27
Source File: CsvInputFormatTest.java    From flink with Apache License 2.0
@Test
public void testParseStringErrors() throws Exception {
	StringParser stringParser = new StringParser();
	stringParser.enableQuotedStringParsing((byte) '"');

	Object[][] failures = {
			{"\"string\" trailing", FieldParser.ParseErrorState.UNQUOTED_CHARS_AFTER_QUOTED_STRING},
			{"\"unterminated ", FieldParser.ParseErrorState.UNTERMINATED_QUOTED_STRING}
	};

	for (Object[] failure : failures) {
		String input = (String) failure[0];

		int result = stringParser.parseField(input.getBytes(ConfigConstants.DEFAULT_CHARSET), 0,
			input.length(), new byte[]{'|'}, null);

		assertThat(result, is(-1));
		assertThat(stringParser.getErrorState(), is(failure[1]));
	}
}
 
Example #28
Source File: HDFSTest.java    From flink with Apache License 2.0
@Before
public void createHDFS() {
	try {
		Configuration hdConf = new Configuration();

		File baseDir = new File("./target/hdfs/hdfsTest").getAbsoluteFile();
		FileUtil.fullyDelete(baseDir);
		hdConf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, baseDir.getAbsolutePath());
		MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(hdConf);
		hdfsCluster = builder.build();

		hdfsURI = "hdfs://" + hdfsCluster.getURI().getHost() + ":" + hdfsCluster.getNameNodePort() + "/";

		hdPath = new org.apache.hadoop.fs.Path("/test");
		hdfs = hdPath.getFileSystem(hdConf);
		FSDataOutputStream stream = hdfs.create(hdPath);
		for (int i = 0; i < 10; i++) {
			stream.write("Hello HDFS\n".getBytes(ConfigConstants.DEFAULT_CHARSET));
		}
		stream.close();

	} catch (Throwable e) {
		e.printStackTrace();
		Assert.fail("Test failed " + e.getMessage());
	}
}
 
Example #29
Source File: HDFSTest.java    From Flink-CEPplus with Apache License 2.0
@Before
public void createHDFS() {
	try {
		Configuration hdConf = new Configuration();

		File baseDir = new File("./target/hdfs/hdfsTest").getAbsoluteFile();
		FileUtil.fullyDelete(baseDir);
		hdConf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, baseDir.getAbsolutePath());
		MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(hdConf);
		hdfsCluster = builder.build();

		hdfsURI = "hdfs://" + hdfsCluster.getURI().getHost() + ":" + hdfsCluster.getNameNodePort() + "/";

		hdPath = new org.apache.hadoop.fs.Path("/test");
		hdfs = hdPath.getFileSystem(hdConf);
		FSDataOutputStream stream = hdfs.create(hdPath);
		for (int i = 0; i < 10; i++) {
			stream.write("Hello HDFS\n".getBytes(ConfigConstants.DEFAULT_CHARSET));
		}
		stream.close();

	} catch (Throwable e) {
		e.printStackTrace();
		Assert.fail("Test failed " + e.getMessage());
	}
}