Java Code Examples for org.apache.flink.streaming.connectors.fs.StringWriter
The following examples show how to use
org.apache.flink.streaming.connectors.fs.StringWriter. These examples are extracted from open source projects.
You can vote up the examples you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example 1
Source Project: Flink-CEPplus Source File: BucketingSinkTest.java License: Apache License 2.0 | 6 votes |
/**
 * Builds a one-input operator test harness around a {@link BucketingSink} configured
 * for the rescaling tests: every record is routed to a bucket named after the record
 * itself, and the same {@code inactivityInterval} is used for both the inactive-bucket
 * check interval and the inactive-bucket threshold.
 *
 * @param outDir base directory the sink writes bucket files into
 * @param totalParallelism total parallelism of the simulated operator
 * @param taskIdx subtask index the harness runs as
 * @param inactivityInterval inactivity check interval and threshold, in milliseconds
 * @return the harness wrapping the configured sink
 * @throws Exception if the harness cannot be created
 */
private OneInputStreamOperatorTestHarness<String, Object> createRescalingTestSink(
		File outDir, int totalParallelism, int taskIdx, long inactivityInterval) throws Exception {

	// Bucket path == record value, which makes the resulting directory layout
	// trivial to assert on in the tests.
	Bucketer<String> perElementBucketer = new Bucketer<String>() {
		private static final long serialVersionUID = 1L;

		@Override
		public Path getBucketPath(Clock clock, Path basePath, String element) {
			return new Path(basePath, element);
		}
	};

	BucketingSink<String> bucketingSink = new BucketingSink<String>(outDir.getAbsolutePath())
			.setBucketer(perElementBucketer)
			.setWriter(new StringWriter<String>())
			.setInactiveBucketCheckInterval(inactivityInterval)
			.setInactiveBucketThreshold(inactivityInterval)
			.setPartPrefix(PART_PREFIX)
			.setInProgressPrefix("")
			.setPendingPrefix("")
			.setValidLengthPrefix("")
			.setInProgressSuffix(IN_PROGRESS_SUFFIX)
			.setPendingSuffix(PENDING_SUFFIX)
			.setValidLengthSuffix(VALID_LENGTH_SUFFIX);

	return createTestSink(bucketingSink, totalParallelism, taskIdx);
}
Example 2
Source Project: Flink-CEPplus Source File: BucketingSinkTest.java License: Apache License 2.0 | 6 votes |
/**
 * Builds a one-input operator test harness around a {@link BucketingSink} with a
 * fixed five-minute inactivity check interval and threshold. Records are bucketed
 * by their own value so tests can assert on the directory layout directly.
 *
 * @param dataDir base directory the sink writes bucket files into
 * @param totalParallelism total parallelism of the simulated operator
 * @param taskIdx subtask index the harness runs as
 * @return the harness wrapping the configured sink
 * @throws Exception if the harness cannot be created
 */
private OneInputStreamOperatorTestHarness<String, Object> createTestSink(File dataDir, int totalParallelism, int taskIdx) throws Exception {
	// Route each record to a bucket named after the record itself.
	Bucketer<String> perElementBucketer = new Bucketer<String>() {
		private static final long serialVersionUID = 1L;

		@Override
		public Path getBucketPath(Clock clock, Path basePath, String element) {
			return new Path(basePath, element);
		}
	};

	// 5 * 60 * 1000L = five minutes, applied to both inactivity settings.
	BucketingSink<String> bucketingSink = new BucketingSink<String>(dataDir.getAbsolutePath())
			.setBucketer(perElementBucketer)
			.setWriter(new StringWriter<String>())
			.setPartPrefix(PART_PREFIX)
			.setPendingPrefix("")
			.setInactiveBucketCheckInterval(5 * 60 * 1000L)
			.setInactiveBucketThreshold(5 * 60 * 1000L)
			.setPendingSuffix(PENDING_SUFFIX)
			.setInProgressSuffix(IN_PROGRESS_SUFFIX);

	return createTestSink(bucketingSink, totalParallelism, taskIdx);
}
Example 3
Source Project: Flink-CEPplus Source File: BucketingSinkTest.java License: Apache License 2.0 | 6 votes |
/**
 * Same configuration as {@code createRescalingTestSink}, but additionally sets a
 * batch rollover interval so part files are rolled on a timer as well.
 *
 * @param outDir base directory the sink writes bucket files into
 * @param totalParallelism total parallelism of the simulated operator
 * @param taskIdx subtask index the harness runs as
 * @param inactivityInterval inactivity check interval and threshold, in milliseconds
 * @param rolloverInterval batch rollover interval, in milliseconds
 * @return the harness wrapping the configured sink
 * @throws Exception if the harness cannot be created
 */
private OneInputStreamOperatorTestHarness<String, Object> createRescalingTestSinkWithRollover(
		File outDir, int totalParallelism, int taskIdx, long inactivityInterval, long rolloverInterval) throws Exception {

	// Bucket path == record value, keeping the on-disk layout predictable.
	Bucketer<String> perElementBucketer = new Bucketer<String>() {
		private static final long serialVersionUID = 1L;

		@Override
		public Path getBucketPath(Clock clock, Path basePath, String element) {
			return new Path(basePath, element);
		}
	};

	BucketingSink<String> bucketingSink = new BucketingSink<String>(outDir.getAbsolutePath())
			.setBucketer(perElementBucketer)
			.setWriter(new StringWriter<String>())
			.setInactiveBucketCheckInterval(inactivityInterval)
			.setInactiveBucketThreshold(inactivityInterval)
			.setPartPrefix(PART_PREFIX)
			.setInProgressPrefix("")
			.setPendingPrefix("")
			.setValidLengthPrefix("")
			.setInProgressSuffix(IN_PROGRESS_SUFFIX)
			.setPendingSuffix(PENDING_SUFFIX)
			.setValidLengthSuffix(VALID_LENGTH_SUFFIX)
			.setBatchRolloverInterval(rolloverInterval);

	return createTestSink(bucketingSink, totalParallelism, taskIdx);
}
Example 4
Source Project: flink Source File: BucketingSinkTest.java License: Apache License 2.0 | 6 votes |
/**
 * Builds a one-input operator test harness around a {@link BucketingSink} configured
 * for the rescaling tests: every record is routed to a bucket named after the record
 * itself, and the same {@code inactivityInterval} is used for both the inactive-bucket
 * check interval and the inactive-bucket threshold.
 *
 * @param outDir base directory the sink writes bucket files into
 * @param totalParallelism total parallelism of the simulated operator
 * @param taskIdx subtask index the harness runs as
 * @param inactivityInterval inactivity check interval and threshold, in milliseconds
 * @return the harness wrapping the configured sink
 * @throws Exception if the harness cannot be created
 */
private OneInputStreamOperatorTestHarness<String, Object> createRescalingTestSink(
		File outDir, int totalParallelism, int taskIdx, long inactivityInterval) throws Exception {

	// Bucket path == record value, which makes the resulting directory layout
	// trivial to assert on in the tests.
	Bucketer<String> perElementBucketer = new Bucketer<String>() {
		private static final long serialVersionUID = 1L;

		@Override
		public Path getBucketPath(Clock clock, Path basePath, String element) {
			return new Path(basePath, element);
		}
	};

	BucketingSink<String> bucketingSink = new BucketingSink<String>(outDir.getAbsolutePath())
			.setBucketer(perElementBucketer)
			.setWriter(new StringWriter<String>())
			.setInactiveBucketCheckInterval(inactivityInterval)
			.setInactiveBucketThreshold(inactivityInterval)
			.setPartPrefix(PART_PREFIX)
			.setInProgressPrefix("")
			.setPendingPrefix("")
			.setValidLengthPrefix("")
			.setInProgressSuffix(IN_PROGRESS_SUFFIX)
			.setPendingSuffix(PENDING_SUFFIX)
			.setValidLengthSuffix(VALID_LENGTH_SUFFIX);

	return createTestSink(bucketingSink, totalParallelism, taskIdx);
}
Example 5
Source Project: flink Source File: BucketingSinkTest.java License: Apache License 2.0 | 6 votes |
/**
 * Builds a one-input operator test harness around a {@link BucketingSink} with a
 * fixed five-minute inactivity check interval and threshold. Records are bucketed
 * by their own value so tests can assert on the directory layout directly.
 *
 * @param dataDir base directory the sink writes bucket files into
 * @param totalParallelism total parallelism of the simulated operator
 * @param taskIdx subtask index the harness runs as
 * @return the harness wrapping the configured sink
 * @throws Exception if the harness cannot be created
 */
private OneInputStreamOperatorTestHarness<String, Object> createTestSink(File dataDir, int totalParallelism, int taskIdx) throws Exception {
	// Route each record to a bucket named after the record itself.
	Bucketer<String> perElementBucketer = new Bucketer<String>() {
		private static final long serialVersionUID = 1L;

		@Override
		public Path getBucketPath(Clock clock, Path basePath, String element) {
			return new Path(basePath, element);
		}
	};

	// 5 * 60 * 1000L = five minutes, applied to both inactivity settings.
	BucketingSink<String> bucketingSink = new BucketingSink<String>(dataDir.getAbsolutePath())
			.setBucketer(perElementBucketer)
			.setWriter(new StringWriter<String>())
			.setPartPrefix(PART_PREFIX)
			.setPendingPrefix("")
			.setInactiveBucketCheckInterval(5 * 60 * 1000L)
			.setInactiveBucketThreshold(5 * 60 * 1000L)
			.setPendingSuffix(PENDING_SUFFIX)
			.setInProgressSuffix(IN_PROGRESS_SUFFIX);

	return createTestSink(bucketingSink, totalParallelism, taskIdx);
}
Example 6
Source Project: flink Source File: BucketingSinkTest.java License: Apache License 2.0 | 6 votes |
/**
 * Same configuration as {@code createRescalingTestSink}, but additionally sets a
 * batch rollover interval so part files are rolled on a timer as well.
 *
 * @param outDir base directory the sink writes bucket files into
 * @param totalParallelism total parallelism of the simulated operator
 * @param taskIdx subtask index the harness runs as
 * @param inactivityInterval inactivity check interval and threshold, in milliseconds
 * @param rolloverInterval batch rollover interval, in milliseconds
 * @return the harness wrapping the configured sink
 * @throws Exception if the harness cannot be created
 */
private OneInputStreamOperatorTestHarness<String, Object> createRescalingTestSinkWithRollover(
		File outDir, int totalParallelism, int taskIdx, long inactivityInterval, long rolloverInterval) throws Exception {

	// Bucket path == record value, keeping the on-disk layout predictable.
	Bucketer<String> perElementBucketer = new Bucketer<String>() {
		private static final long serialVersionUID = 1L;

		@Override
		public Path getBucketPath(Clock clock, Path basePath, String element) {
			return new Path(basePath, element);
		}
	};

	BucketingSink<String> bucketingSink = new BucketingSink<String>(outDir.getAbsolutePath())
			.setBucketer(perElementBucketer)
			.setWriter(new StringWriter<String>())
			.setInactiveBucketCheckInterval(inactivityInterval)
			.setInactiveBucketThreshold(inactivityInterval)
			.setPartPrefix(PART_PREFIX)
			.setInProgressPrefix("")
			.setPendingPrefix("")
			.setValidLengthPrefix("")
			.setInProgressSuffix(IN_PROGRESS_SUFFIX)
			.setPendingSuffix(PENDING_SUFFIX)
			.setValidLengthSuffix(VALID_LENGTH_SUFFIX)
			.setBatchRolloverInterval(rolloverInterval);

	return createTestSink(bucketingSink, totalParallelism, taskIdx);
}
Example 7
Source Project: flink Source File: BucketingSinkTest.java License: Apache License 2.0 | 6 votes |
/**
 * Builds a one-input operator test harness around a {@link BucketingSink} configured
 * for the rescaling tests: every record is routed to a bucket named after the record
 * itself, and the same {@code inactivityInterval} is used for both the inactive-bucket
 * check interval and the inactive-bucket threshold.
 *
 * @param outDir base directory the sink writes bucket files into
 * @param totalParallelism total parallelism of the simulated operator
 * @param taskIdx subtask index the harness runs as
 * @param inactivityInterval inactivity check interval and threshold, in milliseconds
 * @return the harness wrapping the configured sink
 * @throws Exception if the harness cannot be created
 */
private OneInputStreamOperatorTestHarness<String, Object> createRescalingTestSink(
		File outDir, int totalParallelism, int taskIdx, long inactivityInterval) throws Exception {

	// Bucket path == record value, which makes the resulting directory layout
	// trivial to assert on in the tests.
	Bucketer<String> perElementBucketer = new Bucketer<String>() {
		private static final long serialVersionUID = 1L;

		@Override
		public Path getBucketPath(Clock clock, Path basePath, String element) {
			return new Path(basePath, element);
		}
	};

	BucketingSink<String> bucketingSink = new BucketingSink<String>(outDir.getAbsolutePath())
			.setBucketer(perElementBucketer)
			.setWriter(new StringWriter<String>())
			.setInactiveBucketCheckInterval(inactivityInterval)
			.setInactiveBucketThreshold(inactivityInterval)
			.setPartPrefix(PART_PREFIX)
			.setInProgressPrefix("")
			.setPendingPrefix("")
			.setValidLengthPrefix("")
			.setInProgressSuffix(IN_PROGRESS_SUFFIX)
			.setPendingSuffix(PENDING_SUFFIX)
			.setValidLengthSuffix(VALID_LENGTH_SUFFIX);

	return createTestSink(bucketingSink, totalParallelism, taskIdx);
}
Example 8
Source Project: flink Source File: BucketingSinkTest.java License: Apache License 2.0 | 6 votes |
/**
 * Builds a one-input operator test harness around a {@link BucketingSink} with a
 * fixed five-minute inactivity check interval and threshold. Records are bucketed
 * by their own value so tests can assert on the directory layout directly.
 *
 * @param dataDir base directory the sink writes bucket files into
 * @param totalParallelism total parallelism of the simulated operator
 * @param taskIdx subtask index the harness runs as
 * @return the harness wrapping the configured sink
 * @throws Exception if the harness cannot be created
 */
private OneInputStreamOperatorTestHarness<String, Object> createTestSink(File dataDir, int totalParallelism, int taskIdx) throws Exception {
	// Route each record to a bucket named after the record itself.
	Bucketer<String> perElementBucketer = new Bucketer<String>() {
		private static final long serialVersionUID = 1L;

		@Override
		public Path getBucketPath(Clock clock, Path basePath, String element) {
			return new Path(basePath, element);
		}
	};

	// 5 * 60 * 1000L = five minutes, applied to both inactivity settings.
	BucketingSink<String> bucketingSink = new BucketingSink<String>(dataDir.getAbsolutePath())
			.setBucketer(perElementBucketer)
			.setWriter(new StringWriter<String>())
			.setPartPrefix(PART_PREFIX)
			.setPendingPrefix("")
			.setInactiveBucketCheckInterval(5 * 60 * 1000L)
			.setInactiveBucketThreshold(5 * 60 * 1000L)
			.setPendingSuffix(PENDING_SUFFIX)
			.setInProgressSuffix(IN_PROGRESS_SUFFIX);

	return createTestSink(bucketingSink, totalParallelism, taskIdx);
}
Example 9
Source Project: flink Source File: BucketingSinkTest.java License: Apache License 2.0 | 6 votes |
/**
 * Same configuration as {@code createRescalingTestSink}, but additionally sets a
 * batch rollover interval so part files are rolled on a timer as well.
 *
 * @param outDir base directory the sink writes bucket files into
 * @param totalParallelism total parallelism of the simulated operator
 * @param taskIdx subtask index the harness runs as
 * @param inactivityInterval inactivity check interval and threshold, in milliseconds
 * @param rolloverInterval batch rollover interval, in milliseconds
 * @return the harness wrapping the configured sink
 * @throws Exception if the harness cannot be created
 */
private OneInputStreamOperatorTestHarness<String, Object> createRescalingTestSinkWithRollover(
		File outDir, int totalParallelism, int taskIdx, long inactivityInterval, long rolloverInterval) throws Exception {

	// Bucket path == record value, keeping the on-disk layout predictable.
	Bucketer<String> perElementBucketer = new Bucketer<String>() {
		private static final long serialVersionUID = 1L;

		@Override
		public Path getBucketPath(Clock clock, Path basePath, String element) {
			return new Path(basePath, element);
		}
	};

	BucketingSink<String> bucketingSink = new BucketingSink<String>(outDir.getAbsolutePath())
			.setBucketer(perElementBucketer)
			.setWriter(new StringWriter<String>())
			.setInactiveBucketCheckInterval(inactivityInterval)
			.setInactiveBucketThreshold(inactivityInterval)
			.setPartPrefix(PART_PREFIX)
			.setInProgressPrefix("")
			.setPendingPrefix("")
			.setValidLengthPrefix("")
			.setInProgressSuffix(IN_PROGRESS_SUFFIX)
			.setPendingSuffix(PENDING_SUFFIX)
			.setValidLengthSuffix(VALID_LENGTH_SUFFIX)
			.setBatchRolloverInterval(rolloverInterval);

	return createTestSink(bucketingSink, totalParallelism, taskIdx);
}
Example 10
Source Project: Flink-CEPplus Source File: BucketingSinkMigrationTest.java License: Apache License 2.0 | 5 votes |
/**
 * Manually run this to write binary snapshot data. Remove @Ignore to run.
 *
 * <p>Feeds five records through a {@link BucketingSink} and persists the resulting
 * operator state to {@code src/test/resources/...}, to be restored by the
 * {@code testRestore} migration test in a later Flink version.
 */
@Ignore
@Test
public void writeSnapshot() throws Exception {
	final File outDir = tempFolder.newFolder();

	// Batch size of 5 with empty in-progress/pending/valid-length prefixes so file
	// names are fully determined by PART_PREFIX and the configured suffixes.
	BucketingSink<String> sink = new BucketingSink<String>(outDir.getAbsolutePath())
		.setWriter(new StringWriter<String>())
		.setBatchSize(5)
		.setPartPrefix(PART_PREFIX)
		.setInProgressPrefix("")
		.setPendingPrefix("")
		.setValidLengthPrefix("")
		.setInProgressSuffix(IN_PROGRESS_SUFFIX)
		.setPendingSuffix(PENDING_SUFFIX)
		.setValidLengthSuffix(VALID_LENGTH_SUFFIX);

	OneInputStreamOperatorTestHarness<String, Object> testHarness =
		new OneInputStreamOperatorTestHarness<>(new StreamSink<>(sink));

	testHarness.setup();
	testHarness.open();

	testHarness.processElement(new StreamRecord<>("test1", 0L));
	testHarness.processElement(new StreamRecord<>("test2", 0L));

	// Verify on-disk state after two records (see checkLocalFs for what the four
	// counts mean — presumably bucket/file-state counts; confirm against its impl).
	checkLocalFs(outDir, 1, 1, 0, 0);

	testHarness.processElement(new StreamRecord<>("test3", 0L));
	testHarness.processElement(new StreamRecord<>("test4", 0L));
	testHarness.processElement(new StreamRecord<>("test5", 0L));

	checkLocalFs(outDir, 1, 4, 0, 0);

	// Snapshot the operator state and write it to the versioned resource file
	// consumed by the restore test.
	OperatorSubtaskState snapshot = testHarness.snapshot(0L, 0L);
	OperatorSnapshotUtil.writeStateHandle(snapshot, "src/test/resources/bucketing-sink-migration-test-flink" + flinkGenerateSavepointVersion + "-snapshot");

	testHarness.close();
}
Example 11
Source Project: Flink-CEPplus Source File: BucketingSinkMigrationTest.java License: Apache License 2.0 | 5 votes |
/**
 * Restores operator state from a snapshot written by {@code writeSnapshot} on an
 * older Flink version and verifies the sink re-initializes and keeps writing.
 */
@Test
public void testRestore() throws Exception {
	final File outDir = tempFolder.newFolder();

	// ValidatingBucketingSink records whether initializeState was invoked; the
	// configuration mirrors the one used when the snapshot was written.
	ValidatingBucketingSink<String> sink = (ValidatingBucketingSink<String>)
		new ValidatingBucketingSink<String>(outDir.getAbsolutePath(), expectedBucketFilesPrefix)
		.setWriter(new StringWriter<String>())
		.setBatchSize(5)
		.setPartPrefix(PART_PREFIX)
		.setInProgressPrefix("")
		.setPendingPrefix("")
		.setValidLengthPrefix("")
		.setInProgressSuffix(IN_PROGRESS_SUFFIX)
		.setPendingSuffix(PENDING_SUFFIX)
		.setValidLengthSuffix(VALID_LENGTH_SUFFIX)
		.setUseTruncate(false); // don't use truncate because files do not exist

	OneInputStreamOperatorTestHarness<String, Object> testHarness = new OneInputStreamOperatorTestHarness<>(
		new StreamSink<>(sink), 10, 1, 0);
	testHarness.setup();

	// Load the binary snapshot for the migration version under test before opening.
	testHarness.initializeState(
		OperatorSnapshotUtil.getResourceFilename(
			"bucketing-sink-migration-test-flink" + testMigrateVersion + "-snapshot"));
	testHarness.open();

	// The restore path must have gone through the sink's initializeState.
	assertTrue(sink.initializeCalled);

	testHarness.processElement(new StreamRecord<>("test1", 0L));
	testHarness.processElement(new StreamRecord<>("test2", 0L));

	checkLocalFs(outDir, 1, 1, 0, 0);

	testHarness.close();
}
Example 12
Source Project: flink Source File: BucketingSinkMigrationTest.java License: Apache License 2.0 | 5 votes |
/**
 * Manually run this to write binary snapshot data. Remove @Ignore to run.
 *
 * <p>Feeds five records through a {@link BucketingSink} and persists the resulting
 * operator state to {@code src/test/resources/...}, to be restored by the
 * {@code testRestore} migration test in a later Flink version.
 */
@Ignore
@Test
public void writeSnapshot() throws Exception {
	final File outDir = tempFolder.newFolder();

	// Batch size of 5 with empty in-progress/pending/valid-length prefixes so file
	// names are fully determined by PART_PREFIX and the configured suffixes.
	BucketingSink<String> sink = new BucketingSink<String>(outDir.getAbsolutePath())
		.setWriter(new StringWriter<String>())
		.setBatchSize(5)
		.setPartPrefix(PART_PREFIX)
		.setInProgressPrefix("")
		.setPendingPrefix("")
		.setValidLengthPrefix("")
		.setInProgressSuffix(IN_PROGRESS_SUFFIX)
		.setPendingSuffix(PENDING_SUFFIX)
		.setValidLengthSuffix(VALID_LENGTH_SUFFIX);

	OneInputStreamOperatorTestHarness<String, Object> testHarness =
		new OneInputStreamOperatorTestHarness<>(new StreamSink<>(sink));

	testHarness.setup();
	testHarness.open();

	testHarness.processElement(new StreamRecord<>("test1", 0L));
	testHarness.processElement(new StreamRecord<>("test2", 0L));

	// Verify on-disk state after two records (see checkLocalFs for what the four
	// counts mean — presumably bucket/file-state counts; confirm against its impl).
	checkLocalFs(outDir, 1, 1, 0, 0);

	testHarness.processElement(new StreamRecord<>("test3", 0L));
	testHarness.processElement(new StreamRecord<>("test4", 0L));
	testHarness.processElement(new StreamRecord<>("test5", 0L));

	checkLocalFs(outDir, 1, 4, 0, 0);

	// Snapshot the operator state and write it to the versioned resource file
	// consumed by the restore test.
	OperatorSubtaskState snapshot = testHarness.snapshot(0L, 0L);
	OperatorSnapshotUtil.writeStateHandle(snapshot, "src/test/resources/bucketing-sink-migration-test-flink" + flinkGenerateSavepointVersion + "-snapshot");

	testHarness.close();
}
Example 13
Source Project: flink Source File: BucketingSinkMigrationTest.java License: Apache License 2.0 | 5 votes |
/**
 * Restores operator state from a snapshot written by {@code writeSnapshot} on an
 * older Flink version and verifies the sink re-initializes and keeps writing.
 */
@Test
public void testRestore() throws Exception {
	final File outDir = tempFolder.newFolder();

	// ValidatingBucketingSink records whether initializeState was invoked; the
	// configuration mirrors the one used when the snapshot was written.
	ValidatingBucketingSink<String> sink = (ValidatingBucketingSink<String>)
		new ValidatingBucketingSink<String>(outDir.getAbsolutePath(), expectedBucketFilesPrefix)
		.setWriter(new StringWriter<String>())
		.setBatchSize(5)
		.setPartPrefix(PART_PREFIX)
		.setInProgressPrefix("")
		.setPendingPrefix("")
		.setValidLengthPrefix("")
		.setInProgressSuffix(IN_PROGRESS_SUFFIX)
		.setPendingSuffix(PENDING_SUFFIX)
		.setValidLengthSuffix(VALID_LENGTH_SUFFIX)
		.setUseTruncate(false); // don't use truncate because files do not exist

	OneInputStreamOperatorTestHarness<String, Object> testHarness = new OneInputStreamOperatorTestHarness<>(
		new StreamSink<>(sink), 10, 1, 0);
	testHarness.setup();

	// Load the binary snapshot for the migration version under test before opening.
	testHarness.initializeState(
		OperatorSnapshotUtil.getResourceFilename(
			"bucketing-sink-migration-test-flink" + testMigrateVersion + "-snapshot"));
	testHarness.open();

	// The restore path must have gone through the sink's initializeState.
	assertTrue(sink.initializeCalled);

	testHarness.processElement(new StreamRecord<>("test1", 0L));
	testHarness.processElement(new StreamRecord<>("test2", 0L));

	checkLocalFs(outDir, 1, 1, 0, 0);

	testHarness.close();
}
Example 14
Source Project: flink Source File: BucketingSinkMigrationTest.java License: Apache License 2.0 | 5 votes |
/**
 * Manually run this to write binary snapshot data. Remove @Ignore to run.
 *
 * <p>Feeds five records through a {@link BucketingSink} and persists the resulting
 * operator state to {@code src/test/resources/...}, to be restored by the
 * {@code testRestore} migration test in a later Flink version.
 */
@Ignore
@Test
public void writeSnapshot() throws Exception {
	final File outDir = tempFolder.newFolder();

	// Batch size of 5 with empty in-progress/pending/valid-length prefixes so file
	// names are fully determined by PART_PREFIX and the configured suffixes.
	BucketingSink<String> sink = new BucketingSink<String>(outDir.getAbsolutePath())
		.setWriter(new StringWriter<String>())
		.setBatchSize(5)
		.setPartPrefix(PART_PREFIX)
		.setInProgressPrefix("")
		.setPendingPrefix("")
		.setValidLengthPrefix("")
		.setInProgressSuffix(IN_PROGRESS_SUFFIX)
		.setPendingSuffix(PENDING_SUFFIX)
		.setValidLengthSuffix(VALID_LENGTH_SUFFIX);

	OneInputStreamOperatorTestHarness<String, Object> testHarness =
		new OneInputStreamOperatorTestHarness<>(new StreamSink<>(sink));

	testHarness.setup();
	testHarness.open();

	testHarness.processElement(new StreamRecord<>("test1", 0L));
	testHarness.processElement(new StreamRecord<>("test2", 0L));

	// Verify on-disk state after two records (see checkLocalFs for what the four
	// counts mean — presumably bucket/file-state counts; confirm against its impl).
	checkLocalFs(outDir, 1, 1, 0, 0);

	testHarness.processElement(new StreamRecord<>("test3", 0L));
	testHarness.processElement(new StreamRecord<>("test4", 0L));
	testHarness.processElement(new StreamRecord<>("test5", 0L));

	checkLocalFs(outDir, 1, 4, 0, 0);

	// Snapshot the operator state and write it to the versioned resource file
	// consumed by the restore test.
	OperatorSubtaskState snapshot = testHarness.snapshot(0L, 0L);
	OperatorSnapshotUtil.writeStateHandle(snapshot, "src/test/resources/bucketing-sink-migration-test-flink" + flinkGenerateSavepointVersion + "-snapshot");

	testHarness.close();
}
Example 15
Source Project: flink Source File: BucketingSinkMigrationTest.java License: Apache License 2.0 | 5 votes |
/**
 * Restores operator state from a snapshot written by {@code writeSnapshot} on an
 * older Flink version and verifies the sink re-initializes and keeps writing.
 */
@Test
public void testRestore() throws Exception {
	final File outDir = tempFolder.newFolder();

	// ValidatingBucketingSink records whether initializeState was invoked; the
	// configuration mirrors the one used when the snapshot was written.
	ValidatingBucketingSink<String> sink = (ValidatingBucketingSink<String>)
		new ValidatingBucketingSink<String>(outDir.getAbsolutePath(), expectedBucketFilesPrefix)
		.setWriter(new StringWriter<String>())
		.setBatchSize(5)
		.setPartPrefix(PART_PREFIX)
		.setInProgressPrefix("")
		.setPendingPrefix("")
		.setValidLengthPrefix("")
		.setInProgressSuffix(IN_PROGRESS_SUFFIX)
		.setPendingSuffix(PENDING_SUFFIX)
		.setValidLengthSuffix(VALID_LENGTH_SUFFIX)
		.setUseTruncate(false); // don't use truncate because files do not exist

	OneInputStreamOperatorTestHarness<String, Object> testHarness = new OneInputStreamOperatorTestHarness<>(
		new StreamSink<>(sink), 10, 1, 0);
	testHarness.setup();

	// Load the binary snapshot for the migration version under test before opening.
	testHarness.initializeState(
		OperatorSnapshotUtil.getResourceFilename(
			"bucketing-sink-migration-test-flink" + testMigrateVersion + "-snapshot"));
	testHarness.open();

	// The restore path must have gone through the sink's initializeState.
	assertTrue(sink.initializeCalled);

	testHarness.processElement(new StreamRecord<>("test1", 0L));
	testHarness.processElement(new StreamRecord<>("test2", 0L));

	checkLocalFs(outDir, 1, 1, 0, 0);

	testHarness.close();
}
Example 16
Source Project: Flink-CEPplus Source File: BucketingSink.java License: Apache License 2.0 | 2 votes |
/**
 * Creates a new {@code BucketingSink} that writes files to the given base directory.
 *
 * <p>This uses a {@link DateTimeBucketer} as {@link Bucketer} and a {@link StringWriter} as writer.
 * The maximum bucket size is set to 384 MB.
 *
 * @param basePath The directory to which to write the bucket files.
 */
public BucketingSink(String basePath) {
	this.basePath = basePath;
	this.bucketer = new DateTimeBucketer<>();
	this.writerTemplate = new StringWriter<>();
}
Example 17
Source Project: flink Source File: BucketingSink.java License: Apache License 2.0 | 2 votes |
/**
 * Creates a new {@code BucketingSink} that writes files to the given base directory.
 *
 * <p>This uses a {@link DateTimeBucketer} as {@link Bucketer} and a {@link StringWriter} as writer.
 * The maximum bucket size is set to 384 MB.
 *
 * @param basePath The directory to which to write the bucket files.
 */
public BucketingSink(String basePath) {
	this.basePath = basePath;
	this.bucketer = new DateTimeBucketer<>();
	this.writerTemplate = new StringWriter<>();
}
Example 18
Source Project: flink Source File: BucketingSink.java License: Apache License 2.0 | 2 votes |
/**
 * Creates a new {@code BucketingSink} that writes files to the given base directory.
 *
 * <p>This uses a {@link DateTimeBucketer} as {@link Bucketer} and a {@link StringWriter} as writer.
 * The maximum bucket size is set to 384 MB.
 *
 * @param basePath The directory to which to write the bucket files.
 */
public BucketingSink(String basePath) {
	this.basePath = basePath;
	this.bucketer = new DateTimeBucketer<>();
	this.writerTemplate = new StringWriter<>();
}