Java Code Examples for org.apache.beam.sdk.testing.TestPipeline#testingPipelineOptions()
The following examples show how to use
org.apache.beam.sdk.testing.TestPipeline#testingPipelineOptions() .
You can vote up the examples you find helpful or vote down the ones you don't,
and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example 1
Source File: UnboundedEventSourceTest.java From beam with Apache License 2.0 | 6 votes |
/**
 * Verifies that aggressively checkpointing and resuming a reader yields exactly the same
 * event stream as reading straight through without interruption.
 */
@Ignore("TODO(BEAM-5070) Test is flaky. Fix before reenabling.")
@Test
public void resumeFromCheckpoint() throws IOException {
  Random random = new Random(297);
  int remaining = 47293;
  GeneratorConfig config = makeConfig(remaining);
  Generator modelGenerator = new Generator(config);
  EventIdChecker checker = new EventIdChecker();
  PipelineOptions options = TestPipeline.testingPipelineOptions();
  UnboundedEventSource source = new UnboundedEventSource(config, 1, 0, false);
  UnboundedReader<Event> reader = source.createReader(options, null);

  while (remaining > 0) {
    // Consume a random-sized batch, checking ids against the model generator, then
    // checkpoint and resume from a brand-new reader built from that checkpoint.
    int batch = Math.min(459 + random.nextInt(455), remaining);
    System.out.printf("reading %d...%n", batch);
    checker.add(batch, reader, modelGenerator);
    remaining -= batch;
    System.out.printf("splitting with %d remaining...%n", remaining);
    CheckpointMark checkpointMark = reader.getCheckpointMark();
    reader = source.createReader(options, (GeneratorCheckpoint) checkpointMark);
  }

  // All events were consumed; the final resumed reader must have nothing left.
  assertFalse(reader.advance());
}
Example 2
Source File: PipelineTest.java From beam with Apache License 2.0 | 6 votes |
@Test public void testPipelineSDKExceptionHandling() { PipelineOptions options = TestPipeline.testingPipelineOptions(); options.setRunner(TestPipelineRunnerThrowingSdkException.class); Pipeline p = Pipeline.create(options); // Check pipeline runner correctly catches SDK errors. try { p.run(); fail("Should have thrown an exception."); } catch (RuntimeException exn) { // Make sure the exception isn't a UserCodeException. assertThat(exn, not(instanceOf(UserCodeException.class))); // Assert that the message is correct. assertThat(exn.getMessage(), containsString("SDK exception")); // RuntimeException should be IllegalStateException. assertThat(exn, instanceOf(IllegalStateException.class)); } }
Example 3
Source File: TextIOReadTest.java From beam with Apache License 2.0 | 6 votes |
@Test public void testInitialSplitAutoModeTxt() throws Exception { PipelineOptions options = TestPipeline.testingPipelineOptions(); long desiredBundleSize = 1000; File largeTxt = writeToFile(LARGE, tempFolder, "large.txt", UNCOMPRESSED); // Sanity check: file is at least 2 bundles long. assertThat(largeTxt.length(), greaterThan(2 * desiredBundleSize)); FileBasedSource<String> source = TextIO.read().from(largeTxt.getPath()).getSource(); List<? extends FileBasedSource<String>> splits = source.split(desiredBundleSize, options); // At least 2 splits and they are equal to reading the whole file. assertThat(splits, hasSize(greaterThan(1))); SourceTestUtils.assertSourcesEqualReferenceSource(source, splits, options); }
Example 4
Source File: TextIOReadTest.java From beam with Apache License 2.0 | 6 votes |
@Test public void testInitialSplitGzipModeTxt() throws Exception { PipelineOptions options = TestPipeline.testingPipelineOptions(); long desiredBundleSize = 1000; File largeTxt = writeToFile(LARGE, tempFolder, "large.txt", UNCOMPRESSED); // Sanity check: file is at least 2 bundles long. assertThat(largeTxt.length(), greaterThan(2 * desiredBundleSize)); FileBasedSource<String> source = TextIO.read().from(largeTxt.getPath()).withCompression(GZIP).getSource(); List<? extends FileBasedSource<String>> splits = source.split(desiredBundleSize, options); // Exactly 1 split, even though splittable text file, since using GZIP mode. assertThat(splits, hasSize(equalTo(1))); SourceTestUtils.assertSourcesEqualReferenceSource(source, splits, options); }
Example 5
Source File: BigQueryIOReadTest.java From beam with Apache License 2.0 | 6 votes |
@Override public Statement apply(final Statement base, final Description description) { // We need to set up the temporary folder, and then set up the TestPipeline based on the // chosen folder. Unfortunately, since rule evaluation order is unspecified and unrelated // to field order, and is separate from construction, that requires manually creating this // TestRule. Statement withPipeline = new Statement() { @Override public void evaluate() throws Throwable { options = TestPipeline.testingPipelineOptions(); options.as(BigQueryOptions.class).setProject("project-id"); options .as(BigQueryOptions.class) .setTempLocation(testFolder.getRoot().getAbsolutePath()); p = TestPipeline.fromOptions(options); p.apply(base, description).evaluate(); } }; return testFolder.apply(withPipeline, description); }
Example 6
Source File: BigQueryIOStorageReadTest.java From beam with Apache License 2.0 | 6 votes |
@Override public Statement apply(Statement base, Description description) { // We need to set up the temporary folder, and then set up the TestPipeline based on the // chosen folder. Unfortunately, since rule evaluation order is unspecified and unrelated // to field order, and is separate from construction, that requires manually creating this // TestRule. Statement withPipeline = new Statement() { @Override public void evaluate() throws Throwable { options = TestPipeline.testingPipelineOptions(); options.as(BigQueryOptions.class).setProject("project-id"); options .as(BigQueryOptions.class) .setTempLocation(testFolder.getRoot().getAbsolutePath()); p = TestPipeline.fromOptions(options); p.apply(base, description).evaluate(); } }; return testFolder.apply(withPipeline, description); }
Example 7
Source File: BigQueryIOWriteTest.java From beam with Apache License 2.0 | 6 votes |
@Override public Statement apply(final Statement base, final Description description) { // We need to set up the temporary folder, and then set up the TestPipeline based on the // chosen folder. Unfortunately, since rule evaluation order is unspecified and unrelated // to field order, and is separate from construction, that requires manually creating this // TestRule. Statement withPipeline = new Statement() { @Override public void evaluate() throws Throwable { options = TestPipeline.testingPipelineOptions(); options.as(BigQueryOptions.class).setProject("project-id"); options .as(BigQueryOptions.class) .setTempLocation(testFolder.getRoot().getAbsolutePath()); p = TestPipeline.fromOptions(options); p.apply(base, description).evaluate(); } }; return testFolder.apply(withPipeline, description); }
Example 8
Source File: PubsubIOTest.java From beam with Apache License 2.0 | 6 votes |
@Override public Statement apply(final Statement base, final Description description) { // We need to set up the temporary folder, and then set up the TestPipeline based on the // chosen folder. Unfortunately, since rule evaluation order is unspecified and unrelated // to field order, and is separate from construction, that requires manually creating this // TestRule. Statement withPipeline = new Statement() { @Override public void evaluate() throws Throwable { options = TestPipeline.testingPipelineOptions(); options.as(PubsubOptions.class).setProject("test-project"); readPipeline = TestPipeline.fromOptions(options); readPipeline.apply(base, description).evaluate(); } }; return withPipeline; }
Example 9
Source File: JvmInitializersTest.java From beam with Apache License 2.0 | 5 votes |
/** Initializers registered for before-processing must run and receive the same options. */
@Test
public void runBeforeProcessing_runsInitializersWithOptions() {
  PipelineOptions pipelineOptions = TestPipeline.testingPipelineOptions();

  JvmInitializers.runBeforeProcessing(pipelineOptions);

  assertTrue(beforeProcessingRan);
  assertEquals(pipelineOptions, receivedOptions);
}
Example 10
Source File: PipelineTest.java From beam with Apache License 2.0 | 5 votes |
@Test public void testPipelineUserExceptionHandling() { PipelineOptions options = TestPipeline.testingPipelineOptions(); options.setRunner(TestPipelineRunnerThrowingUserException.class); Pipeline p = Pipeline.create(options); // Check pipeline runner correctly catches user errors. thrown.expect(PipelineExecutionException.class); thrown.expectCause(isA(IllegalStateException.class)); thrown.expectMessage("user code exception"); p.run(); }
Example 11
Source File: TextIOReadTest.java From beam with Apache License 2.0 | 5 votes |
@Test public void testInitialSplitAutoModeGz() throws Exception { PipelineOptions options = TestPipeline.testingPipelineOptions(); long desiredBundleSize = 1000; File largeGz = writeToFile(LARGE, tempFolder, "large.gz", GZIP); // Sanity check: file is at least 2 bundles long. assertThat(largeGz.length(), greaterThan(2 * desiredBundleSize)); FileBasedSource<String> source = TextIO.read().from(largeGz.getPath()).getSource(); List<? extends FileBasedSource<String>> splits = source.split(desiredBundleSize, options); // Exactly 1 split, even in AUTO mode, since it is a gzip file. assertThat(splits, hasSize(equalTo(1))); SourceTestUtils.assertSourcesEqualReferenceSource(source, splits, options); }
Example 12
Source File: BigQuerySinkTest.java From feast with Apache License 2.0 | 4 votes |
/** Builds test pipeline options with a fixed temp location under {@code /tmp/feast}. */
public static PipelineOptions makePipelineOptions() {
  PipelineOptions opts = TestPipeline.testingPipelineOptions();
  opts.setTempLocation("/tmp/feast");
  return opts;
}
Example 13
Source File: DisplayDataEvaluator.java From beam with Apache License 2.0 | 4 votes |
/**
 * Returns the default {@link PipelineOptions} used by {@link #create()}.
 *
 * <p>Simply delegates to {@link TestPipeline#testingPipelineOptions()}.
 */
public static PipelineOptions getDefaultOptions() {
  return TestPipeline.testingPipelineOptions();
}
Example 14
Source File: DataflowRunnerTest.java From beam with Apache License 2.0 | 4 votes |
/** With maxNumWorkers set, the sharded-write override must use 2x that worker count. */
@Test
public void testStreamingWriteWithNoShardingReturnsNewTransform() {
  PipelineOptions testOptions = TestPipeline.testingPipelineOptions();
  testOptions.as(DataflowPipelineWorkerPoolOptions.class).setMaxNumWorkers(10);
  testStreamingWriteOverride(testOptions, 20);
}
Example 15
Source File: DataflowRunnerTest.java From beam with Apache License 2.0 | 4 votes |
/** Without maxNumWorkers, the sharded-write override must fall back to the default shard count. */
@Test
public void testStreamingWriteWithNoShardingReturnsNewTransformMaxWorkersUnset() {
  PipelineOptions testOptions = TestPipeline.testingPipelineOptions();
  testStreamingWriteOverride(testOptions, StreamingShardedWriteFactory.DEFAULT_NUM_SHARDS);
}