Java Code Examples for org.apache.beam.runners.dataflow.options.DataflowPipelineOptions#setPathValidatorClass()

The following examples show how to use org.apache.beam.runners.dataflow.options.DataflowPipelineOptions#setPathValidatorClass(). The original project and source file for each example are noted above it.
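As the examples below illustrate, setPathValidatorClass() is typically called in tests to install NoopPathValidator so that GCS-style paths (such as gs://staging) are accepted without being checked against a real bucket. A minimal sketch of that common pattern, assuming the same classes imported by the test files below and purely illustrative project/region values, might look like this:

DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
options.setRunner(DataflowRunner.class);
options.setProject("someproject");           // illustrative project id
options.setRegion("some-region1");           // illustrative region
options.setGcpTempLocation("gs://staging");  // not validated, thanks to the validator set below
options.setPathValidatorClass(NoopPathValidator.class); // skip GCS path validation in tests
Pipeline p = Pipeline.create(options);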
Example 1
Source File: DataflowRunnerTest.java    From beam with Apache License 2.0
/**
 * Tests that the {@link DataflowRunner} with {@code --templateLocation} returns normally when the
 * runner is successfully run.
 */
@Test
public void testTemplateRunnerFullCompletion() throws Exception {
  File existingFile = tmpFolder.newFile();
  DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  options.setJobName("TestJobName");
  options.setGcpCredential(new TestCredential());
  options.setPathValidatorClass(NoopPathValidator.class);
  options.setProject("test-project");
  options.setRegion(REGION_ID);
  options.setRunner(DataflowRunner.class);
  options.setTemplateLocation(existingFile.getPath());
  options.setTempLocation(tmpFolder.getRoot().getPath());
  Pipeline p = Pipeline.create(options);

  p.run();
  expectedLogs.verifyInfo("Template successfully created");
}
 
Example 2
Source File: DataflowRunnerTest.java    From beam with Apache License 2.0
/**
 * Tests that the {@link DataflowRunner} with {@code --templateLocation} returns normally when the
 * runner is successfully run with the upload_graph experiment turned on. The resulting template
 * should not contain raw steps, and the stepsLocation field should be set.
 */
@Test
public void testTemplateRunnerWithUploadGraph() throws Exception {
  File existingFile = tmpFolder.newFile();
  DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  options.setExperiments(Arrays.asList("upload_graph"));
  options.setJobName("TestJobName");
  options.setGcpCredential(new TestCredential());
  options.setPathValidatorClass(NoopPathValidator.class);
  options.setProject("test-project");
  options.setRegion(REGION_ID);
  options.setRunner(DataflowRunner.class);
  options.setTemplateLocation(existingFile.getPath());
  options.setTempLocation(tmpFolder.getRoot().getPath());
  Pipeline p = Pipeline.create(options);
  p.apply(Create.of(ImmutableList.of(1)));
  p.run();
  expectedLogs.verifyInfo("Template successfully created");
  ObjectMapper objectMapper = new ObjectMapper();
  JsonNode node = objectMapper.readTree(existingFile);
  assertEquals(0, node.get("steps").size());
  assertNotNull(node.get("stepsLocation"));
}
 
Example 3
Source File: DataflowRunnerTest.java    From beam with Apache License 2.0
/**
 * Tests that the {@link DataflowRunner} with {@code --templateLocation} throws the appropriate
 * exception when an output file is not writable.
 */
@Test
public void testTemplateRunnerLoggedErrorForFile() throws Exception {
  DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  options.setJobName("TestJobName");
  options.setRunner(DataflowRunner.class);
  options.setTemplateLocation("//bad/path");
  options.setProject("test-project");
  options.setRegion(REGION_ID);
  options.setTempLocation(tmpFolder.getRoot().getPath());
  options.setGcpCredential(new TestCredential());
  options.setPathValidatorClass(NoopPathValidator.class);
  Pipeline p = Pipeline.create(options);

  thrown.expectMessage("Cannot create output file at");
  thrown.expect(RuntimeException.class);
  p.run();
}
 
Example 4
Source File: DataflowGroupByKeyTest.java    From beam with Apache License 2.0
/**
 * Create a test pipeline that uses the {@link DataflowRunner} so that {@link GroupByKey} is not
 * expanded. This is used for verifying that even without expansion the proper errors show up.
 */
private Pipeline createTestServiceRunner() {
  DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  options.setRunner(DataflowRunner.class);
  options.setProject("someproject");
  options.setRegion("some-region1");
  options.setGcpTempLocation("gs://staging");
  options.setPathValidatorClass(NoopPathValidator.class);
  options.setDataflowClient(dataflow);
  return Pipeline.create(options);
}
 
Example 5
Source File: DataflowViewTest.java    From beam with Apache License 2.0
private Pipeline createTestBatchRunner() {
  DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  options.setRunner(DataflowRunner.class);
  options.setProject("someproject");
  options.setRegion("some-region1");
  options.setGcpTempLocation("gs://staging");
  options.setPathValidatorClass(NoopPathValidator.class);
  options.setDataflowClient(dataflow);
  return Pipeline.create(options);
}
 
Example 6
Source File: DataflowViewTest.java    From beam with Apache License 2.0
private Pipeline createTestStreamingRunner() {
  DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  options.setRunner(DataflowRunner.class);
  options.setStreaming(true);
  options.setProject("someproject");
  options.setRegion("some-region1");
  options.setGcpTempLocation("gs://staging");
  options.setPathValidatorClass(NoopPathValidator.class);
  options.setDataflowClient(dataflow);
  return Pipeline.create(options);
}
 
Example 7
Source File: DataflowRunnerTest.java    From beam with Apache License 2.0
@Test
public void testToString() {
  DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  options.setJobName("TestJobName");
  options.setProject("test-project");
  options.setRegion(REGION_ID);
  options.setTempLocation("gs://test/temp/location");
  options.setGcpCredential(new TestCredential());
  options.setPathValidatorClass(NoopPathValidator.class);
  options.setRunner(DataflowRunner.class);
  assertEquals("DataflowRunner#testjobname", DataflowRunner.fromOptions(options).toString());
}
 
Example 8
Source File: WorkerCustomSourcesSplitOnlySourceTest.java    From beam with Apache License 2.0
@Test
public void testAllSplitsAreReturned() throws Exception {
  final long apiSizeLimitForTest = 500 * 1024;
  DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  options.setAppName("TestAppName");
  options.setProject("test-project");
  options.setRegion("some-region1");
  options.setTempLocation("gs://test/temp/location");
  options.setGcpCredential(new TestCredential());
  options.setRunner(DataflowRunner.class);
  options.setPathValidatorClass(NoopPathValidator.class);
  // Generate a CountingSource and split it into the desired number of splits
  // (desired size = 1 byte), triggering the re-split with a larger bundle size.
  // Thus below we expect to produce 'numberOfSplits' splits.
  com.google.api.services.dataflow.model.Source source =
      WorkerCustomSourcesTest.translateIOToCloudSource(
          CountingSource.upTo(numberOfSplits), options);
  SourceSplitResponse split =
      WorkerCustomSourcesTest.performSplit(
          source, options, 1L, null /* numBundles limit */, apiSizeLimitForTest);
  assertThat(
      split.getBundles().size(),
      lessThanOrEqualTo(WorkerCustomSources.DEFAULT_NUM_BUNDLES_LIMIT));

  List<OffsetBasedSource<?>> originalSplits = new ArrayList<>(numberOfSplits);
  // Collect all the splits
  for (DerivedSource derivedSource : split.getBundles()) {
    Object deserializedSource =
        WorkerCustomSources.deserializeFromCloudSource(derivedSource.getSource().getSpec());
    if (deserializedSource instanceof SplittableOnlyBoundedSource) {
      SplittableOnlyBoundedSource<?> splittableOnlySource =
          (SplittableOnlyBoundedSource<?>) deserializedSource;
      originalSplits.addAll((List) splittableOnlySource.split(1L, options));
    } else {
      originalSplits.add((OffsetBasedSource<?>) deserializedSource);
    }
  }

  assertEquals(numberOfSplits, originalSplits.size());
  for (int i = 0; i < originalSplits.size(); i++) {
    OffsetBasedSource<?> offsetBasedSource = (OffsetBasedSource<?>) originalSplits.get(i);
    assertEquals(i, offsetBasedSource.getStartOffset());
    assertEquals(i + 1, offsetBasedSource.getEndOffset());
  }
}