Java Code Examples for org.apache.beam.runners.dataflow.options.DataflowPipelineOptions#setProject()

The following examples show how to use org.apache.beam.runners.dataflow.options.DataflowPipelineOptions#setProject(). They are drawn from open-source projects; the source file and originating project are noted above each example.
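Before the project-specific examples, here is a minimal sketch of the pattern they all share: create a DataflowPipelineOptions instance, set the project ID with setProject(), and hand the options to a pipeline. The project ID, region, and bucket path below are placeholders, not values taken from any example on this page.

import org.apache.beam.runners.dataflow.DataflowRunner;
import org.apache.beam.runners.dataflow.options.DataflowPipelineOptions;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.options.PipelineOptionsFactory;

public class SetProjectSketch {
  public static void main(String[] args) {
    DataflowPipelineOptions options =
        PipelineOptionsFactory.fromArgs(args).as(DataflowPipelineOptions.class);
    options.setRunner(DataflowRunner.class);
    options.setProject("my-gcp-project");           // placeholder project ID
    options.setRegion("us-central1");               // placeholder region
    options.setTempLocation("gs://my-bucket/temp"); // placeholder GCS path

    Pipeline pipeline = Pipeline.create(options);
    // ... apply transforms here ...
    pipeline.run();
  }
}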
Example 1
Source File: DataflowRunnerTest.java    From beam with Apache License 2.0
/**
 * Tests that the {@link DataflowRunner} with {@code --templateLocation} throws the appropriate
 * exception when an output file is not writable.
 */
@Test
public void testTemplateRunnerLoggedErrorForFile() throws Exception {
  DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  options.setJobName("TestJobName");
  options.setRunner(DataflowRunner.class);
  options.setTemplateLocation("//bad/path");
  options.setProject("test-project");
  options.setRegion(REGION_ID);
  options.setTempLocation(tmpFolder.getRoot().getPath());
  options.setGcpCredential(new TestCredential());
  options.setPathValidatorClass(NoopPathValidator.class);
  Pipeline p = Pipeline.create(options);

  thrown.expectMessage("Cannot create output file at");
  thrown.expect(RuntimeException.class);
  p.run();
}
 
Example 2
Source File: BatchStatefulParDoOverridesTest.java    From beam with Apache License 2.0
private static DataflowPipelineOptions buildPipelineOptions(String... args) throws IOException {
  GcsUtil mockGcsUtil = mock(GcsUtil.class);
  when(mockGcsUtil.expand(any(GcsPath.class)))
      .then(invocation -> ImmutableList.of((GcsPath) invocation.getArguments()[0]));
  when(mockGcsUtil.bucketAccessible(any(GcsPath.class))).thenReturn(true);

  DataflowPipelineOptions options =
      PipelineOptionsFactory.fromArgs(args).as(DataflowPipelineOptions.class);
  options.setRunner(DataflowRunner.class);
  options.setGcpCredential(new TestCredential());
  options.setJobName("some-job-name");
  options.setProject("some-project");
  options.setRegion("some-region");
  options.setTempLocation(GcsPath.fromComponents("somebucket", "some/path").toString());
  options.setFilesToStage(new ArrayList<>());
  options.setGcsUtil(mockGcsUtil);

  // Register these options with the FileSystems API so gs:// URIs resolve in this test.
  FileSystems.setDefaultPipelineOptions(options);

  return options;
}
 
Example 3
Source File: DataflowRunnerTest.java    From beam with Apache License 2.0
private DataflowPipelineOptions buildPipelineOptions() throws IOException {
  DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  options.setRunner(DataflowRunner.class);
  options.setProject(PROJECT_ID);
  options.setTempLocation(VALID_TEMP_BUCKET);
  options.setRegion(REGION_ID);
  // Set filesToStage to an empty list to prevent a default value from being calculated from the classpath.
  options.setFilesToStage(new ArrayList<>());
  options.setDataflowClient(buildMockDataflow());
  options.setGcsUtil(mockGcsUtil);
  options.setGcpCredential(new TestCredential());

  // Configure the FileSystem registrar to use these options.
  FileSystems.setDefaultPipelineOptions(options);

  return options;
}
 
Example 4
Source File: DataflowRunnerTest.java    From beam with Apache License 2.0
@Test
public void testGcsStagingLocationInitialization() throws Exception {
  // Set temp location (required), and check that staging location is set.
  DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  options.setTempLocation(VALID_TEMP_BUCKET);
  options.setProject(PROJECT_ID);
  options.setRegion(REGION_ID);
  options.setGcpCredential(new TestCredential());
  options.setGcsUtil(mockGcsUtil);
  options.setRunner(DataflowRunner.class);

  DataflowRunner.fromOptions(options);

  assertNotNull(options.getStagingLocation());
}
 
Example 5
Source File: DataflowRunnerTest.java    From beam with Apache License 2.0
@Test
public void testToString() {
  DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  options.setJobName("TestJobName");
  options.setProject("test-project");
  options.setRegion(REGION_ID);
  options.setTempLocation("gs://test/temp/location");
  options.setGcpCredential(new TestCredential());
  options.setPathValidatorClass(NoopPathValidator.class);
  options.setRunner(DataflowRunner.class);
  assertEquals("DataflowRunner#testjobname", DataflowRunner.fromOptions(options).toString());
}
 
Example 6
Source File: DataflowRunnerTest.java    From beam with Apache License 2.0
@Test
public void testTempLocationAndNoGcpTempLocationSucceeds() throws Exception {
  DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  options.setRunner(DataflowRunner.class);
  options.setGcpCredential(new TestCredential());
  options.setProject("foo-project");
  options.setRegion(REGION_ID);
  options.setTempLocation(VALID_TEMP_BUCKET);
  options.setGcsUtil(mockGcsUtil);

  DataflowRunner.fromOptions(options);
}
 
Example 7
Source File: DataflowRunnerTest.java    From beam with Apache License 2.0
@Test
public void testGcpTempAndNoTempLocationSucceeds() throws Exception {
  DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  options.setRunner(DataflowRunner.class);
  options.setGcpCredential(new TestCredential());
  options.setProject("foo-project");
  options.setRegion(REGION_ID);
  options.setGcpTempLocation(VALID_TEMP_BUCKET);
  options.setGcsUtil(mockGcsUtil);

  DataflowRunner.fromOptions(options);
}
 
Example 8
Source File: DataflowRunnerTest.java    From beam with Apache License 2.0
@Test
public void testNoStagingLocationAndNoTempLocationFails() {
  DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  options.setRunner(DataflowRunner.class);
  options.setProject("foo-project");
  options.setRegion(REGION_ID);

  thrown.expect(IllegalArgumentException.class);
  thrown.expectMessage(
      "DataflowRunner requires gcpTempLocation, "
          + "but failed to retrieve a value from PipelineOption");
  DataflowRunner.fromOptions(options);
}
 
Example 9
Source File: DataflowRunnerTest.java    From beam with Apache License 2.0
@Test
public void testProjectDescription() throws IOException {
  DataflowPipelineOptions options = buildPipelineOptions();
  options.setProject("some project");

  thrown.expect(IllegalArgumentException.class);
  thrown.expectMessage("Project ID");
  thrown.expectMessage("project description");

  DataflowRunner.fromOptions(options);
}
 
Example 10
Source File: DataflowRunnerTest.java    From beam with Apache License 2.0
@Test
public void testProjectNumber() throws IOException {
  DataflowPipelineOptions options = buildPipelineOptions();
  options.setProject("12345");

  thrown.expect(IllegalArgumentException.class);
  thrown.expectMessage("Project ID");
  thrown.expectMessage("project number");

  DataflowRunner.fromOptions(options);
}
 
Example 11
Source File: DataflowRunnerTest.java    From beam with Apache License 2.0
@Test
public void testProjectPrefix() throws IOException {
  DataflowPipelineOptions options = buildPipelineOptions();
  options.setProject("google.com:some-project-12345");

  DataflowRunner.fromOptions(options);
}
 
Example 12
Source File: DataflowRunnerTest.java    From beam with Apache License 2.0
@Test
public void testProjectId() throws IOException {
  DataflowPipelineOptions options = buildPipelineOptions();
  options.setProject("foo-12345");

  DataflowRunner.fromOptions(options);
}
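Taken together, Examples 9 through 12 outline the validation that DataflowRunner.fromOptions(...) applies to the project value: strings containing spaces are rejected as project descriptions, all-digit strings as project numbers, while domain-scoped IDs ("google.com:some-project-12345") and plain IDs ("foo-12345") are accepted. Below is a rough re-statement of those checks as a hypothetical helper named checkProject; the real validation lives inside DataflowRunner.fromOptions.

// Hypothetical helper summarizing the checks exercised by Examples 9-12;
// not the actual DataflowRunner implementation.
static void checkProject(String project) {
  if (project.contains(" ")) {
    // e.g. "some project" looks like a project description, not a project ID.
    throw new IllegalArgumentException(
        "Project ID '" + project + "' invalid: it looks like a project description.");
  }
  if (project.matches("[0-9]+")) {
    // e.g. "12345" looks like a project number, not a project ID.
    throw new IllegalArgumentException(
        "Project ID '" + project + "' invalid: it looks like a project number.");
  }
  // "google.com:some-project-12345" and "foo-12345" pass validation.
}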
 
Example 13
Source File: MonitoringUtilTest.java    From beam with Apache License 2.0
@Test
public void testOverridesEndpointWithStagedDataflowEndpoint() {
  DataflowPipelineOptions options =
      PipelineOptionsFactory.create().as(DataflowPipelineOptions.class);
  options.setProject(PROJECT_ID);
  options.setRegion(REGION_ID);
  options.setGcpCredential(new TestCredential());
  String stagingDataflowEndpoint = "v0neverExisted";
  options.setDataflowEndpoint(stagingDataflowEndpoint);
  String cancelCommand = MonitoringUtil.getGcloudCancelCommand(options, JOB_ID);
  assertEquals(
      "CLOUDSDK_API_ENDPOINT_OVERRIDES_DATAFLOW=https://dataflow.googleapis.com/v0neverExisted/ "
          + "gcloud dataflow jobs --project=someProject cancel --region=thatRegion 1234",
      cancelCommand);
}
 
Example 14
Source File: MonitoringUtilTest.java    From beam with Apache License 2.0
@Test
public void testDontOverrideEndpointWithDefaultApi() {
  DataflowPipelineOptions options =
      PipelineOptionsFactory.create().as(DataflowPipelineOptions.class);
  options.setProject(PROJECT_ID);
  options.setRegion(REGION_ID);
  options.setGcpCredential(new TestCredential());
  String cancelCommand = MonitoringUtil.getGcloudCancelCommand(options, JOB_ID);
  assertEquals(
      "gcloud dataflow jobs --project=someProject cancel --region=thatRegion 1234",
      cancelCommand);
}
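Examples 13 and 14 exercise MonitoringUtil.getGcloudCancelCommand, which builds the gcloud command string a user can run to cancel a job, prefixing an endpoint override only when a non-default Dataflow endpoint is configured. A minimal sketch of calling it outside a test; the project, region, and job ID are placeholders:

DataflowPipelineOptions options =
    PipelineOptionsFactory.create().as(DataflowPipelineOptions.class);
options.setProject("my-gcp-project"); // placeholder
options.setRegion("us-central1");     // placeholder
String cancelCommand =
    MonitoringUtil.getGcloudCancelCommand(options, "2020-01-01_00_00_00-1234567890");
// e.g. "gcloud dataflow jobs --project=my-gcp-project cancel --region=us-central1 2020-01-01_00_00_00-1234567890"
System.out.println(cancelCommand);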
 
Example 15
Source File: DataflowViewTest.java    From beam with Apache License 2.0
private Pipeline createTestStreamingRunner() {
  DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  options.setRunner(DataflowRunner.class);
  options.setStreaming(true);
  options.setProject("someproject");
  options.setRegion("some-region1");
  options.setGcpTempLocation("gs://staging");
  options.setPathValidatorClass(NoopPathValidator.class);
  options.setDataflowClient(dataflow);
  return Pipeline.create(options);
}
 
Example 16
Source File: DataflowViewTest.java    From beam with Apache License 2.0
private Pipeline createTestBatchRunner() {
  DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  options.setRunner(DataflowRunner.class);
  options.setProject("someproject");
  options.setRegion("some-region1");
  options.setGcpTempLocation("gs://staging");
  options.setPathValidatorClass(NoopPathValidator.class);
  options.setDataflowClient(dataflow);
  return Pipeline.create(options);
}
 
Example 17
Source File: DataflowGroupByKeyTest.java    From beam with Apache License 2.0
/**
 * Create a test pipeline that uses the {@link DataflowRunner} so that {@link GroupByKey} is not
 * expanded. This is used for verifying that even without expansion the proper errors show up.
 */
private Pipeline createTestServiceRunner() {
  DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  options.setRunner(DataflowRunner.class);
  options.setProject("someproject");
  options.setRegion("some-region1");
  options.setGcpTempLocation("gs://staging");
  options.setPathValidatorClass(NoopPathValidator.class);
  options.setDataflowClient(dataflow);
  return Pipeline.create(options);
}
 
Example 18
Source File: WorkerCustomSourcesSplitOnlySourceTest.java    From beam with Apache License 2.0
@Test
public void testAllSplitsAreReturned() throws Exception {
  final long apiSizeLimitForTest = 500 * 1024;
  DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  options.setAppName("TestAppName");
  options.setProject("test-project");
  options.setRegion("some-region1");
  options.setTempLocation("gs://test/temp/location");
  options.setGcpCredential(new TestCredential());
  options.setRunner(DataflowRunner.class);
  options.setPathValidatorClass(NoopPathValidator.class);
  // Generate a CountingSource and split it into the desired number of splits
  // (desired size = 1 byte), triggering the re-split with a larger bundle size.
  // Thus below we expect to produce 'numberOfSplits' splits.
  com.google.api.services.dataflow.model.Source source =
      WorkerCustomSourcesTest.translateIOToCloudSource(
          CountingSource.upTo(numberOfSplits), options);
  SourceSplitResponse split =
      WorkerCustomSourcesTest.performSplit(
          source, options, 1L, null /* numBundles limit */, apiSizeLimitForTest);
  assertThat(
      split.getBundles().size(),
      lessThanOrEqualTo(WorkerCustomSources.DEFAULT_NUM_BUNDLES_LIMIT));

  List<OffsetBasedSource<?>> originalSplits = new ArrayList<>(numberOfSplits);
  // Collect all the splits
  for (DerivedSource derivedSource : split.getBundles()) {
    Object deserializedSource =
        WorkerCustomSources.deserializeFromCloudSource(derivedSource.getSource().getSpec());
    if (deserializedSource instanceof SplittableOnlyBoundedSource) {
      SplittableOnlyBoundedSource<?> splittableOnlySource =
          (SplittableOnlyBoundedSource<?>) deserializedSource;
      originalSplits.addAll((List) splittableOnlySource.split(1L, options));
    } else {
      originalSplits.add((OffsetBasedSource<?>) deserializedSource);
    }
  }

  assertEquals(numberOfSplits, originalSplits.size());
  for (int i = 0; i < originalSplits.size(); i++) {
    OffsetBasedSource<?> offsetBasedSource = (OffsetBasedSource<?>) originalSplits.get(i);
    assertEquals(i, offsetBasedSource.getStartOffset());
    assertEquals(i + 1, offsetBasedSource.getEndOffset());
  }
}
 
Example 19
Source File: PubSubToBQPipeline.java    From pubsub-to-bigquery with Apache License 2.0
public static void main(String[] args)
    throws GeneralSecurityException, IOException, ParseException,
        ParserConfigurationException, SAXException {
  String params = null;
  for (int i = 0; i < args.length; i++) {
    if (args[i].startsWith("--params=")) {
      params = args[i].replaceFirst("--params=", "");
    }
  }

  System.out.println(params);
  init(params);

  GoogleCredentials credentials =
      ServiceAccountCredentials.fromStream(new FileInputStream(keyFile))
          .createScoped(Arrays.asList("https://www.googleapis.com/auth/cloud-platform"));

  DataflowPipelineOptions options =
      PipelineOptionsFactory.create().as(DataflowPipelineOptions.class);

  options.setRunner(DataflowRunner.class);
  // The project ID is required in order to run the pipeline on Google Cloud.
  options.setProject(projectId);
  // A Google Cloud Storage path is required for staging local files.
  options.setStagingLocation(workingBucket);
  options.setTempLocation(workingBucket + "/temp");
  options.setGcpCredential(credentials);
  options.setServiceAccount(accountEmail);
  options.setMaxNumWorkers(maxNumWorkers);
  options.setDiskSizeGb(diskSizeGb);
  options.setWorkerMachineType(machineType);
  options.setAutoscalingAlgorithm(AutoscalingAlgorithmType.THROUGHPUT_BASED);
  options.setZone(zone);
  options.setStreaming(isStreaming);
  options.setJobName(pipelineName);
  Pipeline pipeline = Pipeline.create(options);

  Gson gson = new Gson();
  TableSchema schema = gson.fromJson(schemaStr, TableSchema.class);

  PCollection<String> streamData;
  if (pubSubTopicSub != null && !StringUtils.isEmpty(pubSubTopicSub)) {
    streamData =
        pipeline.apply(
            "ReadPubSub",
            PubsubIO.readStrings()
                .fromSubscription(
                    String.format(
                        "projects/%1$s/subscriptions/%2$s", projectId, pubSubTopicSub)));
  } else if (pubSubTopic != null && !StringUtils.isEmpty(pubSubTopic)) {
    streamData =
        pipeline.apply(
            "ReadPubSub",
            PubsubIO.readStrings()
                .fromTopic(String.format("projects/%1$s/topics/%2$s", projectId, pubSubTopic)));
  } else {
    // Fail fast instead of hitting a NullPointerException below when neither
    // a subscription nor a topic has been configured.
    throw new IllegalArgumentException(
        "Either a Pub/Sub subscription or a Pub/Sub topic must be configured.");
  }

  PCollection<TableRow> tableRow =
      streamData.apply("ToTableRow", ParDo.of(new PrepData.ToTableRow(owTimestamp, debugMode)));

  tableRow.apply(
      "WriteToBQ",
      BigQueryIO.writeTableRows()
          .to(String.format("%1$s.%2$s", bqDataSet, bqTable))
          .withSchema(schema)
          .withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_APPEND));

  System.out.println("Starting pipeline " + pipelineName);
  pipeline.run();
}
 
Example 20
Source File: DataflowRunnerTest.java    From beam with Apache License 2.0
@Test
public void testRunWithFiles() throws IOException {
  // Test that the function DataflowRunner.stageFiles works as expected.
  final String cloudDataflowDataset = "somedataset";

  // Create some temporary files.
  File temp1 = File.createTempFile("DataflowRunnerTest-", ".txt");
  temp1.deleteOnExit();
  File temp2 = File.createTempFile("DataflowRunnerTest2-", ".txt");
  temp2.deleteOnExit();

  String overridePackageName = "alias.txt";

  when(mockGcsUtil.getObjects(anyListOf(GcsPath.class)))
      .thenReturn(
          ImmutableList.of(
              GcsUtil.StorageObjectOrIOException.create(new FileNotFoundException("some/path"))));

  DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  options.setFilesToStage(
      ImmutableList.of(
          temp1.getAbsolutePath(), overridePackageName + "=" + temp2.getAbsolutePath()));
  options.setStagingLocation(VALID_STAGING_BUCKET);
  options.setTempLocation(VALID_TEMP_BUCKET);
  options.setTempDatasetId(cloudDataflowDataset);
  options.setProject(PROJECT_ID);
  options.setRegion(REGION_ID);
  options.setJobName("job");
  options.setDataflowClient(buildMockDataflow());
  options.setGcsUtil(mockGcsUtil);
  options.setGcpCredential(new TestCredential());

  when(mockGcsUtil.create(any(GcsPath.class), anyString(), anyInt()))
      .then(
          invocation ->
              FileChannel.open(
                  Files.createTempFile("channel-", ".tmp"),
                  StandardOpenOption.CREATE,
                  StandardOpenOption.WRITE,
                  StandardOpenOption.DELETE_ON_CLOSE));

  Pipeline p = buildDataflowPipeline(options);

  DataflowPipelineJob job = (DataflowPipelineJob) p.run();
  assertEquals("newid", job.getJobId());

  ArgumentCaptor<Job> jobCaptor = ArgumentCaptor.forClass(Job.class);
  Mockito.verify(mockJobs).create(eq(PROJECT_ID), eq(REGION_ID), jobCaptor.capture());
  Job workflowJob = jobCaptor.getValue();
  assertValidJob(workflowJob);

  assertEquals(2, workflowJob.getEnvironment().getWorkerPools().get(0).getPackages().size());
  DataflowPackage workflowPackage1 =
      workflowJob.getEnvironment().getWorkerPools().get(0).getPackages().get(0);
  assertThat(workflowPackage1.getName(), endsWith(getFileExtension(temp1.getAbsolutePath())));
  DataflowPackage workflowPackage2 =
      workflowJob.getEnvironment().getWorkerPools().get(0).getPackages().get(1);
  assertEquals(overridePackageName, workflowPackage2.getName());

  assertEquals(
      GcsPath.fromUri(VALID_TEMP_BUCKET).toResourceName(),
      workflowJob.getEnvironment().getTempStoragePrefix());
  assertEquals(cloudDataflowDataset, workflowJob.getEnvironment().getDataset());
  assertEquals(
      DataflowRunnerInfo.getDataflowRunnerInfo().getName(),
      workflowJob.getEnvironment().getUserAgent().get("name"));
  assertEquals(
      DataflowRunnerInfo.getDataflowRunnerInfo().getVersion(),
      workflowJob.getEnvironment().getUserAgent().get("version"));
}