Java Code Examples for org.apache.beam.sdk.testing.TestPipeline#apply()

The following examples show how to use org.apache.beam.sdk.testing.TestPipeline#apply(). Each example notes its original source file and project so you can consult the full context.
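Before the examples, here is a minimal, self-contained sketch of the pattern they share: apply a transform to a TestPipeline, assert on the resulting PCollection with PAssert, then run the pipeline. The class name and values are illustrative only, not taken from any of the projects below.

import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.values.PCollection;
import org.junit.Rule;
import org.junit.Test;

public class TestPipelineApplyExample {
  // TestPipeline is a JUnit rule: it validates the pipeline and verifies
  // that PAssert assertions were actually evaluated when the test runs.
  @Rule public final transient TestPipeline p = TestPipeline.create();

  @Test
  public void testApply() {
    // apply() attaches a root transform to the pipeline and returns its output.
    PCollection<Integer> output = p.apply(Create.of(1, 2, 3));
    PAssert.that(output).containsInAnyOrder(1, 2, 3);
    p.run().waitUntilFinish();
  }
}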
Example 1
Source File: ExportTimestampTest.java    From DataflowTemplates with Apache License 2.0
private void exportAndImportDbAtTime(String sourceDb, String destDb,
                                     String jobIdName, String ts,
                                     TestPipeline exportPipeline,
                                     TestPipeline importPipeline) {
  ValueProvider.StaticValueProvider<String> destination = ValueProvider.StaticValueProvider
      .of(tmpDir);
  ValueProvider.StaticValueProvider<String> jobId = ValueProvider.StaticValueProvider
      .of(jobIdName);
  ValueProvider.StaticValueProvider<String> source = ValueProvider.StaticValueProvider
      .of(tmpDir + "/" + jobIdName);
  ValueProvider.StaticValueProvider<String> timestamp = ValueProvider.StaticValueProvider.of(ts);
  SpannerConfig sourceConfig = spannerServer.getSpannerConfig(sourceDb);
  exportPipeline.apply("Export", new ExportTransform(sourceConfig, destination,
                                                     jobId, timestamp));
  PipelineResult exportResult = exportPipeline.run();
  exportResult.waitUntilFinish();

  SpannerConfig copyConfig = spannerServer.getSpannerConfig(destDb);
  importPipeline.apply("Import", new ImportTransform(
      copyConfig, source, ValueProvider.StaticValueProvider.of(true),
      ValueProvider.StaticValueProvider.of(true),
      ValueProvider.StaticValueProvider.of(true)));
  PipelineResult importResult = importPipeline.run();
  importResult.waitUntilFinish();
}
 
Example 2
Source File: ExportTimestampTest.java    From DataflowTemplates with Apache License 2.0
private void compareDbs(String sourceDb, String destDb, TestPipeline comparePipeline) {
  SpannerConfig sourceConfig = spannerServer.getSpannerConfig(sourceDb);
  SpannerConfig copyConfig = spannerServer.getSpannerConfig(destDb);
  PCollection<Long> mismatchCount = comparePipeline
      .apply("Compare", new CompareDatabases(sourceConfig, copyConfig));
  PAssert.that(mismatchCount).satisfies((x) -> {
    assertEquals(Lists.newArrayList(x), Lists.newArrayList(0L));
    return null;
  });
  PipelineResult compareResult = comparePipeline.run();
  compareResult.waitUntilFinish();

  Ddl sourceDdl = readDdl(sourceDb);
  Ddl destinationDdl = readDdl(destDb);

  assertThat(sourceDdl.prettyPrint(), equalToIgnoringWhiteSpace(destinationDdl.prettyPrint()));
}
 
Example 3
Source File: PCollectionTupleTest.java    From beam with Apache License 2.0
@Test
public void testEquals() {
  TestPipeline p = TestPipeline.create();
  TupleTag<Long> longTag = new TupleTag<>();
  PCollection<Long> longs = p.apply(GenerateSequence.from(0));
  TupleTag<String> strTag = new TupleTag<>();
  PCollection<String> strs = p.apply(Create.of("foo", "bar"));

  EqualsTester tester = new EqualsTester();
  // Empty tuples in the same pipeline are equal
  tester.addEqualityGroup(PCollectionTuple.empty(p), PCollectionTuple.empty(p));

  tester.addEqualityGroup(
      PCollectionTuple.of(longTag, longs).and(strTag, strs),
      PCollectionTuple.of(longTag, longs).and(strTag, strs));

  tester.addEqualityGroup(PCollectionTuple.of(longTag, longs));
  tester.addEqualityGroup(PCollectionTuple.of(strTag, strs));

  TestPipeline otherPipeline = TestPipeline.create();
  // Empty tuples in different pipelines are not equal
  tester.addEqualityGroup(PCollectionTuple.empty(otherPipeline));
  tester.testEquals();
}
 
Example 4
Source File: FlinkTransformOverridesTest.java    From beam with Apache License 2.0
@Test
public void testRunnerDeterminedSharding() {
  FlinkPipelineOptions options = PipelineOptionsFactory.as(FlinkPipelineOptions.class);
  options.setRunner(TestFlinkRunner.class);
  options.setFlinkMaster("[auto]");
  options.setParallelism(5);

  TestPipeline p = TestPipeline.fromOptions(options);

  StreamingShardedWriteFactory<Object, Void, Object> factory =
      new StreamingShardedWriteFactory<>(p.getOptions());

  WriteFiles<Object, Void, Object> original = WriteFiles.to(new TestSink(tmpFolder.toString()));
  @SuppressWarnings("unchecked")
  PCollection<Object> objs = (PCollection) p.apply(Create.empty(VoidCoder.of()));
  AppliedPTransform<PCollection<Object>, WriteFilesResult<Void>, WriteFiles<Object, Void, Object>>
      originalApplication =
          AppliedPTransform.of("writefiles", objs.expand(), Collections.emptyMap(), original, p);

  WriteFiles<Object, Void, Object> replacement =
      (WriteFiles<Object, Void, Object>)
          factory.getReplacementTransform(originalApplication).getTransform();

  assertThat(replacement, not(equalTo((Object) original)));
  assertThat(replacement.getNumShardsProvider().get(), is(10));
}
 
Example 5
Source File: DataflowPTransformMatchersTest.java    From beam with Apache License 2.0
/** Creates a simple pipeline with a {@link Combine.PerKey} with side inputs. */
private static TestPipeline createCombinePerKeyWithSideInputsPipeline() {
  TestPipeline pipeline = TestPipeline.create().enableAbandonedNodeEnforcement(false);
  PCollection<KV<String, Integer>> input =
      pipeline
          .apply(Create.of(KV.of("key", 1)))
          .setCoder(KvCoder.of(StringUtf8Coder.of(), VarIntCoder.of()));
  PCollection<String> sideInput = pipeline.apply(Create.of("side input"));
  PCollectionView<String> sideInputView = sideInput.apply(View.asSingleton());

  input.apply(
      Combine.<String, Integer, Integer>perKey(new SumCombineFnWithContext())
          .withSideInputs(sideInputView));

  return pipeline;
}
 
Example 6
Source File: DataflowPTransformMatchersTest.java    From beam with Apache License 2.0
/** Creates a simple pipeline with a {@link Combine.GroupedValues} with side inputs. */
private static TestPipeline createCombineGroupedValuesWithSideInputsPipeline() {
  TestPipeline pipeline = TestPipeline.create().enableAbandonedNodeEnforcement(false);
  PCollection<KV<String, Integer>> input =
      pipeline
          .apply(Create.of(KV.of("key", 1)))
          .setCoder(KvCoder.of(StringUtf8Coder.of(), VarIntCoder.of()));
  PCollection<String> sideInput = pipeline.apply(Create.of("side input"));
  PCollectionView<String> sideInputView = sideInput.apply(View.asSingleton());

  input
      .apply(GroupByKey.create())
      .apply(
          Combine.<String, Integer, Integer>groupedValues(new SumCombineFnWithContext())
              .withSideInputs(sideInputView));

  return pipeline;
}
 
Example 7
Source File: DataflowRunnerTest.java    From beam with Apache License 2.0
private void testStreamingWriteOverride(PipelineOptions options, int expectedNumShards) {
  TestPipeline p = TestPipeline.fromOptions(options);

  StreamingShardedWriteFactory<Object, Void, Object> factory =
      new StreamingShardedWriteFactory<>(p.getOptions());
  WriteFiles<Object, Void, Object> original = WriteFiles.to(new TestSink(tmpFolder.toString()));
  PCollection<Object> objs = (PCollection) p.apply(Create.empty(VoidCoder.of()));
  AppliedPTransform<PCollection<Object>, WriteFilesResult<Void>, WriteFiles<Object, Void, Object>>
      originalApplication =
          AppliedPTransform.of("writefiles", objs.expand(), Collections.emptyMap(), original, p);

  WriteFiles<Object, Void, Object> replacement =
      (WriteFiles<Object, Void, Object>)
          factory.getReplacementTransform(originalApplication).getTransform();
  assertThat(replacement, not(equalTo((Object) original)));
  assertThat(replacement.getNumShardsProvider().get(), equalTo(expectedNumShards));

  WriteFilesResult<Void> originalResult = objs.apply(original);
  WriteFilesResult<Void> replacementResult = objs.apply(replacement);
  Map<PValue, ReplacementOutput> res =
      factory.mapOutputs(originalResult.expand(), replacementResult);
  assertEquals(1, res.size());
  assertEquals(
      originalResult.getPerDestinationOutputFilenames(),
      res.get(replacementResult.getPerDestinationOutputFilenames()).getOriginal().getValue());
}
 
Example 8
Source File: MusicBrainzTransformsTest.java    From bigquery-etl-dataflow-sample with Apache License 2.0
@Test
public void testNest() {
  TestPipeline p = TestPipeline.create().enableAbandonedNodeEnforcement(false);
  PCollection<String> artistText = p.apply("artist", Create.of(artistLinesOfJson)).setCoder(StringUtf8Coder.of());
  PCollection<String> artistCreditNameText = p.apply("artist_credit_name", Create.of(artistCreditLinesOfJson));
  PCollection<String> recordingText = p.apply("recording", Create.of(recordingLinesOfJson)).setCoder(StringUtf8Coder.of());

  PCollection<KV<Long, MusicBrainzDataObject>> artistsById = MusicBrainzTransforms.loadTableFromText(artistText, "artist", "id");

  PCollection<KV<Long, MusicBrainzDataObject>> recordingsByArtistCredit =
      MusicBrainzTransforms.loadTableFromText(recordingText, "recording", "artist_credit");
  PCollection<KV<Long, MusicBrainzDataObject>> artistCreditByArtistCredit =
      MusicBrainzTransforms.loadTableFromText(artistCreditNameText, "artist_credit_name", "artist_credit");

  PCollection<MusicBrainzDataObject> recordingsWithCredits =
      MusicBrainzTransforms.innerJoin(
          "credited recordings", artistCreditByArtistCredit, recordingsByArtistCredit);
  PCollection<KV<Long, MusicBrainzDataObject>> recordingsJoinedWithCredits =
      MusicBrainzTransforms.by("artist_credit_name_artist", recordingsWithCredits);
  PCollection<MusicBrainzDataObject> artistsWithNestedRecordings =
      MusicBrainzTransforms.nest(artistsById, recordingsJoinedWithCredits, "recordings");

  PAssert.that(artistsWithNestedRecordings).satisfies((artistCollection) -> {
    List<MusicBrainzDataObject> theList = new ArrayList<>();
    artistCollection.forEach(theList::add);

    // Use JUnit assertions rather than the assert keyword, which is silently
    // skipped unless the JVM runs with -ea.
    assertEquals(1, theList.size());
    assertEquals(
        448,
        ((List<MusicBrainzDataObject>) theList.get(0).getColumnValue("artist_recordings")).size());

    return null;
  });

  p.run();
}
 
Example 9
Source File: PubsubIOTest.java    From beam with Apache License 2.0
@Test
public void testRuntimeValueProviderSubscription() {
  TestPipeline pipeline = TestPipeline.create();
  ValueProvider<String> subscription =
      pipeline.newProvider("projects/project/subscriptions/subscription");
  Read<String> pubsubRead = PubsubIO.readStrings().fromSubscription(subscription);
  pipeline.apply(pubsubRead);
  assertThat(pubsubRead.getSubscriptionProvider(), not(nullValue()));
  assertThat(pubsubRead.getSubscriptionProvider().isAccessible(), is(false));
}
 
Example 10
Source File: PubsubIOTest.java    From beam with Apache License 2.0
@Test
public void testRuntimeValueProviderTopic() {
  TestPipeline pipeline = TestPipeline.create();
  ValueProvider<String> topic = pipeline.newProvider("projects/project/topics/topic");
  Read<String> pubsubRead = PubsubIO.readStrings().fromTopic(topic);
  pipeline.apply(pubsubRead);
  assertThat(pubsubRead.getTopicProvider(), not(nullValue()));
  assertThat(pubsubRead.getTopicProvider().isAccessible(), is(false));
}
 
Example 11
Source File: DirectGroupByKeyOverrideFactoryTest.java    From beam with Apache License 2.0
@Test
public void getInputSucceeds() {
  TestPipeline p = TestPipeline.create();
  PCollection<KV<String, Integer>> input =
      p.apply(
          Create.of(KV.of("foo", 1))
              .withCoder(KvCoder.of(StringUtf8Coder.of(), VarIntCoder.of())));
  PCollection<KV<String, Iterable<Integer>>> grouped = input.apply(GroupByKey.create());
  AppliedPTransform<?, ?, ?> producer = DirectGraphs.getProducer(grouped);
  PTransformReplacement<
          PCollection<KV<String, Integer>>, PCollection<KV<String, Iterable<Integer>>>>
      replacement = factory.getReplacementTransform((AppliedPTransform) producer);
  assertThat(replacement.getInput(), Matchers.<PCollection<?>>equalTo(input));
}