org.apache.beam.sdk.values.PBegin Java Examples

The following examples show how to use org.apache.beam.sdk.values.PBegin. PBegin represents the start of a Pipeline: it carries no elements and is the input type of root PTransforms such as Read and Create, which produce the first PCollections in a pipeline. Each example is taken from an open-source project; the project and source file are noted above the code.
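Before the examples, here is a minimal sketch of the pattern most of them follow: a root PTransform takes PBegin as its input and produces a PCollection, and applying a transform directly to a Pipeline is equivalent to applying it to PBegin.in(pipeline). The class name and element values below are hypothetical and only illustrate the shape of such a transform; they are not taken from the projects listed here.

import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.values.PBegin;
import org.apache.beam.sdk.values.PCollection;

// Hypothetical root transform: consumes PBegin, produces PCollection<String>.
public class ReadGreetings extends PTransform<PBegin, PCollection<String>> {

  @Override
  public PCollection<String> expand(PBegin input) {
    // PBegin carries no elements; it only exposes the Pipeline, so a root transform
    // typically delegates to a source transform such as Create or Read.
    return input.apply(Create.of("hello", "world"));
  }

  public static void main(String[] args) {
    Pipeline pipeline = Pipeline.create(PipelineOptionsFactory.fromArgs(args).create());
    // pipeline.apply(...) is equivalent to PBegin.in(pipeline).apply(...).
    PCollection<String> greetings = pipeline.apply("ReadGreetings", new ReadGreetings());
    pipeline.run().waitUntilFinish();
  }
}

The examples below show the same expand(PBegin) pattern backed by real sources such as JdbcIO, TextIO, KinesisIO, and Datastore.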
Example #1
Source File: PeriodicImpulse.java    From beam with Apache License 2.0
@Override
public PCollection<Instant> expand(PBegin input) {
  PCollection<Instant> result =
      input
          .apply(
              Create.<PeriodicSequence.SequenceDefinition>of(
                  new PeriodicSequence.SequenceDefinition(
                      startTimestamp, stopTimestamp, fireInterval)))
          .apply(PeriodicSequence.create());

  if (this.applyWindowing) {
    result =
        result.apply(
            Window.<Instant>into(FixedWindows.of(Duration.millis(fireInterval.getMillis()))));
  }

  return result;
}
 
Example #2
Source File: HadoopFormatIO.java    From beam with Apache License 2.0
@Override
public PCollection<KV<K, V>> expand(PBegin input) {
  validateTransform();
  // Get the key and value coders based on the key and value classes.
  CoderRegistry coderRegistry = input.getPipeline().getCoderRegistry();
  Coder<K> keyCoder = getDefaultCoder(getKeyTypeDescriptor(), coderRegistry);
  Coder<V> valueCoder = getDefaultCoder(getValueTypeDescriptor(), coderRegistry);
  HadoopInputFormatBoundedSource<K, V> source =
      new HadoopInputFormatBoundedSource<>(
          getConfiguration(),
          keyCoder,
          valueCoder,
          getKeyTranslationFunction(),
          getValueTranslationFunction());
  return input.getPipeline().apply(org.apache.beam.sdk.io.Read.from(source));
}
 
Example #3
Source File: BeamIOTransformerTest.java    From component-runtime with Apache License 2.0
@Test
void coderSerialization() {
    scenario((transformer, loader) -> {
        final Class<?> coder = loader.loadClass(JdbcSource.WorkAroundCoder.class.getName());
        assertEquals(loader, coder.getClassLoader());
        final Field collection = coder.getDeclaredField("collection");
        if (!collection.isAccessible()) {
            collection.setAccessible(true);
        }
        final Object instance = newInstance(coder, loader);
        final PCollection<JsonObject> collectionInstance = PBegin
                .in(Pipeline.create(PipelineOptionsFactory.create()))
                .apply(Create.empty(JsonpJsonObjectCoder.of("test")));
        collection.set(instance, collectionInstance);
        final JsonObject original = Json.createObjectBuilder().add("init", true).build();
        final ByteArrayOutputStream out = new ByteArrayOutputStream();
        Coder.class.cast(instance).encode(original, out);
        out.flush();
        final Coder<?> deserialized = SerializableUtils.ensureSerializable((Coder<?>) instance);
        final ByteArrayInputStream inStream = new ByteArrayInputStream(out.toByteArray());
        assertEquals(original, deserialized.decode(inStream));

    });
}
 
Example #4
Source File: TestStreamTranslation.java    From beam with Apache License 2.0
/**
 * Converts an {@link AppliedPTransform}, which may be a rehydrated transform or an original
 * {@link TestStream}, to a {@link TestStream}.
 */
public static <T> TestStream<T> getTestStream(
    AppliedPTransform<PBegin, PCollection<T>, PTransform<PBegin, PCollection<T>>> application)
    throws IOException {
  // For robustness, we don't take this shortcut:
  // if (application.getTransform() instanceof TestStream) {
  //   return application.getTransform()
  // }

  SdkComponents sdkComponents = SdkComponents.create(application.getPipeline().getOptions());
  RunnerApi.PTransform transformProto = PTransformTranslation.toProto(application, sdkComponents);
  checkArgument(
      TEST_STREAM_TRANSFORM_URN.equals(transformProto.getSpec().getUrn()),
      "Attempt to get %s from a transform with wrong URN %s",
      TestStream.class.getSimpleName(),
      transformProto.getSpec().getUrn());
  RunnerApi.TestStreamPayload testStreamPayload =
      RunnerApi.TestStreamPayload.parseFrom(transformProto.getSpec().getPayload());

  return (TestStream<T>)
      testStreamFromProtoPayload(
          testStreamPayload, RehydratedComponents.forComponents(sdkComponents.toComponents()));
}
 
Example #5
Source File: KinesisIO.java    From beam with Apache License 2.0
@Override
public PCollection<KinesisRecord> expand(PBegin input) {
  Unbounded<KinesisRecord> unbounded =
      org.apache.beam.sdk.io.Read.from(
          new KinesisSource(
              getAWSClientsProvider(),
              getStreamName(),
              getInitialPosition(),
              getUpToDateThreshold(),
              getWatermarkPolicyFactory(),
              getRateLimitPolicyFactory(),
              getRequestRecordsLimit(),
              getMaxCapacityPerShard()));

  PTransform<PBegin, PCollection<KinesisRecord>> transform = unbounded;

  // If a record limit or a maximum read time was configured, cap the unbounded read
  // so it terminates and the resulting PCollection is bounded.
  if (getMaxNumRecords() < Long.MAX_VALUE || getMaxReadTime() != null) {
    transform =
        unbounded.withMaxReadTime(getMaxReadTime()).withMaxNumRecords(getMaxNumRecords());
  }

  return input.apply(transform);
}
 
Example #6
Source File: BoundedReadEvaluatorFactory.java    From beam with Apache License 2.0
@Override
public Collection<CommittedBundle<BoundedSourceShard<T>>> getInitialInputs(
    AppliedPTransform<PBegin, PCollection<T>, PTransform<PBegin, PCollection<T>>> transform,
    int targetParallelism)
    throws Exception {
  BoundedSource<T> source = ReadTranslation.boundedSourceFromTransform(transform);
  long estimatedBytes = source.getEstimatedSizeBytes(options);
  long bytesPerBundle = estimatedBytes / targetParallelism;
  List<? extends BoundedSource<T>> bundles = source.split(bytesPerBundle, options);
  ImmutableList.Builder<CommittedBundle<BoundedSourceShard<T>>> shards =
      ImmutableList.builder();
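  // Wrap each source split in its own committed root bundle so the runner can evaluate the splits in parallel.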
  for (BoundedSource<T> bundle : bundles) {
    CommittedBundle<BoundedSourceShard<T>> inputShard =
        evaluationContext
            .<BoundedSourceShard<T>>createRootBundle()
            .add(WindowedValue.valueInGlobalWindow(BoundedSourceShard.of(bundle)))
            .commit(BoundedWindow.TIMESTAMP_MAX_VALUE);
    shards.add(inputShard);
  }
  return shards.build();
}
 
Example #7
Source File: JdbcIO.java    From beam with Apache License 2.0
@Override
public PCollection<T> expand(PBegin input) {
  checkArgument(getQuery() != null, "withQuery() is required");
  checkArgument(getRowMapper() != null, "withRowMapper() is required");
  checkArgument(getCoder() != null, "withCoder() is required");
  checkArgument(
      (getDataSourceProviderFn() != null),
      "withDataSourceConfiguration() or withDataSourceProviderFn() is required");

  return input
      .apply(Create.of((Void) null))
      .apply(
          JdbcIO.<Void, T>readAll()
              .withDataSourceProviderFn(getDataSourceProviderFn())
              .withQuery(getQuery())
              .withCoder(getCoder())
              .withRowMapper(getRowMapper())
              .withFetchSize(getFetchSize())
              .withOutputParallelization(getOutputParallelization())
              .withParameterSetter(
                  (element, preparedStatement) -> {
                    if (getStatementPreparator() != null) {
                      getStatementPreparator().setParameters(preparedStatement);
                    }
                  }));
}
 
Example #8
Source File: TextIO.java    From beam with Apache License 2.0
@Override
public PCollection<String> expand(PBegin input) {
  checkNotNull(getFilepattern(), "need to set the filepattern of a TextIO.Read transform");
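  // With no watch interval and no many-files hint, expand directly into a single bounded Read of the source.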
  if (getMatchConfiguration().getWatchInterval() == null && !getHintMatchesManyFiles()) {
    return input.apply("Read", org.apache.beam.sdk.io.Read.from(getSource()));
  }

  // All other cases go through FileIO + ReadFiles
  return input
      .apply("Create filepattern", Create.ofProvider(getFilepattern(), StringUtf8Coder.of()))
      .apply("Match All", FileIO.matchAll().withConfiguration(getMatchConfiguration()))
      .apply(
          "Read Matches",
          FileIO.readMatches()
              .withCompression(getCompression())
              .withDirectoryTreatment(DirectoryTreatment.PROHIBIT))
      .apply("Via ReadFiles", readFiles().withDelimiter(getDelimiter()));
}
 
Example #9
Source File: BaseBeamTable.java    From beam with Apache License 2.0
@Override
public PCollection<Row> buildIOReader(
    PBegin begin, BeamSqlTableFilter filters, List<String> fieldNames) {
  String error = "%s does not support predicate/project push-down, yet non-empty %s is passed.";

  if (!(filters instanceof DefaultTableFilter)) {
    throw new UnsupportedOperationException(
        String.format(error, this.getClass().getName(), "'filters'"));
  }

  if (!fieldNames.isEmpty()) {
    throw new UnsupportedOperationException(
        String.format(error, this.getClass().getName(), "'fieldNames'"));
  }

  return buildIOReader(begin);
}
 
Example #10
Source File: DatastoreConverters.java    From DataflowTemplates with Apache License 2.0
@Override
public PCollection<String> expand(PBegin begin) {
  return begin.apply("ReadFromDatastore",
      DatastoreIO.v1().read()
          .withProjectId(projectId())
          .withLiteralGqlQuery(gqlQuery())
          .withNamespace(namespace()))
      .apply("ParseEntitySchema", ParDo.of(new EntityToSchemaJson()))
      .apply("CountUniqueSchemas", Count.<String>perElement())
      .apply("Jsonify", ParDo.of(new DoFn<KV<String, Long>, String>(){
        @ProcessElement
        public void processElement(ProcessContext c) {
          JsonObject out = new JsonObject();
          out.addProperty("schema", c.element().getKey());
          out.addProperty("count", c.element().getValue());
          c.output(out.toString());
        }
      }));
}
 
Example #11
Source File: ParquetIO.java    From beam with Apache License 2.0
@Override
public PCollection<GenericRecord> expand(PBegin input) {
  checkNotNull(getFilepattern(), "Filepattern cannot be null.");

  return input
      .apply("Create filepattern", Create.ofProvider(getFilepattern(), StringUtf8Coder.of()))
      .apply(FileIO.matchAll())
      .apply(FileIO.readMatches())
      .apply(readFiles(getSchema()).withAvroDataModel(getAvroDataModel()));
}
 
Example #12
Source File: TestStreamEvaluatorFactory.java    From beam with Apache License 2.0
@Override
public PCollection<T> expand(PBegin input) {
  runner.setClockSupplier(new TestClockSupplier());
  return PCollection.createPrimitiveOutputInternal(
      input.getPipeline(),
      WindowingStrategy.globalDefault(),
      IsBounded.UNBOUNDED,
      original.getValueCoder());
}
 
Example #13
Source File: TransformHierarchyTest.java    From beam with Apache License 2.0
@Test
public void producingOwnAndOthersOutputsFails() {
  PCollection<Long> created =
      PCollection.createPrimitiveOutputInternal(
          pipeline, WindowingStrategy.globalDefault(), IsBounded.BOUNDED, VarLongCoder.of());
  hierarchy.pushNode("Create", PBegin.in(pipeline), Create.of(1));
  hierarchy.setOutput(created);
  hierarchy.popNode();
  PCollectionList<Long> pcList = PCollectionList.of(created);

  final PCollectionList<Long> appended =
      pcList.and(
          PCollection.createPrimitiveOutputInternal(
                  pipeline,
                  WindowingStrategy.globalDefault(),
                  IsBounded.BOUNDED,
                  VarLongCoder.of())
              .setName("prim"));
  hierarchy.pushNode(
      "AddPc",
      pcList,
      new PTransform<PCollectionList<Long>, PCollectionList<Long>>() {
        @Override
        public PCollectionList<Long> expand(PCollectionList<Long> input) {
          return appended;
        }
      });
  thrown.expect(IllegalArgumentException.class);
  thrown.expectMessage("contains a primitive POutput produced by it");
  thrown.expectMessage("AddPc");
  thrown.expectMessage("Create");
  thrown.expectMessage(appended.expand().toString());
  hierarchy.setOutput(appended);
}
 
Example #14
Source File: AmqpIO.java    From beam with Apache License 2.0
@Override
public PCollection<Message> expand(PBegin input) {
  checkArgument(addresses() != null, "withAddresses() is required");

  org.apache.beam.sdk.io.Read.Unbounded<Message> unbounded =
      org.apache.beam.sdk.io.Read.from(new UnboundedAmqpSource(this));

  PTransform<PBegin, PCollection<Message>> transform = unbounded;

  if (maxNumRecords() < Long.MAX_VALUE || maxReadTime() != null) {
    transform = unbounded.withMaxReadTime(maxReadTime()).withMaxNumRecords(maxNumRecords());
  }

  return input.getPipeline().apply(transform);
}
 
Example #15
Source File: TestBoundedTableTest.java    From beam with Apache License 2.0
@Test
public void testCreatingEmptyTable() {
  TestBoundedTable emptyTable =
      TestBoundedTable.of(
          Schema.builder().addInt32Field("ColId").addStringField("Value").build());
  emptyTable.buildIOReader(PBegin.in(pipeline));
  pipeline.run();
}
 
Example #16
Source File: S3InputRuntime.java    From components with Apache License 2.0
@Override
public PCollection<IndexedRecord> expand(PBegin in) {
    // The UGI does not control security for S3.
    UgiDoAs doAs = UgiDoAs.ofNone();
    String path = S3Connection.getUriPath(properties.getDatasetProperties());
    boolean overwrite = false; // overwrite is ignored for reads.
    int limit = properties.limit.getValue();
    boolean mergeOutput = false; // mergeOutput is ignored for reads.

    SimpleRecordFormatBase rf = null;
    switch (properties.getDatasetProperties().format.getValue()) {

    case AVRO:
        rf = new SimpleRecordFormatAvroIO(doAs, path, overwrite, limit, mergeOutput);
        break;

    case CSV:
        S3DatasetProperties dataset = properties.getDatasetProperties();
        rf = new SimpleRecordFormatCsvIO(doAs, path, limit, dataset.getRecordDelimiter(),
            dataset.getMaxRowSize(), dataset.getFieldDelimiter(), dataset.getEncoding(),
            dataset.getHeaderLine(), dataset.getTextEnclosureCharacter(), dataset.getEscapeCharacter());
        break;

    case PARQUET:
        rf = new SimpleRecordFormatParquetIO(doAs, path, overwrite, limit, mergeOutput);
        break;
        
    case EXCEL:
        S3DatasetProperties ds = properties.getDatasetProperties();
        rf = new SimpleRecordFormatExcelIO(doAs, path, overwrite, limit, mergeOutput,
            ds.getEncoding(), ds.getSheetName(), ds.getHeaderLine(), ds.getFooterLine(),
            ds.getExcelFormat());
        break;
    }

    if (rf == null) {
        throw new RuntimeException("To be implemented: " + properties.getDatasetProperties().format.getValue());
    }

    S3Connection.setS3Configuration(rf.getExtraHadoopConfiguration(), properties.getDatasetProperties());
    return rf.read(in);
}
 
Example #17
Source File: BigQueryTable.java    From beam with Apache License 2.0
@Override
public PCollection<Row> buildIOReader(
    PBegin begin, BeamSqlTableFilter filters, List<String> fieldNames) {
  if (!method.equals(Method.DIRECT_READ)) {
    LOG.info("Predicate/project push-down only available for `DIRECT_READ` method, skipping.");
    return buildIOReader(begin);
  }

  final FieldAccessDescriptor resolved =
      FieldAccessDescriptor.withFieldNames(fieldNames).resolve(getSchema());
  final Schema newSchema = SelectHelpers.getOutputSchema(getSchema(), resolved);

  TypedRead<Row> typedRead = getBigQueryTypedRead(newSchema);

  if (!(filters instanceof DefaultTableFilter)) {
    BigQueryFilter bigQueryFilter = (BigQueryFilter) filters;
    if (!bigQueryFilter.getSupported().isEmpty()) {
      String rowRestriction = generateRowRestrictions(getSchema(), bigQueryFilter.getSupported());
      if (!rowRestriction.isEmpty()) {
        LOG.info("Pushing down the following filter: " + rowRestriction);
        typedRead = typedRead.withRowRestriction(rowRestriction);
      }
    }
  }

  if (!fieldNames.isEmpty()) {
    typedRead = typedRead.withSelectedFields(fieldNames);
  }

  return begin.apply("Read Input BQ Rows with push-down", typedRead);
}
 
Example #18
Source File: JdbcSource.java    From component-runtime with Apache License 2.0
public PCollection<JsonObject> expand(final PBegin input) {
    final WorkAroundCoder workAroundCoder = new WorkAroundCoder();
    final PCollection<JsonObject> apply = input
            .apply(JdbcIO
                    .<JsonObject> read()
                    .withRowMapper(new RecordMapper(builder))
                    .withDataSourceConfiguration(config.asBeamConfig())
                    .withQuery(config.query)
                    .withCoder(workAroundCoder));
    workAroundCoder.collection = apply;
    return apply;
}
 
Example #19
Source File: AvroTable.java    From beam with Apache License 2.0 5 votes vote down vote up
@Override
public PCollection<Row> buildIOReader(PBegin begin) {

  return begin
      .apply(
          "AvroIORead",
          AvroIO.readGenericRecords(AvroUtils.toAvroSchema(schema, tableName, null))
              .withBeamSchemas(true)
              .from(filePattern))
      .apply("GenericRecordToRow", Convert.toRows());
}
 
Example #20
Source File: DataStoreV1Table.java    From beam with Apache License 2.0
@Override
public PCollection<Row> buildIOReader(PBegin begin) {
  Query.Builder q = Query.newBuilder();
  q.addKindBuilder().setName(kind);
  Query query = q.build();

  DatastoreV1.Read readInstance =
      DatastoreIO.v1().read().withProjectId(projectId).withQuery(query);

  return begin
      .apply("Read Datastore Entities", readInstance)
      .apply("Convert Datastore Entities to Rows", EntityToRow.create(getSchema(), keyField));
}
 
Example #21
Source File: StructuredStreamingPipelineStateTest.java    From beam with Apache License 2.0
private PTransform<PBegin, PCollection<String>> getValues(
    final SparkStructuredStreamingPipelineOptions options) {
  final boolean doNotSyncWithWatermark = false;
  return options.isStreaming()
      ? CreateStream.of(StringUtf8Coder.of(), Duration.millis(1), doNotSyncWithWatermark)
          .nextBatch("one", "two")
      : Create.of("one", "two");
}
 
Example #22
Source File: ParquetTable.java    From beam with Apache License 2.0
@Override
public PCollection<Row> buildIOReader(PBegin begin) {
  PTransform<PCollection<GenericRecord>, PCollection<Row>> readConverter =
      GenericRecordReadConverter.builder().beamSchema(schema).build();

  return begin
      .apply("ParquetIORead", ParquetIO.read(AvroUtils.toAvroSchema(schema)).from(filePattern))
      .apply("GenericRecordToRow", readConverter);
}
 
Example #23
Source File: FlinkStreamingTransformTranslators.java    From beam with Apache License 2.0
@Override
void translateNode(
    PTransform<PBegin, PCollection<T>> transform, FlinkStreamingTranslationContext context) {
  if (context.getOutput(transform).isBounded().equals(PCollection.IsBounded.BOUNDED)) {
    boundedTranslator.translateNode(transform, context);
  } else {
    unboundedTranslator.translateNode(transform, context);
  }
}
 
Example #24
Source File: DIPipeline.java    From component-runtime with Apache License 2.0
@Override
public <OutputT extends POutput> OutputT apply(final String name, final PTransform<? super PBegin, OutputT> root) {
    transformStack.add(root);
    try {
        return super.apply(name, wrapTransformIfNeeded(root));
    } finally {
        transformStack.remove(root);
    }
}
 
Example #25
Source File: ImpulseEvaluatorFactory.java    From beam with Apache License 2.0
@Override
public Collection<CommittedBundle<ImpulseShard>> getInitialInputs(
    AppliedPTransform<PBegin, PCollection<byte[]>, PTransform<PBegin, PCollection<byte[]>>>
        transform,
    int targetParallelism) {
  return Collections.singleton(
      ctxt.<ImpulseShard>createRootBundle()
          .add(WindowedValue.valueInGlobalWindow(new ImpulseShard()))
          .commit(BoundedWindow.TIMESTAMP_MIN_VALUE));
}
 
Example #26
Source File: BeamSampleSource.java    From component-runtime with Apache License 2.0
@Override
public PCollection<JsonObject> expand(final PBegin input) {
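    // PBegin carries no elements, so seed the pipeline with a single placeholder element
    // and emit one JsonObject built from the configured column name and value.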
    return input.apply(Create.of((Void) null)).apply(ParDo.of(new DoFn<Void, JsonObject>() {

        @ProcessElement
        public void processElement(final ProcessContext context) throws Exception {
            context
                    .output(jsonBuilderFactory
                            .createObjectBuilder()
                            .add(configuration.getColumnName(), configuration.getValue())
                            .build());
        }
    }));
}
 
Example #27
Source File: FlinkBatchTransformTranslators.java    From beam with Apache License 2.0
@Override
public void translateNode(
    PTransform<PBegin, PCollection<byte[]>> transform, FlinkBatchTranslationContext context) {
  String name = transform.getName();
  PCollection<byte[]> output = context.getOutput(transform);

  TypeInformation<WindowedValue<byte[]>> typeInformation = context.getTypeInfo(output);
  DataSource<WindowedValue<byte[]>> dataSource =
      new DataSource<>(
          context.getExecutionEnvironment(), new ImpulseInputFormat(), typeInformation, name);

  context.setOutputDataSet(output, dataSource);
}
 
Example #28
Source File: RedisIO.java    From beam with Apache License 2.0
@Override
public PCollection<KV<String, String>> expand(PBegin input) {
  checkArgument(connectionConfiguration() != null, "withConnectionConfiguration() is required");

  return input
      .apply(Create.of(keyPattern()))
      .apply(ParDo.of(new ReadKeysWithPattern(connectionConfiguration())))
      .apply(
          RedisIO.readKeyPatterns()
              .withConnectionConfiguration(connectionConfiguration())
              .withBatchSize(batchSize())
              .withOutputParallelization(outputParallelization()));
}
 
Example #29
Source File: SimpleRecordFormatCsvIO.java    From components with Apache License 2.0
@Override
public PCollection<IndexedRecord> read(PBegin in) {
    boolean isGSFileSystem = false;
    
    PCollection<?> pc2;
    if (path.startsWith("gs://")) {
        isGSFileSystem = true;
        pc2 = in.apply(TextIO.read().from(path));
    } else {
        CsvHdfsFileSource source = CsvHdfsFileSource.of(doAs, path, recordDelimiter, this.maxRowSize, encoding, header, textEnclosure, escapeChar);
        source.getExtraHadoopConfiguration().addFrom(getExtraHadoopConfiguration());

        source.setLimit(limit);

        PCollection<KV<org.apache.hadoop.io.LongWritable, BytesWritable>> pc1 = in.apply(Read.from(source));

        pc2 = pc1.apply(Values.<BytesWritable> create());
    }

    Character te = null;
    if (this.textEnclosure != null && !this.textEnclosure.isEmpty()) {
        te = this.textEnclosure.charAt(0);
    }

    Character ec = null;
    if (this.escapeChar != null && !this.escapeChar.isEmpty()) {
        ec = this.escapeChar.charAt(0);
    }

    PCollection<IndexedRecord> pc3 = pc2.apply(
        ParDo.of(new ExtractCsvRecord<>(fieldDelimiter.charAt(0), isGSFileSystem, encoding, te, ec)));
    return pc3;
}
 
Example #30
Source File: ImportTransform.java    From DataflowTemplates with Apache License 2.0
@Override
public PCollection<Export> expand(PBegin input) {
  NestedValueProvider<String, String> manifestFile =
      NestedValueProvider.of(importDirectory, s -> GcsUtil.joinPath(s, "spanner-export.json"));
  return input
      .apply("Read manifest", FileIO.match().filepattern(manifestFile))
      .apply(
          "Resource id",
          MapElements.into(TypeDescriptor.of(ResourceId.class))
              .via(MatchResult.Metadata::resourceId))
      .apply(
          "Read manifest json",
          MapElements.into(TypeDescriptor.of(Export.class))
              .via(ReadExportManifestFile::readManifest));
}