org.apache.beam.sdk.values.TypeDescriptor Java Examples

The following examples show how to use org.apache.beam.sdk.values.TypeDescriptor. Each example is drawn from an open-source project; the source file and license are noted above each snippet.
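
As a quick orientation first: a TypeDescriptor captures full generic type information that plain Class objects erase. TypeDescriptor.of(Class) wraps a raw class, while instantiating an anonymous subclass captures a parameterized type. The sketch below is illustrative only; the class name and the printed output are assumptions rather than code from any of the projects.

import java.util.List;
import org.apache.beam.sdk.values.TypeDescriptor;

public class TypeDescriptorDemo {
  public static void main(String[] args) {
    // Raw classes can be wrapped directly.
    TypeDescriptor<Integer> intType = TypeDescriptor.of(Integer.class);

    // An anonymous subclass captures the full parameterized type,
    // which a plain Class object cannot represent.
    TypeDescriptor<List<String>> listType = new TypeDescriptor<List<String>>() {};

    System.out.println(intType.getRawType()); // class java.lang.Integer
    System.out.println(listType.getType());   // java.util.List<java.lang.String>
    System.out.println(listType.isSubtypeOf(TypeDescriptor.of(Iterable.class))); // true
  }
}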
Example #1
Source File: FieldValueTypeInformation.java    From beam with Apache License 2.0
@Nullable
static FieldValueTypeInformation getIterableComponentType(TypeDescriptor valueType) {
  // TODO: Figure out nullable elements.
  TypeDescriptor componentType = ReflectUtils.getIterableComponentType(valueType);
  if (componentType == null) {
    return null;
  }

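  // Recurse so that nested containers (e.g. List<List<T>> or List<Map<K, V>>)
  // are described all the way down.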
  return new AutoValue_FieldValueTypeInformation.Builder()
      .setName("")
      .setNullable(false)
      .setType(componentType)
      .setRawType(componentType.getRawType())
      .setElementType(getIterableComponentType(componentType))
      .setMapKeyType(getMapKeyType(componentType))
      .setMapValueType(getMapValueType(componentType))
      .setOneOfTypes(Collections.emptyMap())
      .build();
}
 
Example #2
Source File: DoFnSignaturesSplittableDoFnTest.java    From beam with Apache License 2.0
@Test
public void testNewTrackerUnsupportedSchemaElementArgument() throws Exception {
  thrown.expectMessage(
      "Schema @Element are not supported for @NewTracker method. Found String, did you mean to use Integer?");
  DoFnSignatures.analyzeNewTrackerMethod(
      errors(),
      TypeDescriptor.of(FakeDoFn.class),
      new AnonymousMethod() {
        SomeRestrictionTracker method(
            @Element String element, @Restriction SomeRestriction restriction) {
          return null;
        }
      }.getMethod(),
      TypeDescriptor.of(Integer.class),
      TypeDescriptor.of(String.class),
      TypeDescriptor.of(SomeRestriction.class),
      FnAnalysisContext.create());
}
 
Example #3
Source File: CoderProviders.java    From beam with Apache License 2.0
@Override
public <T> Coder<T> coderFor(TypeDescriptor<T> type, List<? extends Coder<?>> componentCoders)
    throws CannotProvideCoderException {
  if (!this.rawType.equals(type.getRawType())) {
    throw new CannotProvideCoderException(
        String.format(
            "Unable to provide coder for %s, this factory can only provide coders for %s",
            type, this.rawType));
  }
  try {
    return (Coder) factoryMethod.invoke(null /* static */, componentCoders.toArray());
  } catch (IllegalAccessException
      | IllegalArgumentException
      | InvocationTargetException
      | NullPointerException
      | ExceptionInInitializerError exn) {
    throw new IllegalStateException(
        "error when invoking Coder factory method " + factoryMethod, exn);
  }
}
 
Example #4
Source File: WithKeysTest.java    From beam with Apache License 2.0
@Test
@Category(NeedsRunner.class)
public void withLambdaAndTypeDescriptorShouldSucceed() {

  PCollection<String> values = p.apply(Create.of("1234", "3210", "0", "-12"));
  PCollection<KV<Integer, String>> kvs =
      values.apply(
          WithKeys.of((SerializableFunction<String, Integer>) Integer::valueOf)
              .withKeyType(TypeDescriptor.of(Integer.class)));

  PAssert.that(kvs)
      .containsInAnyOrder(
          KV.of(1234, "1234"), KV.of(0, "0"), KV.of(-12, "-12"), KV.of(3210, "3210"));

  p.run();
}
 
Example #5
Source File: HadoopFormatIOWriteTest.java    From beam with Apache License 2.0
@Test
public void testWritingDataFailInvalidValueType() {

  conf.set(HadoopFormatIO.OUTPUT_DIR, tmpFolder.getRoot().getAbsolutePath());
  List<KV<Text, Text>> data = new ArrayList<>();
  data.add(KV.of(new Text("key"), new Text("value")));
  TypeDescriptor<Text> textTypeDescriptor = new TypeDescriptor<Text>() {};
  PCollection<KV<Text, Text>> input =
      p.apply(Create.of(data))
          .setTypeDescriptor(TypeDescriptors.kvs(textTypeDescriptor, textTypeDescriptor));

  thrown.expect(Pipeline.PipelineExecutionException.class);
  thrown.expectMessage(Text.class.getName());

  input.apply(
      "Write",
      HadoopFormatIO.<Text, Text>write()
          .withConfiguration(conf)
          .withPartitioning()
          .withExternalSynchronization(new HDFSSynchronization(getLocksDirPath())));

  p.run().waitUntilFinish();
}
 
Example #6
Source File: ParDo.java    From beam with Apache License 2.0
private static void validateSideInputTypes(
    Map<String, PCollectionView<?>> sideInputs, DoFn<?, ?> fn) {
  DoFnSignature signature = DoFnSignatures.getSignature(fn.getClass());
  DoFnSignature.ProcessElementMethod processElementMethod = signature.processElement();
  for (SideInputParameter sideInput : processElementMethod.getSideInputParameters()) {
    PCollectionView<?> view = sideInputs.get(sideInput.sideInputId());
    checkArgument(
        view != null,
        "the ProcessElement method expects a side input identified with the tag %s, but no such side input was"
            + " supplied. Use withSideInput(String, PCollectionView) to supply this side input.",
        sideInput.sideInputId());
    TypeDescriptor<?> viewType = view.getViewFn().getTypeDescriptor();

    // Currently check that the types exactly match, even if the types are convertible.
    checkArgument(
        viewType.equals(sideInput.elementT()),
        "Side Input with tag %s and type %s cannot be bound to ProcessElement parameter with type %s",
        sideInput.sideInputId(),
        viewType,
        sideInput.elementT());
  }
}
 
Example #7
Source File: DoFnSignaturesSplittableDoFnTest.java    From beam with Apache License 2.0
@Test
public void testSplitRestrictionWrongArgumentType() throws Exception {
  thrown.expectMessage("Object is not a valid context parameter.");
  DoFnSignatures.analyzeSplitRestrictionMethod(
      errors(),
      TypeDescriptor.of(FakeDoFn.class),
      new AnonymousMethod() {
        private void method(
            @Element Integer element,
            @Restriction SomeRestriction restriction,
            DoFn.OutputReceiver<SomeRestriction> receiver,
            Object extra) {}
      }.getMethod(),
      TypeDescriptor.of(Integer.class),
      TypeDescriptor.of(String.class),
      TypeDescriptor.of(SomeRestriction.class),
      FnAnalysisContext.create());
}
 
Example #8
Source File: FieldTypeDescriptors.java    From beam with Apache License 2.0
/** Get a {@link FieldType} from a {@link TypeDescriptor}. */
public static FieldType fieldTypeForJavaType(TypeDescriptor typeDescriptor) {
  // TODO: Convert for registered logical types.
  if (typeDescriptor.isArray()
      || typeDescriptor.isSubtypeOf(TypeDescriptor.of(Collection.class))) {
    return getArrayFieldType(typeDescriptor);
  } else if (typeDescriptor.isSubtypeOf(TypeDescriptor.of(Map.class))) {
    return getMapFieldType(typeDescriptor);
  } else if (typeDescriptor.isSubtypeOf(TypeDescriptor.of(Iterable.class))) {
    return getIterableFieldType(typeDescriptor);
  } else if (typeDescriptor.isSubtypeOf(TypeDescriptor.of(Row.class))) {
    throw new IllegalArgumentException(
        "Cannot automatically determine a field type from a Row class"
            + " as we cannot determine the schema. You should set a field type explicitly.");
  } else {
    TypeName typeName = PRIMITIVE_MAPPING.inverse().get(typeDescriptor);
    if (typeName == null) {
      throw new RuntimeException("Couldn't find field type for " + typeDescriptor);
    }
    return FieldType.of(typeName);
  }
}
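
For reference, a hypothetical call into the method above: a List<String> is a Collection, so it takes the array branch. The snippet assumes Beam's TypeDescriptors and FieldType helpers and is illustrative only.

// Illustrative: List<String> maps to an array field of strings.
FieldType fieldType =
    FieldTypeDescriptors.fieldTypeForJavaType(
        TypeDescriptors.lists(TypeDescriptors.strings()));
// fieldType.equals(FieldType.array(FieldType.STRING)) == true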
 
Example #9
Source File: BeamSqlDslArrayTest.java    From beam with Apache License 2.0
@Test
public void testUnnestLiteral() {
  PCollection<Row> input =
      pipeline.apply(
          "boundedInput1",
          Create.empty(TypeDescriptor.of(Row.class)).withRowSchema(INPUT_SCHEMA));

  // Because we have a multi-part FROM, the DSL considers it multi-input.
  TupleTag<Row> mainTag = new TupleTag<Row>("main") {};
  PCollectionTuple inputTuple = PCollectionTuple.of(mainTag, input);

  Schema resultType = Schema.builder().addStringField("f_string").build();

  PCollection<Row> result =
      inputTuple.apply(
          "sqlQuery", SqlTransform.query("SELECT * FROM UNNEST (ARRAY ['a', 'b', 'c'])"));

  PAssert.that(result)
      .containsInAnyOrder(
          Row.withSchema(resultType).addValues("a").build(),
          Row.withSchema(resultType).addValues("b").build(),
          Row.withSchema(resultType).addValues("c").build());

  pipeline.run();
}
 
Example #10
Source File: SchemaRegistry.java    From beam with Apache License 2.0
@Nullable
@Override
public <T> Schema schemaFor(TypeDescriptor<T> typeDescriptor) {
  TypeDescriptor<?> type = typeDescriptor;
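  // Walk up the superclass chain until some supertype has a registered provider.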
  do {
    SchemaProvider schemaProvider = providers.get(type);
    if (schemaProvider != null) {
      return schemaProvider.schemaFor(type);
    }
    Class<?> superClass = type.getRawType().getSuperclass();
    if (superClass == null || superClass.equals(Object.class)) {
      return null;
    }
    type = TypeDescriptor.of(superClass);
  } while (true);
}
 
Example #11
Source File: BigQueryMerger.java    From DataflowTemplates with Apache License 2.0
@Override
public PCollection<Void> expand(PCollection<MergeInfo> input) {
  final MergeStatementBuilder mergeBuilder = new MergeStatementBuilder(mergeConfiguration);
  return input
      .apply(
          MapElements.into(
              TypeDescriptors.kvs(
                  TypeDescriptors.strings(), TypeDescriptor.of(MergeInfo.class)))
              .via(mergeInfo -> KV.of(mergeInfo.getReplicaTable(), mergeInfo)))
      .apply(new TriggerPerKeyOnFixedIntervals<String, MergeInfo>(windowDuration))
      .apply(Values.create())
      .apply(MapElements.into(TypeDescriptors.strings()).via(mergeInfo -> {
        return mergeBuilder.buildMergeStatement(
            mergeInfo.getReplicaTable(),
            mergeInfo.getStagingTable(),
            mergeInfo.getAllPkFields(),
            mergeInfo.getAllFields());
      }))
      .apply(ParDo.of(new BigQueryStatementIssuingFn(this.testBigQueryClient)))
      .apply(
          MapElements.into(TypeDescriptors.voids())
              .via(
                  whatever ->
                      (Void) null)); // TODO(pabloem) Remove this line and find a return type
}
 
Example #12
Source File: DoFnSignaturesTestUtils.java    From beam with Apache License 2.0
static DoFnSignature.ProcessElementMethod analyzeProcessElementMethod(AnonymousMethod method)
    throws Exception {
  return DoFnSignatures.analyzeProcessElementMethod(
      errors(),
      TypeDescriptor.of(FakeDoFn.class),
      method.getMethod(),
      TypeDescriptor.of(Integer.class),
      TypeDescriptor.of(String.class),
      FnAnalysisContext.create());
}
 
Example #13
Source File: AvroUtils.java    From beam with Apache License 2.0
/** Get the generated getters for an Avro-generated SpecificRecord or a POJO. */
public static <T> List<FieldValueGetter> getGetters(Class<T> clazz, Schema schema) {
  if (TypeDescriptor.of(clazz).isSubtypeOf(TypeDescriptor.of(SpecificRecord.class))) {
    return JavaBeanUtils.getGetters(
        clazz,
        schema,
        new AvroSpecificRecordFieldValueTypeSupplier(),
        new AvroTypeConversionFactory());
  } else {
    return POJOUtils.getGetters(
        clazz, schema, new AvroPojoFieldValueTypeSupplier(), new AvroTypeConversionFactory());
  }
}
 
Example #14
Source File: AvroUtils.java    From beam with Apache License 2.0
/**
 * Returns a {@code SchemaCoder} instance for the Avro schema. The implicit type is
 * GenericRecord.
 */
public static SchemaCoder<GenericRecord> schemaCoder(org.apache.avro.Schema schema) {
  Schema beamSchema = toBeamSchema(schema);
  return SchemaCoder.of(
      beamSchema,
      TypeDescriptor.of(GenericRecord.class),
      getGenericRecordToRowFunction(beamSchema),
      getRowToGenericRecordFunction(schema));
}
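
A hypothetical usage sketch for the coder above; the pipeline, the schema JSON, and the file pattern are assumptions for illustration.

// Illustrative: read GenericRecords, then attach the schema-aware coder.
org.apache.avro.Schema avroSchema = new org.apache.avro.Schema.Parser().parse(schemaJson);
PCollection<GenericRecord> records =
    pipeline.apply(AvroIO.readGenericRecords(avroSchema).from("gs://my-bucket/input-*.avro"));
records.setCoder(AvroUtils.schemaCoder(avroSchema));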
 
Example #15
Source File: CoderRegistryTest.java    From beam with Apache License 2.0
@Test
public void testParameterizedWildcardTypeIsUnknown() throws Exception {
  CoderRegistry registry = CoderRegistry.createDefault();
  TypeDescriptor<List<? extends MyValue>> wildcardUnknownToken =
      new TypeDescriptor<List<? extends MyValue>>() {};

  thrown.expect(CannotProvideCoderException.class);
  thrown.expectMessage(
      String.format(
          "Cannot provide coder for parameterized type %s: Cannot provide a coder for wildcard type %s.",
          wildcardUnknownToken,
          ((ParameterizedType) wildcardUnknownToken.getType()).getActualTypeArguments()[0]));
  registry.getCoder(wildcardUnknownToken);
}
 
Example #16
Source File: CoderProvidersTest.java    From beam with Apache License 2.0
/**
 * Checks that {@link CoderProviders#fromStaticMethods} successfully builds a working {@link
 * CoderProvider} from {@link IterableCoder IterableCoder.class}.
 */
@Test
public void testIterableCoderProvider() throws Exception {
  TypeDescriptor<Iterable<Double>> type = TypeDescriptors.iterables(TypeDescriptors.doubles());
  CoderProvider iterableCoderProvider =
      CoderProviders.fromStaticMethods(Iterable.class, IterableCoder.class);

  assertEquals(
      IterableCoder.of(DoubleCoder.of()),
      iterableCoderProvider.coderFor(type, Arrays.asList(DoubleCoder.of())));
}
 
Example #17
Source File: DistinctTest.java    From beam with Apache License 2.0
@Test
@Category(NeedsRunner.class)
public void withLambdaRepresentativeValuesFnAndTypeDescriptorShouldApplyFn() {

  PCollection<String> dupes =
      p.apply(Create.of("foo", "foos", "barbaz", "barbaz", "bazbar", "foo"));
  PCollection<String> deduped =
      dupes.apply(
          Distinct.withRepresentativeValueFn(String::length)
              .withRepresentativeType(TypeDescriptor.of(Integer.class)));

  PAssert.that(deduped)
      .satisfies(
          (Iterable<String> strs) -> {
            Multimap<Integer, String> predupedContents = HashMultimap.create();
            predupedContents.put(3, "foo");
            predupedContents.put(4, "foos");
            predupedContents.put(6, "barbaz");
            predupedContents.put(6, "bazbar");

            Set<Integer> seenLengths = new HashSet<>();
            for (String s : strs) {
              assertThat(predupedContents.values(), hasItem(s));
              assertThat(seenLengths, not(contains(s.length())));
              seenLengths.add(s.length());
            }
            return null;
          });

  p.run();
}
 
Example #18
Source File: HadoopFormatIO.java    From beam with Apache License 2.0
/**
 * Returns the default coder for a given type descriptor. The {@link CoderRegistry} is queried
 * first; if it cannot provide a coder and the type descriptor is a {@link Writable}, a
 * {@link WritableCoder} is returned; otherwise an {@link IllegalStateException} ("Cannot find
 * coder") is thrown.
 */
@SuppressWarnings({"unchecked", "WeakerAccess"})
public <T> Coder<T> getDefaultCoder(TypeDescriptor<?> typeDesc, CoderRegistry coderRegistry) {
  Class classType = typeDesc.getRawType();
  try {
    return (Coder<T>) coderRegistry.getCoder(typeDesc);
  } catch (CannotProvideCoderException e) {
    if (Writable.class.isAssignableFrom(classType)) {
      return (Coder<T>) WritableCoder.of(classType);
    }
    throw new IllegalStateException(
        String.format("Cannot find coder for %s  : ", typeDesc) + e.getMessage(), e);
  }
}
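
A hypothetical call site for the method above, illustrating the Writable fallback; here read stands in for an instance of the enclosing class, and the snippet is illustrative only.

// Illustrative: Text is a Hadoop Writable, so if the registry cannot provide
// a coder for it, the WritableCoder fallback is used.
Coder<Text> coder =
    read.getDefaultCoder(TypeDescriptor.of(Text.class), CoderRegistry.createDefault());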
 
Example #19
Source File: AvroCoder.java    From beam with Apache License 2.0
@Override
public <T> Coder<T> coderFor(
    TypeDescriptor<T> typeDescriptor, List<? extends Coder<?>> componentCoders)
    throws CannotProvideCoderException {
  try {
    return AvroCoder.of(typeDescriptor);
  } catch (AvroRuntimeException e) {
    throw new CannotProvideCoderException(
        String.format("%s is not compatible with Avro", typeDescriptor), e);
  }
}
 
Example #20
Source File: CoderRegistryTest.java    From beam with Apache License 2.0
@Test
public void testParameterizedDefaultListCoder() throws Exception {
  CoderRegistry registry = CoderRegistry.createDefault();
  TypeDescriptor<List<Integer>> listToken = new TypeDescriptor<List<Integer>>() {};
  assertEquals(ListCoder.of(VarIntCoder.of()), registry.getCoder(listToken));

  registry.registerCoderProvider(
      CoderProviders.fromStaticMethods(MyValue.class, MyValueCoder.class));
  TypeDescriptor<KV<String, List<MyValue>>> kvToken =
      new TypeDescriptor<KV<String, List<MyValue>>>() {};
  assertEquals(
      KvCoder.of(StringUtf8Coder.of(), ListCoder.of(MyValueCoder.of())),
      registry.getCoder(kvToken));
}
 
Example #21
Source File: BigQueryIO.java    From beam with Apache License 2.0
/** Like {@link #readTableRows()} but with {@link Schema} support. */
public static TypedRead<TableRow> readTableRowsWithSchema() {
  return read(new TableRowParser())
      .withCoder(TableRowJsonCoder.of())
      .withBeamRowConverters(
          TypeDescriptor.of(TableRow.class),
          BigQueryUtils.tableRowToBeamRow(),
          BigQueryUtils.tableRowFromBeamRow());
}
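
A hypothetical follow-on: because the read above is schema-aware, the schema-aware Convert transform can turn the TableRows into Beam Rows. The table name is an assumption.

// Illustrative: read with schema support, then convert to Rows.
PCollection<TableRow> tableRows =
    p.apply(BigQueryIO.readTableRowsWithSchema().from("my-project:my_dataset.my_table"));
PCollection<Row> rows = tableRows.apply(Convert.toRows());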
 
Example #22
Source File: ProtoMessageSchemaTest.java    From beam with Apache License 2.0
@Test
public void testOneOfRowToProto() {
  SerializableFunction<Row, OneOf> fromRow =
      new ProtoMessageSchema().fromRowFunction(TypeDescriptor.of(OneOf.class));
  assertEquals(ONEOF_PROTO_INT32, fromRow.apply(ONEOF_ROW_INT32));
  assertEquals(ONEOF_PROTO_BOOL, fromRow.apply(ONEOF_ROW_BOOL));
  assertEquals(ONEOF_PROTO_STRING, fromRow.apply(ONEOF_ROW_STRING));
  assertEquals(ONEOF_PROTO_PRIMITIVE, fromRow.apply(ONEOF_ROW_PRIMITIVE));
}
 
Example #23
Source File: ProtoMessageSchemaTest.java    From beam with Apache License 2.0
@Test
public void testOneOfProtoToRow() {
  SerializableFunction<OneOf, Row> toRow =
      new ProtoMessageSchema().toRowFunction(TypeDescriptor.of(OneOf.class));
  assertEquals(ONEOF_ROW_INT32, toRow.apply(ONEOF_PROTO_INT32));
  assertEquals(ONEOF_ROW_BOOL, toRow.apply(ONEOF_PROTO_BOOL));
  assertEquals(ONEOF_ROW_STRING, toRow.apply(ONEOF_PROTO_STRING));
  assertEquals(ONEOF_ROW_PRIMITIVE, toRow.apply(ONEOF_PROTO_PRIMITIVE));
}
 
Example #24
Source File: SchemaRegistryTest.java    From beam with Apache License 2.0
@Override
public <T> Schema schemaFor(TypeDescriptor<T> typeDescriptor) {
  if (typeDescriptor.equals(TypeDescriptor.of(TestSchemaClass.class))) {
    return EMPTY_SCHEMA;
  }
  return null;
}
 
Example #25
Source File: CoderRegistryTest.java    From beam with Apache License 2.0
@Test
public void testTypeOverSpecifiedWithMultipleCoders() throws Exception {
  thrown.expect(CannotProvideCoderException.class);
  thrown.expectMessage("type is over specified");
  CoderRegistry.createDefault()
      .getCoder(
          new TypeDescriptor<Integer>() {},
          new TypeDescriptor<KV<Integer, Integer>>() {},
          KvCoder.of(BigEndianIntegerCoder.of(), VarIntCoder.of()));
}
 
Example #26
Source File: Create.java    From beam with Apache License 2.0
private TimestampedValues(
    Iterable<TimestampedValue<T>> timestampedElements,
    Optional<Coder<T>> elementCoder,
    Optional<TypeDescriptor<T>> typeDescriptor) {
  this.timestampedElements = timestampedElements;
  this.elementCoder = elementCoder;
  this.typeDescriptor = typeDescriptor;
}
 
Example #27
Source File: SchemaRegistryTest.java    From beam with Apache License 2.0
@Override
public <T> SerializableFunction<T, Row> toRowFunction(TypeDescriptor<T> typeDescriptor) {
  if (typeDescriptor.equals(TypeDescriptor.of(TestSchemaClass.class))) {
    return v -> Row.withSchema(EMPTY_SCHEMA).build();
  }
  return null;
}
 
Example #28
Source File: StatefulTeamScore.java    From beam with Apache License 2.0
public static void main(String[] args) throws Exception {

  Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class);
  // Enforce that this pipeline is always run in streaming mode.
  options.setStreaming(true);
  ExampleUtils exampleUtils = new ExampleUtils(options);
  Pipeline pipeline = Pipeline.create(options);

  pipeline
      // Read game events from Pub/Sub using custom timestamps, which are extracted from the
      // pubsub data elements, and parse the data.
      .apply(
          PubsubIO.readStrings()
              .withTimestampAttribute(GameConstants.TIMESTAMP_ATTRIBUTE)
              .fromTopic(options.getTopic()))
      .apply("ParseGameEvent", ParDo.of(new ParseEventFn()))
      // Create <team, GameActionInfo> mapping. UpdateTeamScore uses team name as key.
      .apply(
          "MapTeamAsKey",
          MapElements.into(
                  TypeDescriptors.kvs(
                      TypeDescriptors.strings(), TypeDescriptor.of(GameActionInfo.class)))
              .via((GameActionInfo gInfo) -> KV.of(gInfo.team, gInfo)))
      // Outputs a team's score every time it passes a new multiple of the threshold.
      .apply("UpdateTeamScore", ParDo.of(new UpdateTeamScoreFn(options.getThresholdScore())))
      // Write the results to BigQuery.
      .apply(
          "WriteTeamLeaders",
          new WriteWindowedToBigQuery<>(
              options.as(GcpOptions.class).getProject(),
              options.getDataset(),
              options.getLeaderBoardTableName() + "_team_leader",
              configureCompleteWindowedTableWrite()));

  // Run the pipeline and wait for the pipeline to finish; capture cancellation requests from the
  // command line.
  PipelineResult result = pipeline.run();
  exampleUtils.waitToFinish(result);
}
 
Example #29
Source File: BigQueryTornadoesTest.java    From beam with Apache License 2.0
@Test
@Category(ValidatesRunner.class)
public void testEmpty() {
  PCollection<KV<Integer, Long>> inputs =
      p.apply(Create.empty(new TypeDescriptor<KV<Integer, Long>>() {}));
  PCollection<TableRow> result = inputs.apply(ParDo.of(new FormatCountsFn()));
  PAssert.that(result).empty();
  p.run().waitUntilFinish();
}
 
Example #30
Source File: ParDoSchemaTest.java    From beam with Apache License 2.0
@Test
@Category(NeedsRunner.class)
public void testUnmatchedSchema() {
  List<MyPojo> pojoList =
      Lists.newArrayList(new MyPojo("a", 1), new MyPojo("b", 2), new MyPojo("c", 3));

  Schema schema =
      Schema.builder().addStringField("string_field").addInt32Field("integer_field").build();

  thrown.expect(IllegalArgumentException.class);
  pipeline
      .apply(
          Create.of(pojoList)
              .withSchema(
                  schema,
                  TypeDescriptor.of(MyPojo.class),
                  o -> Row.withSchema(schema).addValues(o.stringField, o.integerField).build(),
                  r -> new MyPojo(r.getString("string_field"), r.getInt32("integer_field"))))
      .apply(
          ParDo.of(
              new DoFn<MyPojo, Void>() {
                @FieldAccess("a")
                FieldAccessDescriptor fieldAccess = FieldAccessDescriptor.withFieldNames("baad");

                @ProcessElement
                public void process(@FieldAccess("a") Row row) {}
              }));
  pipeline.run();
}