Java Code Examples for org.apache.beam.sdk.values.TypeDescriptor

The following examples show how to use org.apache.beam.sdk.values.TypeDescriptor. They are extracted from open source projects; the source project, source file, and license are noted above each example.
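A quick orientation before the examples: TypeDescriptor.of(Class) wraps a non-generic type, while an empty anonymous subclass captures a full generic type that a plain Class literal would lose to Java's type erasure. The sketch below is ours, not taken from any of the projects above; the class name TypeDescriptorBasics is invented for illustration.

import java.util.List;
import org.apache.beam.sdk.values.TypeDescriptor;
import org.apache.beam.sdk.values.TypeDescriptors;

public class TypeDescriptorBasics {
  public static void main(String[] args) {
    // Non-generic type: wrap the Class object directly.
    TypeDescriptor<String> plain = TypeDescriptor.of(String.class);

    // Generic type: an empty anonymous subclass captures the type argument
    // that a Class literal such as List.class would erase.
    TypeDescriptor<List<String>> generic = new TypeDescriptor<List<String>>() {};

    // TypeDescriptors offers factory methods for common composite types.
    TypeDescriptor<List<String>> viaFactory = TypeDescriptors.lists(TypeDescriptors.strings());

    System.out.println(plain.getRawType());         // class java.lang.String
    System.out.println(generic);                    // java.util.List<java.lang.String>
    System.out.println(generic.equals(viaFactory)); // true
  }
}

Both idioms recur throughout the examples below, e.g. TypeDescriptor.of(FakeDoFn.class) in Example 3 and new TypeDescriptor<Text>() {} in Example 7.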
Example 1
Source Project: beam   Source File: SchemaRegistry.java    License: Apache License 2.0
@Nullable
@Override
public <T> Schema schemaFor(TypeDescriptor<T> typeDescriptor) {
  TypeDescriptor<?> type = typeDescriptor;
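  // Walk up the superclass chain until a provider registered for this type is found.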
  do {
    SchemaProvider schemaProvider = providers.get(type);
    if (schemaProvider != null) {
      return schemaProvider.schemaFor(type);
    }
    Class<?> superClass = type.getRawType().getSuperclass();
    if (superClass == null || superClass.equals(Object.class)) {
      return null;
    }
    type = TypeDescriptor.of(superClass);
  } while (true);
}
 
Example 2
Source Project: beam   Source File: FieldValueTypeInformation.java    License: Apache License 2.0
@Nullable
static FieldValueTypeInformation getIterableComponentType(TypeDescriptor valueType) {
  // TODO: Figure out nullable elements.
  TypeDescriptor componentType = ReflectUtils.getIterableComponentType(valueType);
  if (componentType == null) {
    return null;
  }

  return new AutoValue_FieldValueTypeInformation.Builder()
      .setName("")
      .setNullable(false)
      .setType(componentType)
      .setRawType(componentType.getRawType())
      .setElementType(getIterableComponentType(componentType))
      .setMapKeyType(getMapKeyType(componentType))
      .setMapValueType(getMapValueType(componentType))
      .setOneOfTypes(Collections.emptyMap())
      .build();
}
 
Example 3
Source Project: beam   Source File: DoFnSignaturesSplittableDoFnTest.java    License: Apache License 2.0
@Test
public void testSplitRestrictionWrongArgumentType() throws Exception {
  thrown.expectMessage("Object is not a valid context parameter.");
  DoFnSignatures.analyzeSplitRestrictionMethod(
      errors(),
      TypeDescriptor.of(FakeDoFn.class),
      new AnonymousMethod() {
        private void method(
            @Element Integer element,
            @Restriction SomeRestriction restriction,
            DoFn.OutputReceiver<SomeRestriction> receiver,
            Object extra) {}
      }.getMethod(),
      TypeDescriptor.of(Integer.class),
      TypeDescriptor.of(String.class),
      TypeDescriptor.of(SomeRestriction.class),
      FnAnalysisContext.create());
}
 
Example 4
Source Project: beam   Source File: WithKeysTest.java    License: Apache License 2.0
@Test
@Category(NeedsRunner.class)
public void withLambdaAndTypeDescriptorShouldSucceed() {

  PCollection<String> values = p.apply(Create.of("1234", "3210", "0", "-12"));
  PCollection<KV<Integer, String>> kvs =
      values.apply(
          WithKeys.of((SerializableFunction<String, Integer>) Integer::valueOf)
              .withKeyType(TypeDescriptor.of(Integer.class)));

  PAssert.that(kvs)
      .containsInAnyOrder(
          KV.of(1234, "1234"), KV.of(0, "0"), KV.of(-12, "-12"), KV.of(3210, "3210"));

  p.run();
}
 
Example 5
Source Project: beam   Source File: DoFnSignaturesSplittableDoFnTest.java    License: Apache License 2.0
@Test
public void testNewTrackerUnsupportedSchemaElementArgument() throws Exception {
  thrown.expectMessage(
      "Schema @Element are not supported for @NewTracker method. Found String, did you mean to use Integer?");
  DoFnSignatures.analyzeNewTrackerMethod(
      errors(),
      TypeDescriptor.of(FakeDoFn.class),
      new AnonymousMethod() {
        SomeRestrictionTracker method(
            @Element String element, @Restriction SomeRestriction restriction) {
          return null;
        }
      }.getMethod(),
      TypeDescriptor.of(Integer.class),
      TypeDescriptor.of(String.class),
      TypeDescriptor.of(SomeRestriction.class),
      FnAnalysisContext.create());
}
 
Example 6
Source Project: DataflowTemplates   Source File: BigQueryMerger.java    License: Apache License 2.0
@Override
public PCollection<Void> expand(PCollection<MergeInfo> input) {
  final MergeStatementBuilder mergeBuilder = new MergeStatementBuilder(mergeConfiguration);
  return input
      .apply(
          MapElements.into(
              TypeDescriptors.kvs(
                  TypeDescriptors.strings(), TypeDescriptor.of(MergeInfo.class)))
              .via(mergeInfo -> KV.of(mergeInfo.getReplicaTable(), mergeInfo)))
      .apply(new TriggerPerKeyOnFixedIntervals<String, MergeInfo>(windowDuration))
      .apply(Values.create())
      .apply(MapElements.into(TypeDescriptors.strings()).via(mergeInfo -> {
        return mergeBuilder.buildMergeStatement(
            mergeInfo.getReplicaTable(),
            mergeInfo.getStagingTable(),
            mergeInfo.getAllPkFields(),
            mergeInfo.getAllFields());
      }))
      .apply(ParDo.of(new BigQueryStatementIssuingFn(this.testBigQueryClient)))
      .apply(
          MapElements.into(TypeDescriptors.voids())
              .via(
                  whatever ->
                      (Void) null)); // TODO(pabloem) Remove this line and find a return type
}
 
Example 7
Source Project: beam   Source File: HadoopFormatIOWriteTest.java    License: Apache License 2.0
@Test
public void testWritingDataFailInvalidValueType() {

  conf.set(HadoopFormatIO.OUTPUT_DIR, tmpFolder.getRoot().getAbsolutePath());
  List<KV<Text, Text>> data = new ArrayList<>();
  data.add(KV.of(new Text("key"), new Text("value")));
  TypeDescriptor<Text> textTypeDescriptor = new TypeDescriptor<Text>() {};
  PCollection<KV<Text, Text>> input =
      p.apply(Create.of(data))
          .setTypeDescriptor(TypeDescriptors.kvs(textTypeDescriptor, textTypeDescriptor));

  thrown.expect(Pipeline.PipelineExecutionException.class);
  thrown.expectMessage(Text.class.getName());

  input.apply(
      "Write",
      HadoopFormatIO.<Text, Text>write()
          .withConfiguration(conf)
          .withPartitioning()
          .withExternalSynchronization(new HDFSSynchronization(getLocksDirPath())));

  p.run().waitUntilFinish();
}
 
Example 8
Source Project: beam   Source File: FieldTypeDescriptors.java    License: Apache License 2.0
/** Get a {@link FieldType} from a {@link TypeDescriptor}. */
public static FieldType fieldTypeForJavaType(TypeDescriptor typeDescriptor) {
  // TODO: Convert for registered logical types.
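  // Dispatch on the Java type: arrays and Collections become ARRAY fields, Maps
  // become MAP fields, and other Iterables become ITERABLE fields; anything else
  // must match one of the known primitive mappings.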
  if (typeDescriptor.isArray()
      || typeDescriptor.isSubtypeOf(TypeDescriptor.of(Collection.class))) {
    return getArrayFieldType(typeDescriptor);
  } else if (typeDescriptor.isSubtypeOf(TypeDescriptor.of(Map.class))) {
    return getMapFieldType(typeDescriptor);
  } else if (typeDescriptor.isSubtypeOf(TypeDescriptor.of(Iterable.class))) {
    return getIterableFieldType(typeDescriptor);
  } else if (typeDescriptor.isSubtypeOf(TypeDescriptor.of(Row.class))) {
    throw new IllegalArgumentException(
        "Cannot automatically determine a field type from a Row class"
            + " as we cannot determine the schema. You should set a field type explicitly.");
  } else {
    TypeName typeName = PRIMITIVE_MAPPING.inverse().get(typeDescriptor);
    if (typeName == null) {
      throw new RuntimeException("Couldn't find field type for " + typeDescriptor);
    }
    return FieldType.of(typeName);
  }
}
 
Example 9
Source Project: beam   Source File: BeamSqlDslArrayTest.java    License: Apache License 2.0
@Test
public void testUnnestLiteral() {
  PCollection<Row> input =
      pipeline.apply(
          "boundedInput1",
          Create.empty(TypeDescriptor.of(Row.class)).withRowSchema(INPUT_SCHEMA));

  // Because we have a multi-part FROM, the DSL considers it multi-input
  TupleTag<Row> mainTag = new TupleTag<Row>("main") {};
  PCollectionTuple inputTuple = PCollectionTuple.of(mainTag, input);

  Schema resultType = Schema.builder().addStringField("f_string").build();

  PCollection<Row> result =
      inputTuple.apply(
          "sqlQuery", SqlTransform.query("SELECT * FROM UNNEST (ARRAY ['a', 'b', 'c'])"));

  PAssert.that(result)
      .containsInAnyOrder(
          Row.withSchema(resultType).addValues("a").build(),
          Row.withSchema(resultType).addValues("b").build(),
          Row.withSchema(resultType).addValues("c").build());

  pipeline.run();
}
 
Example 10
Source Project: beam   Source File: ParDo.java    License: Apache License 2.0
private static void validateSideInputTypes(
    Map<String, PCollectionView<?>> sideInputs, DoFn<?, ?> fn) {
  DoFnSignature signature = DoFnSignatures.getSignature(fn.getClass());
  DoFnSignature.ProcessElementMethod processElementMethod = signature.processElement();
  for (SideInputParameter sideInput : processElementMethod.getSideInputParameters()) {
    PCollectionView<?> view = sideInputs.get(sideInput.sideInputId());
    checkArgument(
        view != null,
        "the ProcessElement method expects a side input identified with the tag %s, but no such side input was"
            + " supplied. Use withSideInput(String, PCollectionView) to supply this side input.",
        sideInput.sideInputId());
    TypeDescriptor<?> viewType = view.getViewFn().getTypeDescriptor();

    // Currently check that the types exactly match, even if the types are convertible.
    checkArgument(
        viewType.equals(sideInput.elementT()),
        "Side Input with tag %s and type %s cannot be bound to ProcessElement parameter with type %s",
        sideInput.sideInputId(),
        viewType,
        sideInput.elementT());
  }
}
 
Example 11
Source Project: beam   Source File: CoderProviders.java    License: Apache License 2.0
@Override
public <T> Coder<T> coderFor(TypeDescriptor<T> type, List<? extends Coder<?>> componentCoders)
    throws CannotProvideCoderException {
  if (!this.rawType.equals(type.getRawType())) {
    throw new CannotProvideCoderException(
        String.format(
            "Unable to provide coder for %s, this factory can only provide coders for %s",
            type, this.rawType));
  }
  try {
    return (Coder) factoryMethod.invoke(null /* static */, componentCoders.toArray());
  } catch (IllegalAccessException
      | IllegalArgumentException
      | InvocationTargetException
      | NullPointerException
      | ExceptionInInitializerError exn) {
    throw new IllegalStateException(
        "error when invoking Coder factory method " + factoryMethod, exn);
  }
}
 
Example 12
Source Project: beam   Source File: DoFnSignaturesTestUtils.java    License: Apache License 2.0
static DoFnSignature.ProcessElementMethod analyzeProcessElementMethod(AnonymousMethod method)
    throws Exception {
  return DoFnSignatures.analyzeProcessElementMethod(
      errors(),
      TypeDescriptor.of(FakeDoFn.class),
      method.getMethod(),
      TypeDescriptor.of(Integer.class),
      TypeDescriptor.of(String.class),
      FnAnalysisContext.create());
}
 
Example 13
Source Project: beam   Source File: AvroCoder.java    License: Apache License 2.0
@Override
public <T> Coder<T> coderFor(
    TypeDescriptor<T> typeDescriptor, List<? extends Coder<?>> componentCoders)
    throws CannotProvideCoderException {
  try {
    return AvroCoder.of(typeDescriptor);
  } catch (AvroRuntimeException e) {
    throw new CannotProvideCoderException(
        String.format("%s is not compatible with Avro", typeDescriptor), e);
  }
}
 
Example 14
Source Project: beam   Source File: ReduceByKey.java    License: Apache License 2.0
@Override
public <T> ValueByReduceByBuilder<InputT, T, InputT> keyBy(
    UnaryFunction<InputT, T> keyExtractor, @Nullable TypeDescriptor<T> keyType) {
  @SuppressWarnings("unchecked")
  final Builder<InputT, T, InputT, ?, ?> cast = (Builder) this;
  cast.keyExtractor = requireNonNull(keyExtractor);
  cast.keyType = keyType;
  return cast;
}
 
Example 15
Source Project: beam   Source File: AvroUtils.java    License: Apache License 2.0
/**
 * Returns a {@code SchemaCoder} instance for the Avro schema. The implicit type is
 * GenericRecord.
 */
public static SchemaCoder<GenericRecord> schemaCoder(org.apache.avro.Schema schema) {
  Schema beamSchema = toBeamSchema(schema);
  return SchemaCoder.of(
      beamSchema,
      TypeDescriptor.of(GenericRecord.class),
      getGenericRecordToRowFunction(beamSchema),
      getRowToGenericRecordFunction(schema));
}
 
Example 16
Source Project: beam   Source File: CoderProvidersTest.java    License: Apache License 2.0
/**
 * Checks that {@link CoderProviders#fromStaticMethods} successfully builds a working {@link
 * CoderProvider} from {@link IterableCoder IterableCoder.class}.
 */
@Test
public void testIterableCoderProvider() throws Exception {
  TypeDescriptor<Iterable<Double>> type = TypeDescriptors.iterables(TypeDescriptors.doubles());
  CoderProvider iterableCoderProvider =
      CoderProviders.fromStaticMethods(Iterable.class, IterableCoder.class);

  assertEquals(
      IterableCoder.of(DoubleCoder.of()),
      iterableCoderProvider.coderFor(type, Arrays.asList(DoubleCoder.of())));
}
 
Example 17
Source Project: beam   Source File: ReduceByKeyTest.java    License: Apache License 2.0
@Test
public void testReduceWithoutWindowing() {
  execute(
      new AbstractTestCase<String, KV<String, Long>>() {

        @Override
        protected List<String> getInput() {
          String[] words =
              "one two three four one two three four one two three one two one".split(" ");
          return Arrays.asList(words);
        }

        @Override
        protected TypeDescriptor<String> getInputType() {
          return TypeDescriptors.strings();
        }

        @Override
        public List<KV<String, Long>> getUnorderedOutput() {
          return Arrays.asList(
              KV.of("one", 5L), KV.of("two", 4L), KV.of("three", 3L), KV.of("four", 2L));
        }

        @Override
        protected PCollection<KV<String, Long>> getOutput(PCollection<String> input) {
          return ReduceByKey.of(input)
              .keyBy(e -> e, TypeDescriptor.of(String.class))
              .valueBy(e -> 1L, TypeDescriptor.of(Long.class))
              .combineBy(Sums.ofLongs())
              .output();
        }
      });
}
 
Example 18
Source Project: deployment-examples   Source File: StatefulTeamScore.java    License: MIT License
public static void main(String[] args) throws Exception {

    Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class);
    // Enforce that this pipeline is always run in streaming mode.
    options.setStreaming(true);
    ExampleUtils exampleUtils = new ExampleUtils(options);
    Pipeline pipeline = Pipeline.create(options);

    pipeline
        // Read game events from Pub/Sub using custom timestamps, which are extracted from the
        // pubsub data elements, and parse the data.
        .apply(
            PubsubIO.readStrings()
                .withTimestampAttribute(GameConstants.TIMESTAMP_ATTRIBUTE)
                .fromTopic(options.getTopic()))
        .apply("ParseGameEvent", ParDo.of(new ParseEventFn()))
        // Create <team, GameActionInfo> mapping. UpdateTeamScore uses team name as key.
        .apply(
            "MapTeamAsKey",
            MapElements.into(
                    TypeDescriptors.kvs(
                        TypeDescriptors.strings(), TypeDescriptor.of(GameActionInfo.class)))
                .via((GameActionInfo gInfo) -> KV.of(gInfo.team, gInfo)))
        // Outputs a team's score every time it passes a new multiple of the threshold.
        .apply("UpdateTeamScore", ParDo.of(new UpdateTeamScoreFn(options.getThresholdScore())))
        // Write the results to BigQuery.
        .apply(
            "WriteTeamLeaders",
            new WriteWindowedToBigQuery<>(
                options.as(GcpOptions.class).getProject(),
                options.getDataset(),
                options.getLeaderBoardTableName() + "_team_leader",
                configureCompleteWindowedTableWrite()));

    // Run the pipeline and wait for the pipeline to finish; capture cancellation requests from the
    // command line.
    PipelineResult result = pipeline.run();
    exampleUtils.waitToFinish(result);
}
 
Example 19
Source Project: beam   Source File: DistinctTest.java    License: Apache License 2.0
@Test
@Category(NeedsRunner.class)
public void withLambdaRepresentativeValuesFnAndTypeDescriptorShouldApplyFn() {

  PCollection<String> dupes =
      p.apply(Create.of("foo", "foos", "barbaz", "barbaz", "bazbar", "foo"));
  PCollection<String> deduped =
      dupes.apply(
          Distinct.withRepresentativeValueFn(String::length)
              .withRepresentativeType(TypeDescriptor.of(Integer.class)));

  PAssert.that(deduped)
      .satisfies(
          (Iterable<String> strs) -> {
            Multimap<Integer, String> predupedContents = HashMultimap.create();
            predupedContents.put(3, "foo");
            predupedContents.put(4, "foos");
            predupedContents.put(6, "barbaz");
            predupedContents.put(6, "bazbar");

            Set<Integer> seenLengths = new HashSet<>();
            for (String s : strs) {
              assertThat(predupedContents.values(), hasItem(s));
              assertThat(seenLengths, not(contains(s.length())));
              seenLengths.add(s.length());
            }
            return null;
          });

  p.run();
}
 
Example 20
Source Project: beam   Source File: BigQueryIO.java    License: Apache License 2.0
/** Like {@link #readTableRows()} but with {@link Schema} support. */
public static TypedRead<TableRow> readTableRowsWithSchema() {
  return read(new TableRowParser())
      .withCoder(TableRowJsonCoder.of())
      .withBeamRowConverters(
          TypeDescriptor.of(TableRow.class),
          BigQueryUtils.tableRowToBeamRow(),
          BigQueryUtils.tableRowFromBeamRow());
}
 
Example 21
Source Project: beam   Source File: ProtoMessageSchemaTest.java    License: Apache License 2.0
@Test
public void testOneOfRowToProto() {
  SerializableFunction<Row, OneOf> fromRow =
      new ProtoMessageSchema().fromRowFunction(TypeDescriptor.of(OneOf.class));
  assertEquals(ONEOF_PROTO_INT32, fromRow.apply(ONEOF_ROW_INT32));
  assertEquals(ONEOF_PROTO_BOOL, fromRow.apply(ONEOF_ROW_BOOL));
  assertEquals(ONEOF_PROTO_STRING, fromRow.apply(ONEOF_ROW_STRING));
  assertEquals(ONEOF_PROTO_PRIMITIVE, fromRow.apply(ONEOF_ROW_PRIMITIVE));
}
 
Example 22
Source Project: beam   Source File: ProtoMessageSchemaTest.java    License: Apache License 2.0
@Test
public void testOneOfProtoToRow() {
  SerializableFunction<OneOf, Row> toRow =
      new ProtoMessageSchema().toRowFunction(TypeDescriptor.of(OneOf.class));
  assertEquals(ONEOF_ROW_INT32, toRow.apply(ONEOF_PROTO_INT32));
  assertEquals(ONEOF_ROW_BOOL, toRow.apply(ONEOF_PROTO_BOOL));
  assertEquals(ONEOF_ROW_STRING, toRow.apply(ONEOF_PROTO_STRING));
  assertEquals(ONEOF_ROW_PRIMITIVE, toRow.apply(ONEOF_PROTO_PRIMITIVE));
}
 
Example 23
Source Project: beam   Source File: SchemaRegistryTest.java    License: Apache License 2.0
@Override
public <T> Schema schemaFor(TypeDescriptor<T> typeDescriptor) {
  if (typeDescriptor.equals(TypeDescriptor.of(TestSchemaClass.class))) {
    return EMPTY_SCHEMA;
  }
  return null;
}
 
Example 24
Source Project: beam   Source File: Create.java    License: Apache License 2.0
private TimestampedValues(
    Iterable<TimestampedValue<T>> timestampedElements,
    Optional<Coder<T>> elementCoder,
    Optional<TypeDescriptor<T>> typeDescriptor) {
  this.timestampedElements = timestampedElements;
  this.elementCoder = elementCoder;
  this.typeDescriptor = typeDescriptor;
}
 
Example 25
Source Project: beam   Source File: SchemaRegistryTest.java    License: Apache License 2.0
@Override
public <T> SerializableFunction<T, Row> toRowFunction(TypeDescriptor<T> typeDescriptor) {
  if (typeDescriptor.equals(TypeDescriptor.of(TestSchemaClass.class))) {
    return v -> Row.withSchema(EMPTY_SCHEMA).build();
  }
  return null;
}
 
Example 26
Source Project: beam   Source File: BigQueryTornadoesTest.java    License: Apache License 2.0
@Test
@Category(ValidatesRunner.class)
public void testEmpty() {
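  // An empty anonymous TypeDescriptor subclass preserves the full KV<Integer, Long>
  // type argument that a plain Class literal would erase.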
  PCollection<KV<Integer, Long>> inputs =
      p.apply(Create.empty(new TypeDescriptor<KV<Integer, Long>>() {}));
  PCollection<TableRow> result = inputs.apply(ParDo.of(new FormatCountsFn()));
  PAssert.that(result).empty();
  p.run().waitUntilFinish();
}
 
Example 27
Source Project: beam   Source File: ParDoSchemaTest.java    License: Apache License 2.0
@Test
@Category(NeedsRunner.class)
public void testUnmatchedSchema() {
  List<MyPojo> pojoList =
      Lists.newArrayList(new MyPojo("a", 1), new MyPojo("b", 2), new MyPojo("c", 3));

  Schema schema =
      Schema.builder().addStringField("string_field").addInt32Field("integer_field").build();

  thrown.expect(IllegalArgumentException.class);
  pipeline
      .apply(
          Create.of(pojoList)
              .withSchema(
                  schema,
                  TypeDescriptor.of(MyPojo.class),
                  o -> Row.withSchema(schema).addValues(o.stringField, o.integerField).build(),
                  r -> new MyPojo(r.getString("string_field"), r.getInt32("integer_field"))))
      .apply(
          ParDo.of(
              new DoFn<MyPojo, Void>() {
                @FieldAccess("a")
                FieldAccessDescriptor fieldAccess = FieldAccessDescriptor.withFieldNames("baad");

                @ProcessElement
                public void process(@FieldAccess("a") Row row) {}
              }));
  pipeline.run();
}
 
Example 28
Source Project: beam   Source File: CoderRegistryTest.java    License: Apache License 2.0
@Test
public void testTypeOverSpecifiedWithMultipleCoders() throws Exception {
  thrown.expect(CannotProvideCoderException.class);
  thrown.expectMessage("type is over specified");
  CoderRegistry.createDefault()
      .getCoder(
          new TypeDescriptor<Integer>() {},
          new TypeDescriptor<KV<Integer, Integer>>() {},
          KvCoder.of(BigEndianIntegerCoder.of(), VarIntCoder.of()));
}
 
Example 29
Source Project: beam   Source File: AvroUtils.java    License: Apache License 2.0
/** Get generated getters for an AVRO-generated SpecificRecord or a POJO. */
public static <T> List<FieldValueGetter> getGetters(Class<T> clazz, Schema schema) {
  if (TypeDescriptor.of(clazz).isSubtypeOf(TypeDescriptor.of(SpecificRecord.class))) {
    return JavaBeanUtils.getGetters(
        clazz,
        schema,
        new AvroSpecificRecordFieldValueTypeSupplier(),
        new AvroTypeConversionFactory());
  } else {
    return POJOUtils.getGetters(
        clazz, schema, new AvroPojoFieldValueTypeSupplier(), new AvroTypeConversionFactory());
  }
}