Java Code Examples for org.apache.beam.sdk.transforms.DoFn#ProcessElement
The following examples show how to use
org.apache.beam.sdk.transforms.DoFn#ProcessElement.
You can vote up the examples you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example 1
Source File: LocalSpannerIO.java From DataflowTemplates with Apache License 2.0 | 6 votes |
@DoFn.ProcessElement public void processElement(ProcessContext c) { MutationGroup mg = c.element(); if (mg.primary().getOperation() == Op.DELETE && !isPointDelete(mg.primary())) { // Ranged deletes are not batchable. c.output(unbatchableMutationsTag, Arrays.asList(mg)); unBatchableMutationGroupsCounter.inc(); return; } SpannerSchema spannerSchema = c.sideInput(schemaView); long groupSize = MutationSizeEstimator.sizeOf(mg); long groupCells = MutationCellCounter.countOf(spannerSchema, mg); long groupRows = Iterables.size(mg); if (groupSize >= batchSizeBytes || groupCells >= maxNumMutations || groupRows >= maxNumRows) { c.output(unbatchableMutationsTag, Arrays.asList(mg)); unBatchableMutationGroupsCounter.inc(); } else { c.output(mg); batchableMutationGroupsCounter.inc(); } }
Example 2
Source File: DataStoreV1Table.java From beam with Apache License 2.0 | 6 votes |
@DoFn.ProcessElement
public void processElement(ProcessContext context) {
  Row row = context.element();

  // Drop the key column from the schema: it becomes the entity key rather
  // than a regular property.
  Schema valueSchema =
      Schema.builder()
          .addFields(
              row.getSchema().getFields().stream()
                  .filter(f -> !f.getName().equals(keyField))
                  .collect(Collectors.toList()))
          .build();

  Entity.Builder builder = constructEntityFromRow(valueSchema, row);
  builder.setKey(constructKeyFromRow(row));
  context.output(builder.build());
}
Example 3
Source File: DoFnInvokersTest.java From beam with Apache License 2.0 | 6 votes |
@Test
public void testDoFnWithAllParameters() throws Exception {
  // DoFn whose @ProcessElement declares every parameter kind exercised here:
  // context, element, timestamp, window and both output receivers.
  class MockFn extends DoFn<String, String> {
    @DoFn.ProcessElement
    public void processElement(
        ProcessContext c,
        @Element String element,
        @Timestamp Instant timestamp,
        IntervalWindow w,
        // PaneInfo p,
        OutputReceiver<String> receiver,
        MultiOutputReceiver multiReceiver)
        throws Exception {}
  }
  MockFn fn = mock(MockFn.class);
  // Invoking the fn must default to stop() when the mocked method returns nothing.
  assertEquals(stop(), invokeProcessElement(fn));
  // Verify the invoker passed each mock argument to the matching parameter.
  verify(fn)
      .processElement(
          mockProcessContext,
          mockElement,
          mockTimestamp,
          mockWindow,
          mockOutputReceiver,
          mockMultiOutputReceiver);
}
Example 4
Source File: DoFnInvokersTest.java From beam with Apache License 2.0 | 6 votes |
@Test
public void testDoFnWithReturn() throws Exception {
  // Splittable DoFn whose @ProcessElement returns a ProcessContinuation;
  // the restriction/tracker methods are stubs needed to satisfy the SDF shape.
  class MockFn extends DoFn<String, String> {
    @DoFn.ProcessElement
    public ProcessContinuation processElement(
        ProcessContext c, RestrictionTracker<SomeRestriction, Void> tracker) throws Exception {
      return null;
    }

    @GetInitialRestriction
    public SomeRestriction getInitialRestriction(@Element String element) {
      return null;
    }

    @NewTracker
    public SomeRestrictionTracker newTracker(@Restriction SomeRestriction restriction) {
      return null;
    }
  }
  MockFn fn = mock(MockFn.class);
  // Stub the mocked method to resume; the invoker must propagate that value.
  when(fn.processElement(mockProcessContext, null)).thenReturn(resume());
  assertEquals(resume(), invokeProcessElement(fn));
}
Example 5
Source File: SpannerIO.java From beam with Apache License 2.0 | 6 votes |
@DoFn.ProcessElement public void processElement(ProcessContext c) { MutationGroup mg = c.element(); if (mg.primary().getOperation() == Op.DELETE && !isPointDelete(mg.primary())) { // Ranged deletes are not batchable. c.output(unbatchableMutationsTag, Arrays.asList(mg)); unBatchableMutationGroupsCounter.inc(); return; } SpannerSchema spannerSchema = c.sideInput(schemaView); long groupSize = MutationSizeEstimator.sizeOf(mg); long groupCells = MutationCellCounter.countOf(spannerSchema, mg); long groupRows = Iterables.size(mg); if (groupSize >= batchSizeBytes || groupCells >= maxNumMutations || groupRows >= maxNumRows) { c.output(unbatchableMutationsTag, Arrays.asList(mg)); unBatchableMutationGroupsCounter.inc(); } else { c.output(mg); batchableMutationGroupsCounter.inc(); } }
Example 6
Source File: DataStoreV1Table.java From beam with Apache License 2.0 | 5 votes |
@DoFn.ProcessElement
public void processElement(ProcessContext context) {
  Entity entity = context.element();

  // Re-expose the entity key as a regular field alongside all stored properties.
  ImmutableMap.Builder<String, Value> properties = ImmutableMap.builder();
  properties.put(keyField, makeValue(entity.getKey()).build());
  properties.putAll(entity.getPropertiesMap());

  context.output(extractRowFromProperties(schema, properties.build()));
}
Example 7
Source File: MongoDbTable.java From beam with Apache License 2.0 | 5 votes |
@DoFn.ProcessElement
public void processElement(ProcessContext context) {
  // Serialize the document with RELAXED output mode before emitting it.
  JsonWriterSettings settings =
      JsonWriterSettings.builder().outputMode(JsonMode.RELAXED).build();
  context.output(context.element().toJson(settings));
}
Example 8
Source File: KinesisInputRuntime.java From components with Apache License 2.0 | 5 votes |
@DoFn.ProcessElement
public void processElement(ProcessContext c) throws IOException {
  // Lazily build the Avro reader on the first element; the schema string is
  // parsed once and reused afterwards.
  if (schema == null) {
    schema = new Schema.Parser().parse(schemaStr);
    datumReader = new GenericDatumReader<GenericRecord>(schema);
  }
  // Pass the previous decoder back in so its internal buffer can be reused.
  decoder = DecoderFactory.get().binaryDecoder(c.element().getDataAsBytes(), decoder);
  c.output(datumReader.read(null, decoder));
}
Example 9
Source File: BigQueryInputRuntime.java From components with Apache License 2.0 | 5 votes |
@DoFn.ProcessElement
public void processElement(ProcessContext c) throws IOException {
  TableRow row = c.element();
  // Null rows are silently dropped.
  if (row == null) {
    return;
  }
  // Build the converter lazily so the schema string is parsed only once.
  if (converter == null) {
    converter = new BigQueryTableRowIndexedRecordConverter();
    converter.setSchema(new Schema.Parser().parse(schemaStr));
  }
  c.output(converter.convertToAvro(row));
}
Example 10
Source File: HadoopFormatIO.java From beam with Apache License 2.0 | 5 votes |
@DoFn.ProcessElement
public void processElement(
    @DoFn.Element KV<KeyT, ValueT> element,
    OutputReceiver<KV<KeyT, ValueT>> receiver,
    BoundedWindow window,
    ProcessContext c) {
  // Pass the element through unchanged; this method's real job is the
  // one-time Hadoop job setup below.
  receiver.output(element);
  if (isSetupJobAttempted) {
    // setup of job was already attempted
    return;
  }
  Configuration conf = c.sideInput(configView);
  // validate configuration and input
  // must be done first, because in all later operations are required assumptions from
  // validation
  validateConfiguration(conf);
  validateInputData(conf);
  // Only one worker should perform setup: try to take the job-level lock,
  // and record the attempt regardless of whether the lock was won.
  boolean isJobLockAcquired = externalSynchronization.tryAcquireJobLock(conf);
  isSetupJobAttempted = true;
  if (!isJobLockAcquired) {
    // some parallel execution acquired task
    return;
  }
  try {
    // setup job
    JobID jobId = HadoopFormats.getJobId(conf);
    trySetupJob(jobId, conf, window);
  } catch (Exception e) {
    // Setup failure is fatal; the original cause is preserved in the wrapper.
    throw new IllegalStateException(e);
  }
}
Example 11
Source File: HadoopFormatIO.java From beam with Apache License 2.0 | 5 votes |
@DoFn.ProcessElement
public void processElement(
    @DoFn.Element KV<KeyT, ValueT> element,
    OutputReceiver<KV<Integer, KV<KeyT, ValueT>>> output,
    ProcessContext c) {
  // Acquire a task id lazily, once per DoFn instance; every element handled
  // by this instance is then keyed by that same id.
  if (taskId == null) {
    taskId = externalSynchronization.acquireTaskIdLock(c.sideInput(configView));
  }
  output.output(KV.of(taskId.getId(), element));
}
Example 12
Source File: PubSubInputRuntime.java From components with Apache License 2.0 | 5 votes |
@DoFn.ProcessElement
public void processElement(ProcessContext c) throws IOException {
  if (schema == null) {
    // First element: parse the schema string and create the Avro reader once.
    schema = new Schema.Parser().parse(schemaStr);
    datumReader = new GenericDatumReader<GenericRecord>(schema);
  }
  byte[] payload = c.element().getPayload();
  // Reuse the previous decoder instance where possible.
  decoder = DecoderFactory.get().binaryDecoder(payload, decoder);
  GenericRecord parsed = datumReader.read(null, decoder);
  c.output(parsed);
}
Example 13
Source File: KafkaInputPTransformRuntime.java From components with Apache License 2.0 | 4 votes |
@DoFn.ProcessElement
public void processElement(ProcessContext c) {
  // Unwrap the incoming record into its key/value pair and emit it.
  c.output(c.element().getKV());
}
Example 14
Source File: ExtractCsvSplit.java From components with Apache License 2.0 | 4 votes |
@DoFn.ProcessElement
public void processElement(ProcessContext c) {
  // Decode the payload as UTF-8. StandardCharsets.UTF_8 is the idiomatic
  // constant: it skips the runtime charset-name lookup (and the unchecked
  // UnsupportedCharsetException path) of Charset.forName("UTF-8").
  String record = new String(c.element().getPayload(), java.nio.charset.StandardCharsets.UTF_8);
  // NOTE(review): String.split interprets fieldDelimiter as a regex, so
  // delimiters such as "|" or "." must arrive pre-escaped — TODO confirm
  // against the component's configuration.
  c.output(record.split(fieldDelimiter));
}
Example 15
Source File: DirectCollector.java From components with Apache License 2.0 | 4 votes |
@DoFn.ProcessElement
public void processElement(ProcessContext c) throws Exception {
  // Append the element to the shared per-uid collector list.
  // Guaranteed to be present and initialized.
  DirectCollector.getRecords(uid).add(c.element());
}
Example 16
Source File: HadoopFormatIOSequenceFileTest.java From beam with Apache License 2.0 | 4 votes |
@DoFn.ProcessElement
public void processElement(@DoFn.Element String element, OutputReceiver<String> receiver) {
  // Lower-case with an explicit locale so the result does not depend on the
  // JVM's default locale (e.g. "I".toLowerCase() yields a dotless ı under a
  // Turkish default locale, which would make this transform nondeterministic
  // across environments).
  receiver.output(element.toLowerCase(java.util.Locale.ROOT));
}
Example 17
Source File: HadoopFormatIOSequenceFileTest.java From beam with Apache License 2.0 | 4 votes |
@DoFn.ProcessElement
public void processElement(@DoFn.Element InputT element, OutputReceiver<OutputT> outReceiver) {
  // Apply the configured mapping function and forward the result downstream.
  OutputT transformed = transformFn.apply(element);
  outReceiver.output(transformed);
}
Example 18
Source File: ElasticsearchOutputRuntime.java From components with Apache License 2.0 | 4 votes |
@DoFn.ProcessElement
public void processElement(ProcessContext c) {
  IndexedRecord record = c.element();
  // Emit the record's string representation.
  // NOTE(review): relies on IndexedRecord.toString() producing the format the
  // downstream Elasticsearch writer expects — confirm against the sink.
  c.output(record.toString());
}
Example 19
Source File: DoFnInvokersTest.java From beam with Apache License 2.0 | 4 votes |
// Process-element method declared without a body (interface/abstract member);
// presumably implemented or mocked by the invoker tests — TODO confirm against
// the enclosing type, which is outside this view.
@DoFn.ProcessElement
void processElement(DoFn<String, String>.ProcessContext c);
Example 20
Source File: TestParseUserAgentSQL.java From yauaa with Apache License 2.0 | 4 votes |
@DoFn.ProcessElement
public void processElement(ProcessContext c) {
  // Side-effect only: log the row together with its schema; nothing is emitted.
  final Row row = c.element();
  LOG.info("ROW: {} --> {}", row, row.getSchema());
}