Java Code Examples for org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer#setStartFromEarliest()

The following examples show how to use org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer#setStartFromEarliest(). The original project and source file for each example are listed above the code.
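Before the project-specific examples, here is a minimal, self-contained sketch of the call in isolation. The class name, topic, broker address, and consumer group id are placeholder values chosen for illustration; they are not taken from any of the projects below.

import java.util.Properties;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

public class EarliestOffsetExample {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "localhost:9092"); // placeholder broker
        properties.setProperty("group.id", "example-group");           // placeholder group id

        FlinkKafkaConsumer<String> consumer =
                new FlinkKafkaConsumer<>("input-topic", new SimpleStringSchema(), properties);

        // Ignore any committed group offsets and read every partition from the
        // earliest available offset (applies to fresh starts without restored state).
        consumer.setStartFromEarliest();

        DataStream<String> stream = env.addSource(consumer).name("kafka-source");
        stream.print();

        env.execute("setStartFromEarliest example");
    }
}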
Example 1
Source File: KafkaItemTransactionJob.java    From flink-tutorials with Apache License 2.0
public DataStream<ItemTransaction> readTransactionStream(ParameterTool params, StreamExecutionEnvironment env) {
	// We read the ItemTransaction objects directly using the schema
	FlinkKafkaConsumer<ItemTransaction> transactionSource = new FlinkKafkaConsumer<>(
			params.getRequired(TRANSACTION_INPUT_TOPIC_KEY), new TransactionSchema(),
			Utils.readKafkaProperties(params, true));

	transactionSource.setCommitOffsetsOnCheckpoints(true);
	transactionSource.setStartFromEarliest();

	// If event time processing is enabled, we assign trailing watermarks for each partition
	transactionSource.assignTimestampsAndWatermarks(new BoundedOutOfOrdernessTimestampExtractor<ItemTransaction>(Time.minutes(1)) {
		@Override
		public long extractTimestamp(ItemTransaction transaction) {
			return transaction.ts;
		}
	});

	return env.addSource(transactionSource)
			.name("Kafka Transaction Source")
			.uid("Kafka Transaction Source");
}
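A note on the setCommitOffsetsOnCheckpoints(true) call above: it only takes effect when checkpointing is enabled on the execution environment; otherwise the consumer falls back to the auto-commit behaviour configured in the Kafka properties. A minimal sketch of enabling it, assuming it is done on the same environment passed into readTransactionStream() (the 10-second interval is an arbitrary placeholder, not a value from the flink-tutorials project):

// Assumption: configured elsewhere in the job on the same StreamExecutionEnvironment.
env.enableCheckpointing(10_000);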
 
Example 2
Source File: KafkaSourceProvider.java    From flink-statefun with Apache License 2.0
// Translate the ingress spec's startup position into the corresponding
// FlinkKafkaConsumer start-position setting.
private static <T> void configureStartupPosition(
    FlinkKafkaConsumer<T> consumer, KafkaIngressStartupPosition startupPosition) {
  if (startupPosition.isGroupOffsets()) {
    consumer.setStartFromGroupOffsets();
  } else if (startupPosition.isEarliest()) {
    consumer.setStartFromEarliest();
  } else if (startupPosition.isLatest()) {
    consumer.setStartFromLatest();
  } else if (startupPosition.isSpecificOffsets()) {
    KafkaIngressStartupPosition.SpecificOffsetsPosition offsetsPosition =
        startupPosition.asSpecificOffsets();
    consumer.setStartFromSpecificOffsets(
        convertKafkaTopicPartitionMap(offsetsPosition.specificOffsets()));
  } else if (startupPosition.isDate()) {
    KafkaIngressStartupPosition.DatePosition datePosition = startupPosition.asDate();
    consumer.setStartFromTimestamp(datePosition.epochMilli());
  } else {
    throw new IllegalStateException("Safe guard; should not occur");
  }
}
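For reference, the specific-offsets branch above ends up calling FlinkKafkaConsumerBase#setStartFromSpecificOffsets, which takes a Map&lt;KafkaTopicPartition, Long&gt;. The sketch below shows the shape of such a map with invented topic, partition, and offset values; it does not reproduce the project's convertKafkaTopicPartitionMap() helper.

// Sketch only: the topic name, partition numbers, and offsets are invented.
// Requires java.util.HashMap/Map and
// org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartition.
private static <T> void startFromHandPickedOffsets(FlinkKafkaConsumer<T> consumer) {
  Map<KafkaTopicPartition, Long> specificOffsets = new HashMap<>();
  specificOffsets.put(new KafkaTopicPartition("my-topic", 0), 23L);
  specificOffsets.put(new KafkaTopicPartition("my-topic", 1), 31L);

  // Partitions of the subscribed topics that are not listed in the map fall back
  // to the default group-offsets behaviour.
  consumer.setStartFromSpecificOffsets(specificOffsets);
}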
 
Example 3
Source File: SpanCountJob.java    From Mastering-Distributed-Tracing with MIT License
public static void main(String[] args) throws Exception {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    Properties properties = new Properties();
    properties.setProperty("bootstrap.servers", "localhost:9092");
    properties.setProperty("group.id", "tracefeatures");

    FlinkKafkaConsumer<Span> consumer = new FlinkKafkaConsumer<>(//
            "jaeger-spans", //
            new ProtoUnmarshaler(), properties);

    // replay Kafka stream from beginning, useful for testing
    consumer.setStartFromEarliest();

    DataStream<Span> spans = env.addSource(consumer).name("spans");
    DataStream<Trace> traces = aggregateSpansToTraces(spans);
    DataStream<TraceSummary> spanCounts = countSpansByService(traces);

    spanCounts.print();
    spanCounts.addSink(ESSink.build());

    // execute program
    env.execute("Span Count Job");
}
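Replaying the topic from the beginning, as the comment in this example notes, is convenient for testing. In a long-running deployment you would more typically resume from the committed offsets of the "tracefeatures" group; also note that all setStartFrom* settings only apply to fresh starts, and when the job is restored from a checkpoint or savepoint the offsets stored in that state take precedence. A hedged sketch of the alternative, using the same consumer variable as above:

// Hypothetical production variant: resume from the offsets committed for the
// "tracefeatures" group instead of replaying "jaeger-spans" from the earliest offset.
consumer.setStartFromGroupOffsets();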
 
Example 4
Source File: KafkaSourceBuilder.java    From Alink with Apache License 2.0
@Override
public RichParallelSourceFunction<Row> build() {
    FlinkKafkaConsumer<Row> consumer;
    // Subscribe either to every topic matching the pattern or to the single configured topic.
    if (!StringUtils.isNullOrWhitespaceOnly(topicPattern)) {
        Pattern pattern = Pattern.compile(topicPattern);
        consumer = new FlinkKafkaConsumer<Row>(pattern, new MessageDeserialization(), properties);
    } else {
        consumer = new FlinkKafkaConsumer<Row>(topic, new MessageDeserialization(), properties);
    }

    // Apply the configured startup mode to the consumer.
    switch (super.startupMode) {
        case LATEST: {
            consumer.setStartFromLatest();
            break;
        }
        case EARLIEST: {
            consumer.setStartFromEarliest();
            break;
        }
        case GROUP_OFFSETS: {
            consumer.setStartFromGroupOffsets();
            break;
        }
        case TIMESTAMP: {
            consumer.setStartFromTimestamp(startTimeMs);
            break;
        }
        default: {
            throw new IllegalArgumentException("invalid startupMode.");
        }
    }

    return consumer;
}
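The TIMESTAMP branch above passes startTimeMs, an epoch-millisecond value, to setStartFromTimestamp. As an illustration independent of Alink's builder (the one-hour lookback is an arbitrary placeholder), a caller could derive such a value like this:

// Start roughly one hour back; each partition begins at the earliest record whose
// timestamp is greater than or equal to this value.
long startTimeMs = java.time.Instant.now().minus(java.time.Duration.ofHours(1)).toEpochMilli();
consumer.setStartFromTimestamp(startTimeMs);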