Java Code Examples for org.apache.kafka.clients.consumer.KafkaConsumer#assignment()

The following examples show how to use org.apache.kafka.clients.consumer.KafkaConsumer#assignment(). You can vote up the examples you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: SeekDemoAssignment.java    From kafka_book_demo with Apache License 2.0 6 votes vote down vote up
public static void main(String[] args) {
    Properties props = initConfig();
    KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
    consumer.subscribe(Arrays.asList(topic));

    // Measure how long it takes for the group rebalance to hand this
    // consumer its partitions: poll in short bursts until assignment()
    // returns a non-empty set.
    final long start = System.currentTimeMillis();
    Set<TopicPartition> assignment = consumer.assignment();
    while (assignment.isEmpty()) {
        consumer.poll(Duration.ofMillis(100));
        assignment = consumer.assignment();
    }
    final long end = System.currentTimeMillis();
    System.out.println(end - start);
    System.out.println(assignment);

    // Rewind every assigned partition to offset 10 before consuming.
    assignment.forEach(tp -> consumer.seek(tp, 10));

    for (;;) {
        ConsumerRecords<String, String> records =
                consumer.poll(Duration.ofMillis(1000));
        // Consume the batch.
        records.forEach(record ->
                System.out.println(record.offset() + ":" + record.value()));
    }
}
 
Example 2
Source File: SeekDemo.java    From kafka_book_demo with Apache License 2.0 6 votes vote down vote up
public static void main(String[] args) {
    Properties props = initConfig();
    KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
    consumer.subscribe(Arrays.asList(topic));

    // FIX: a single 2-second poll gave no guarantee that the group rebalance
    // had finished; assignment() could still return an empty set, in which
    // case the seek loop below silently did nothing. Poll in short bursts
    // until partitions are actually assigned.
    Set<TopicPartition> assignment = consumer.assignment();
    while (assignment.isEmpty()) {
        consumer.poll(Duration.ofMillis(100));
        assignment = consumer.assignment();
    }
    System.out.println(assignment);

    // Rewind every assigned partition to offset 10 before consuming.
    for (TopicPartition tp : assignment) {
        consumer.seek(tp, 10);
    }

    while (true) {
        ConsumerRecords<String, String> records =
                consumer.poll(Duration.ofMillis(1000));
        // Consume the batch.
        for (ConsumerRecord<String, String> record : records) {
            System.out.println(record.offset() + ":" + record.value());
        }
    }
}
 
Example 3
Source File: SeekDemoAssignment.java    From BigData-In-Practice with Apache License 2.0 6 votes vote down vote up
public static void main(String[] args) {
    KafkaConsumer<String, String> consumer = new ConsumerFactory<String, String>().create();

    // Time how long the initial partition assignment takes: keep polling
    // until the group coordinator hands this consumer its partitions.
    final long start = System.currentTimeMillis();
    Set<TopicPartition> assignment;
    do {
        consumer.poll(Duration.ofMillis(100));
        assignment = consumer.assignment();
    } while (assignment.isEmpty());
    System.out.println(System.currentTimeMillis() - start);
    System.out.println(assignment);

    // Rewind each assigned partition to offset 10 before consuming.
    assignment.forEach(tp -> consumer.seek(tp, 10));

    for (;;) {
        ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
        // Consume the batch.
        records.forEach(record ->
                System.out.println(record.offset() + ":" + record.value()));
    }
}
 
Example 4
Source File: SeekDemo.java    From BigData-In-Practice with Apache License 2.0 6 votes vote down vote up
public static void main(String[] args) {
    KafkaConsumer<String, String> consumer = new ConsumerFactory<String, String>().create();

    // FIX: a single 2-second poll did not guarantee the rebalance had
    // finished; assignment() could still be empty and the seek loop below
    // would silently do nothing. Poll until partitions are assigned.
    Set<TopicPartition> assignment = consumer.assignment();
    while (assignment.isEmpty()) {
        consumer.poll(Duration.ofMillis(100));
        assignment = consumer.assignment();
    }
    System.out.println(assignment);

    // Rewind every assigned partition to offset 10 before consuming.
    for (TopicPartition tp : assignment) {
        consumer.seek(tp, 10);
    }

    while (true) {
        ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
        // Consume the batch.
        for (ConsumerRecord<String, String> record : records) {
            System.out.println(record.offset() + ":" + record.value());
        }
    }
}
 
Example 5
Source File: SeekToEnd.java    From BigData-In-Practice with Apache License 2.0 6 votes vote down vote up
public static void main(String[] args) {
    KafkaConsumer<String, String> consumer = new ConsumerFactory<String, String>().create();

    // Block until the group coordinator has assigned partitions.
    Set<TopicPartition> assignment;
    do {
        consumer.poll(Duration.ofMillis(100));
        assignment = consumer.assignment();
    } while (assignment.isEmpty());

    // Look up the log-end offset of every assigned partition.
    Map<TopicPartition, Long> offsets = consumer.endOffsets(assignment);
    // NOTE(review): this seeks to endOffset + 1, i.e. one position PAST the
    // log end — presumably to demonstrate an out-of-range seek; confirm
    // intent (a plain seek-to-end would use offsets.get(tp)).
    assignment.forEach(tp -> consumer.seek(tp, offsets.get(tp) + 1));

    System.out.println(assignment);
    System.out.println(offsets);

    for (;;) {
        ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
        // Consume the batch.
        records.forEach(record ->
                System.out.println(record.offset() + ":" + record.value()));
    }
}
 
Example 6
Source File: KafkaConsumerFromOffset.java    From post-kafka-rewind-consumer-offset with MIT License 6 votes vote down vote up
public static void main(String[] args) {
    KafkaConsumer<String, String> consumer = createConsumer();
    consumer.subscribe(Arrays.asList(TOPIC));

    // One-shot rewind: performed once, after partitions are assigned.
    boolean flag = true;

    while (true) {
        // NOTE: poll(long) is deprecated since Kafka 2.0 — prefer
        // poll(Duration) where the client version allows it.
        ConsumerRecords<String, String> records = consumer.poll(100);

        if (flag) {
            Set<TopicPartition> assignments = consumer.assignment();
            // FIX: the original seeked even when the rebalance had not
            // finished (empty assignment -> silent no-op), and then printed
            // the batch fetched *before* the seek took effect. Wait for a
            // non-empty assignment, rewind to offset 90, and discard the
            // pre-seek batch.
            if (!assignments.isEmpty()) {
                assignments.forEach(topicPartition ->
                        consumer.seek(topicPartition, 90));
                flag = false;
            }
            continue;
        }

        for (ConsumerRecord<String, String> record : records)
            System.out.printf("offset = %d, key = %s, value = %s%n", record.offset(), record.key(), record.value());
    }
}
 
Example 7
Source File: KafkaConsumerFromBeginning.java    From post-kafka-rewind-consumer-offset with MIT License 6 votes vote down vote up
public static void main(String[] args) {

    KafkaConsumer<String, String> consumer = KafkaConsumerUtil.createConsumer();
    consumer.subscribe(Arrays.asList(TOPIC));

    // One-shot rewind-to-beginning: performed once, after assignment.
    boolean flag = true;

    while (true) {
        // NOTE: poll(long) is deprecated since Kafka 2.0 — prefer
        // poll(Duration) where the client version allows it.
        ConsumerRecords<String, String> records = consumer.poll(100);

        if (flag) {
            Set<TopicPartition> assignments = consumer.assignment();
            // FIX: seekToBeginning on an empty assignment was a silent no-op
            // (the rebalance may not have finished yet), and the original
            // then printed the batch fetched *before* the rewind took
            // effect. Wait for the assignment and drop the pre-seek batch.
            if (!assignments.isEmpty()) {
                consumer.seekToBeginning(assignments);
                flag = false;
            }
            continue;
        }

        for (ConsumerRecord<String, String> record : records)
            System.out.printf("offset = %d, key = %s, value = %s%n", record.offset(), record.key(), record.value());
    }
}
 
Example 8
Source File: SeekToEnd.java    From kafka_book_demo with Apache License 2.0 5 votes vote down vote up
public static void main(String[] args) {
    Properties props = initConfig();
    KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
    consumer.subscribe(Arrays.asList(topic));

    // Poll in short bursts until the rebalance assigns partitions.
    Set<TopicPartition> assignment = consumer.assignment();
    while (assignment.isEmpty()) {
        consumer.poll(Duration.ofMillis(100));
        assignment = consumer.assignment();
    }

    // Fetch each partition's log-end offset.
    Map<TopicPartition, Long> offsets = consumer.endOffsets(assignment);
    // NOTE(review): seeking to endOffset + 1 positions the consumer one
    // step PAST the log end — presumably deliberate, to demonstrate an
    // out-of-range seek; confirm intent.
    for (TopicPartition tp : assignment) {
        consumer.seek(tp, offsets.get(tp) + 1);
    }

    System.out.println(assignment);
    System.out.println(offsets);

    for (;;) {
        ConsumerRecords<String, String> records =
                consumer.poll(Duration.ofMillis(1000));
        // Consume the batch.
        records.forEach(record ->
                System.out.println(record.offset() + ":" + record.value()));
    }
}
 
Example 9
Source File: KafkaConsumerFromTime.java    From post-kafka-rewind-consumer-offset with MIT License 5 votes vote down vote up
public static void main(String[] args) {
    KafkaConsumer<String, String> consumer = createConsumer();
    consumer.subscribe(Arrays.asList(TOPIC));

    // One-shot rewind-by-timestamp: performed once, after assignment.
    boolean flag = true;

    while (true) {
        // NOTE: poll(long) is deprecated since Kafka 2.0 — prefer
        // poll(Duration) where the client version allows it.
        ConsumerRecords<String, String> records = consumer.poll(100);

        if (flag) {
            Set<TopicPartition> assignments = consumer.assignment();
            // FIX: the original built the timestamp query even when the
            // rebalance had not finished (empty assignment -> no seek), and
            // printed the batch fetched *before* the seek took effect.
            if (!assignments.isEmpty()) {
                // Ask the broker: "what offset was current 10 minutes ago?"
                Map<TopicPartition, Long> query = new HashMap<>();
                for (TopicPartition topicPartition : assignments) {
                    query.put(
                            topicPartition,
                            Instant.now().minus(10, MINUTES).toEpochMilli());
                }

                Map<TopicPartition, OffsetAndTimestamp> result = consumer.offsetsForTimes(query);

                // offsetsForTimes maps a partition to null when no message
                // has a timestamp at or after the query time; fall back to
                // offset 0 in that case.
                // FIX: new Long(0) used a deprecated boxing constructor.
                result.forEach((topicPartition, offsetAndTimestamp) ->
                        consumer.seek(
                                topicPartition,
                                offsetAndTimestamp == null ? 0L : offsetAndTimestamp.offset()));

                flag = false;
            }
            // Discard the batch fetched before the rewind took effect.
            continue;
        }

        for (ConsumerRecord<String, String> record : records)
            System.out.printf("offset = %d, key = %s, value = %s%n", record.offset(), record.key(), record.value());
    }
}
 
Example 10
Source File: NewApiTopicConsumer.java    From jeesuite-libs with Apache License 2.0 5 votes vote down vote up
/**
 * Re-seeks each assigned, expected partition back to the offset this
 * application last recorded as processed, so records between that local
 * offset and the (later) committed offset are replayed.
 */
private void resetCorrectOffsets(ConsumerWorker worker) {	
	
	KafkaConsumer<String, Serializable> consumer = worker.consumer;
	Map<String, List<PartitionInfo>> topicInfos = consumer.listTopics();
	
	// Only topics this application has registered handlers for.
	List<String> expectTopics = new ArrayList<>(topicHandlers.keySet());
	
	consumer.poll(200);
	
	for (String topic : topicInfos.keySet()) {
		if(!expectTopics.contains(topic))continue;
		
		List<PartitionInfo> partitions = topicInfos.get(topic);
		for (PartitionInfo partition : partitions) {
			try {
				// Offset this application last processed locally (<= 0 means unknown).
				long expectOffsets = consumerContext.getLatestProcessedOffsets(topic, partition.partition());
				
				TopicPartition topicPartition = new TopicPartition(partition.topic(), partition.partition());
				// FIX: committed() returns null when the group has never
				// committed an offset for this partition; the old code NPE'd
				// here and the catch below silently swallowed it.
				OffsetAndMetadata metadata = consumer.committed(topicPartition);
				if (metadata == null) {
					continue;
				}
				
				Set<TopicPartition> assignment = consumer.assignment();
				// Only rewind partitions we actually own, and only backwards.
				if(assignment.contains(topicPartition)){
					if(expectOffsets > 0 && expectOffsets < metadata.offset()){
						consumer.seek(topicPartition, expectOffsets);
				        logger.info(">>>>>>> seek Topic[{}] partition[{}] from {} to {}",topic, partition.partition(),metadata.offset(),expectOffsets);
					}
				}
			} catch (Exception e) {
				// FIX: log the exception itself instead of dropping the stack trace.
				logger.warn("try seek topic["+topic+"] partition["+partition.partition()+"] offsets error", e);
			}
		}
	}
	consumer.resume(consumer.assignment());
}
 
Example 11
Source File: KafkaOperationsTest.java    From kafka-webview with MIT License 4 votes vote down vote up
/**
 * Helper method to consumer records from a topic.
 *  @param topics topics to consume from.
 * @param consumerId Consumer's consumerId
 * @param consumerPrefix Any consumer Id prefix.
 */
/**
 * Creates a consumer subscribed to the given topics, waits until partitions
 * are assigned, then commits the resulting offsets to Kafka.
 *
 * @param topics topics to consume from.
 * @param consumerId the consumer's id.
 * @param consumerPrefix prefix applied to the consumer id.
 * @return the subscribed consumer, after offsets have been committed.
 */
private KafkaConsumer<String, String> consumeFromTopics(final Collection<String> topics, final String consumerId, final String consumerPrefix) {
    // Cluster connection settings.
    final ClusterConfig cluster = ClusterConfig.newBuilder()
        .withBrokerHosts(sharedKafkaTestResource.getKafkaConnectString())
        .build();

    // Deserializer settings, including options that attempt to override real
    // consumer configs — those should get filtered out.
    final DeserializerConfig deserializers = DeserializerConfig.newBuilder()
        .withKeyDeserializerClass(KafkaConsumerFactoryTest.TestDeserializer.class)
        .withKeyDeserializerOption("key.option", "key.value")
        .withKeyDeserializerOption("key.option2", "key.value2")
        .withKeyDeserializerOption(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "MadeUpValue")
        .withValueDeserializerClass(KafkaConsumerFactoryTest.TestDeserializer.class)
        .withValueDeserializerOption("value.option", "value.value")
        .withValueDeserializerOption("value.option2", "value.value2")
        .withValueDeserializerOption(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, "MadeUpValue")
        .build();

    // Topic configuration, seeded from the first topic in the collection.
    final String firstTopic = topics.iterator().next();
    final org.sourcelab.kafka.webview.ui.manager.kafka.config.TopicConfig topicCfg =
        new org.sourcelab.kafka.webview.ui.manager.kafka.config.TopicConfig(cluster, deserializers, firstTopic);

    // No record filtering.
    final FilterConfig filters = FilterConfig.withNoFilters();

    // Client settings: all partitions, starting at the head position.
    final ClientConfig clientCfg = ClientConfig.newBuilder()
        .withConsumerId(consumerId)
        .withFilterConfig(filters)
        .withAllPartitions()
        .withStartingPosition(StartingPosition.newHeadPosition())
        .withMaxResultsPerPartition(100)
        .withTopicConfig(topicCfg)
        .build();

    // Build the consumer, storing its state in Kafka.
    final KafkaConsumerFactory factory = new KafkaConsumerFactory(new KafkaClientConfigUtil("not/used", consumerPrefix));
    final KafkaConsumer<String, String> kafkaConsumer = factory.createConsumerAndSubscribe(clientCfg);

    // Re-subscribe to the full topic collection.
    kafkaConsumer.unsubscribe();
    kafkaConsumer.subscribe(topics);

    // Poll until the group assignment completes (bounded retries).
    for (int attempt = 0; attempt < 10; attempt++) {
        kafkaConsumer.poll(Duration.ofMillis(1000L));
        if (!kafkaConsumer.assignment().isEmpty()) {
            break;
        }
    }

    // Persist the consumed positions back to Kafka.
    kafkaConsumer.commitSync();

    return kafkaConsumer;
}