Java Code Examples for org.apache.kafka.clients.consumer.KafkaConsumer#wakeup()

The following examples show how to use org.apache.kafka.clients.consumer.KafkaConsumer#wakeup(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: KafkaDispatcherImpl.java    From arcusplatform with Apache License 2.0 5 votes vote down vote up
/**
 * Publishes the new partition assignment and nudges the active consumer
 * (if any) out of its blocking poll so it can react to the change.
 */
@Override
public void updatePartitions(Set<PlatformPartition> partitions) {
	partitionRef.set(partitions);
	KafkaConsumer<?, ?> current = consumerRef.get();
	if (current == null) {
		return;
	}
	current.wakeup();
}
 
Example 2
Source File: KafkaDispatcherImpl.java    From arcusplatform with Apache License 2.0 5 votes vote down vote up
/**
 * Atomically clears the consumer reference and wakes the consumer thread so
 * it can exit its poll loop; logs a warning if shutdown was already done.
 */
@Override
public void shutdown() {
	KafkaConsumer<?, ?> previous = consumerRef.getAndSet(null);
	if (previous != null) {
		previous.wakeup();
		return;
	}
	logger.warn("Ignoring stop request, consumer already shutdown");
}
 
Example 3
Source File: NativeKafkaWithAvroDecoderTest.java    From hermes with Apache License 2.0 4 votes vote down vote up
/**
 * End-to-end round trip through a native Kafka producer/consumer pair using
 * Confluent's Avro serializer/deserializer: produces {@code msgNum} events,
 * consumes them on a background thread, and compares the message counts.
 *
 * NOTE(review): broker and schema-registry addresses are empty strings, so
 * this presumably relies on external configuration — verify before running.
 */
@Test
public void testNative() throws IOException, InterruptedException, ExecutionException {
	final String topic = "kafka.SimpleAvroTopic";
	int msgNum = 200;
	// Counts down once per consumed record; the main thread blocks on this
	// latch until every produced message has been received.
	final CountDownLatch countDown = new CountDownLatch(msgNum);

	Properties producerProps = new Properties();
	producerProps.put("bootstrap.servers", "");

	// Avro Decoder/Encoder
	CachedSchemaRegistryClient schemaRegistry = new CachedSchemaRegistryClient("",
	      AbstractKafkaAvroSerDeConfig.MAX_SCHEMAS_PER_SUBJECT_DEFAULT);
	Map<String, String> configs = new HashMap<String, String>();
	configs.put("schema.registry.url", "");

	// configure(..., true) marks the instance as the key (de)serializer,
	// configure(..., false) as the value (de)serializer.
	KafkaAvroSerializer avroKeySerializer = new KafkaAvroSerializer();
	avroKeySerializer.configure(configs, true);
	KafkaAvroSerializer avroValueSerializer = new KafkaAvroSerializer();
	avroValueSerializer.configure(configs, false);

	Map<String, String> deserializerConfigs = new HashMap<String, String>();
	deserializerConfigs.put("specific.avro.reader", Boolean.TRUE.toString());
	deserializerConfigs.put("schema.registry.url", "");
	KafkaAvroDeserializer avroKeyDeserializer = new KafkaAvroDeserializer(schemaRegistry, deserializerConfigs);
	avroKeyDeserializer.configure(configs, true);
	KafkaAvroDeserializer avroValueDeserializer = new KafkaAvroDeserializer(schemaRegistry, deserializerConfigs);
	avroValueDeserializer.configure(configs, false);

	// Consumer
	final Properties consumerProps = new Properties();
	consumerProps.put("bootstrap.servers", "");
	consumerProps.put("group.id", "GROUP_" + topic);

	final List<Object> actualResult = new ArrayList<Object>();
	final List<Object> expectedResult = new ArrayList<Object>();

	final KafkaConsumer<Object, Object> consumer = new KafkaConsumer<Object, Object>(consumerProps,
	      avroKeyDeserializer, avroValueDeserializer);
	consumer.subscribe(Arrays.asList(topic));

	// Background poll loop; stopped via shutdown(), which sets the flag and
	// then calls consumer.wakeup() to break out of a blocking poll().
	class KafkaConsumerThread implements Runnable {

		private final AtomicBoolean closed = new AtomicBoolean(false);

		public void run() {
			try {
				while (!closed.get()) {
					ConsumerRecords<Object, Object> records = consumer.poll(100);
					for (ConsumerRecord<Object, Object> consumerRecord : records) {
						System.out.println("received: " + consumerRecord.value());
						actualResult.add(consumerRecord.value());
						countDown.countDown();
					}
				}
			} catch (WakeupException e) {
				// WakeupException is the expected signal during shutdown;
				// rethrow only if it arrived while we were still meant to run.
				if (!closed.get())
					throw e;
			} finally {
				consumer.commitSync();
				consumer.close();
			}
		}

		public void shutdown() {
			closed.set(true);
			consumer.wakeup();
		}
	}

	KafkaConsumerThread thread = new KafkaConsumerThread();
	new Thread(thread).start();

	KafkaProducer<Object, Object> producer = new KafkaProducer<Object, Object>(producerProps, avroKeySerializer,
	      avroValueSerializer);
	int i = 0;
	while (i++ < msgNum) {
		ProducerRecord<Object, Object> data = new ProducerRecord<Object, Object>(topic, null,
		      (Object) KafkaAvroTest.generateEvent());
		Future<RecordMetadata> send = producer.send(data);
		// get() blocks until the broker acknowledges the send, so the
		// isDone() check below is always true at this point.
		send.get();
		if (send.isDone()) {
			System.out.println("sending: " + data.value());
			expectedResult.add(data.value());
		}
	}

	// NOTE(review): no timeout here — a single lost message hangs the test
	// forever; consider countDown.await(timeout, unit) instead.
	countDown.await();

	thread.shutdown();
	producer.close();

	// Compares only the counts, not the payloads themselves.
	Assert.assertEquals(expectedResult.size(), actualResult.size());
}