kafka.message.Message Java Examples

The following examples show how to use kafka.message.Message, the message class from Kafka's legacy Scala client API. Each example is taken from an open-source project; the source file and license are noted above each snippet.
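Almost every example below boils down to the same pattern: drain the ByteBuffer returned by Message.payload() (and, when present, Message.key()) into a byte array. As a minimal, self-contained sketch of that pattern — the class and method names here are illustrative and not taken from any of the projects below:

import java.nio.ByteBuffer;

import kafka.message.Message;

public class MessagePayloadSketch {

    // Copy a ByteBuffer's readable bytes; duplicate() leaves the
    // original buffer's position untouched.
    static byte[] toBytes(ByteBuffer buf) {
        byte[] bytes = new byte[buf.remaining()];
        buf.duplicate().get(bytes);
        return bytes;
    }

    static void dump(Message message) {
        byte[] payload = toBytes(message.payload());
        ByteBuffer keyBuffer = message.key(); // null for unkeyed messages
        String key = (keyBuffer == null) ? null : new String(toBytes(keyBuffer));
        System.out.println("key=" + key + " payload=" + new String(payload));
    }
}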
Example #1
Source File: KafkaBoltTest.java    From storm-kafka-0.8-plus with Apache License 2.0
private boolean verifyMessage(String key, String message) {
    long lastMessageOffset = KafkaUtils.getOffset(simpleConsumer, kafkaConfig.topic, 0, OffsetRequest.LatestTime()) - 1;
    ByteBufferMessageSet messageAndOffsets = KafkaUtils.fetchMessages(kafkaConfig, simpleConsumer,
            new Partition(Broker.fromString(broker.getBrokerConnectionString()), 0), lastMessageOffset);
    MessageAndOffset messageAndOffset = messageAndOffsets.iterator().next();
    Message kafkaMessage = messageAndOffset.message();
    ByteBuffer messageKeyBuffer = kafkaMessage.key();
    String keyString = null;
    String messageString = new String(Utils.toByteArray(kafkaMessage.payload()));
    if (messageKeyBuffer != null) {
        keyString = new String(Utils.toByteArray(messageKeyBuffer));
    }
    assertEquals(key, keyString);
    assertEquals(message, messageString);
    return true;
}
 
Example #2
Source File: RawMessageHandlerTest.java    From elasticsearch-river-kafka with Apache License 2.0
public void testIt() throws Exception
{
	byte[] data = "somedata".getBytes();
	List<String> l = new ArrayList<>();
	
	MessageHandler m = new RawMessageHandler();
	Message message = createMock(Message.class);
	expect(message.payload()).andReturn(ByteBuffer.wrap(data));
	
	BulkRequestBuilder bulkRequestBuilder = createMock(BulkRequestBuilder.class);
	expect(bulkRequestBuilder.add(aryEq(data), eq(0), eq(data.length), eq(false))).andReturn(null);
	replay(message, bulkRequestBuilder);
	
	m.handle(bulkRequestBuilder, message);
	verify(bulkRequestBuilder, message);
}
 
Example #3
Source File: JsonMessageHandlerTest.java    From elasticsearch-river-kafka with Apache License 2.0
public void testReadMessage() throws Exception
{
	JsonMessageHandler h = new JsonMessageHandler(null);	
	byte[] json = toJson(rec).getBytes();
	Message message = createMock(Message.class);
	expect(message.payload()).andReturn(ByteBuffer.wrap(json));
	replay(message);
	
	try {
		h.readMessage(message);
	} catch (Exception e) {
		fail("This should not fail");
	}
	
	verify(message);
}
 
Example #4
Source File: KafkaSinglePortStringInputOperator.java    From attic-apex-malhar with Apache License 2.0
/**
 * Implement abstract method of AbstractKafkaSinglePortInputOperator.
 *
 * @param message the Kafka message to convert
 * @return the message payload decoded as a String
 */
@Override
public String getTuple(Message message)
{
  String data = "";
  try {
    ByteBuffer buffer = message.payload();
    byte[] bytes = new byte[buffer.remaining()];
    buffer.get(bytes);
    data = new String(bytes);
    //logger.debug("Consuming {}", data);
  } catch (Exception ex) {
    // on a decode error, return whatever has been read so far (the empty string by default)
    return data;
  }
  return data;
}
 
Example #5
Source File: JsonMessageHandlerTest.java    From elasticsearch-river-kafka with Apache License 2.0
public void testGettersFromReadMessageReturnedMap() throws Exception
{
	JsonMessageHandler h = new JsonMessageHandler(null);	
	byte[] json = toJson(rec).getBytes();
	Message message = createMock(Message.class);
	
	expect(message.payload()).andReturn(ByteBuffer.wrap(json));
	replay(message);
	
	try {
		h.readMessage(message);
	} catch (Exception e) {
		fail("This should not fail");
	}
	
	assertEquals(h.getIndex(), rec.get("index"));
	assertEquals(h.getType(), rec.get("type"));
	assertEquals(h.getSource(), rec.get("source"));
	assertEquals(h.getId(), rec.get("id"));
	verify(message);
}
 
Example #6
Source File: KafkaSourceTest.java    From flume-ng-kafka-source with Apache License 2.0
@SuppressWarnings("unchecked")
@Before
public void setup() throws Exception {
	mockIt = mock(ConsumerIterator.class);
	mockMessageAndMetadata = mock(MessageAndMetadata.class);
	mockChannelProcessor = mock(ChannelProcessor.class);
	mockBuffer = mock(ByteBuffer.class);
	mockMessage = mock(Message.class);
	mockKafkaSource = new KafkaSource();
	
	when(mockMessage.payload()).thenReturn(mockBuffer);
	when(mockMessageAndMetadata.message()).thenReturn(mockMessage);
	
	Field field = AbstractSource.class.getDeclaredField("channelProcessor");
	field.setAccessible(true);
	field.set(mockKafkaSource, mockChannelProcessor);

	field = KafkaSource.class.getDeclaredField("it");
	field.setAccessible(true);
	field.set(mockKafkaSource, mockIt);
}
 
Example #7
Source File: PartitionConsumer.java    From jstorm with Apache License 2.0
@SuppressWarnings("unchecked")
public Iterable<List<Object>> generateTuples(Message msg) {
    Iterable<List<Object>> tups = null;
    ByteBuffer payload = msg.payload();
    if (payload == null) {
        return null;
    }
    tups = Arrays.asList(Utils.tuple(Utils.toByteArray(payload)));
    return tups;
}
 
Example #8
Source File: KafkaDeserializerExtractorTest.java    From incubator-gobblin with Apache License 2.0
private ByteArrayBasedKafkaRecord getMockMessageAndOffset(ByteBuffer payload) {
  MessageAndOffset mockMessageAndOffset = mock(MessageAndOffset.class);
  Message mockMessage = mock(Message.class);
  when(mockMessage.payload()).thenReturn(payload);
  when(mockMessageAndOffset.message()).thenReturn(mockMessage);
  return new Kafka08ConsumerRecord(mockMessageAndOffset, "test", 0);
}
 
Example #9
Source File: KafkaPartitionLevelConsumerTest.java    From incubator-pinot with Apache License 2.0
@Override
public ByteBufferMessageSet messageSet(String topic, int partition) {
  if (errorMap.containsKey(new TopicAndPartition(topic, partition))) {
    throw new IllegalArgumentException();
  } else {
    // TODO Maybe generate dummy messages here?
    return new ByteBufferMessageSet(Collections.<Message>emptyList());
  }
}
 
Example #10
Source File: KafkaReader.java    From HiveKa with Apache License 2.0
/**
 * Fetches the next Kafka message and stuffs the result into the key and
 * value arguments.
 *
 * @param key the Kafka key to populate
 * @param payload the writable that receives the message payload
 * @param pKey the writable that receives the message key, if present
 * @return true if more events exist
 * @throws IOException
 */
public boolean getNext(KafkaKey key, BytesWritable payload, BytesWritable pKey) throws IOException {
  if (hasNext()) {

    MessageAndOffset msgAndOffset = messageIter.next();
    Message message = msgAndOffset.message();

    ByteBuffer buf = message.payload();
    int origSize = buf.remaining();
    byte[] bytes = new byte[origSize];
    // the second argument of ByteBuffer.get(byte[], int, int) is the offset
    // into the destination array, so it must be 0 here, not buf.position()
    buf.get(bytes, 0, origSize);
    payload.set(bytes, 0, origSize);

    buf = message.key();
    if (buf != null) {
      origSize = buf.remaining();
      bytes = new byte[origSize];
      buf.get(bytes, 0, origSize); // same fix: destination offset 0
      pKey.set(bytes, 0, origSize);
    }

    key.clear();
    key.set(kafkaRequest.getTopic(), kafkaRequest.getLeaderId(),
        kafkaRequest.getPartition(), currentOffset,
        msgAndOffset.offset() + 1, message.checksum());

    key.setMessageSize(msgAndOffset.message().size());

    currentOffset = msgAndOffset.offset() + 1; // increase offset
    currentCount++; // increase count

    return true;
  } else {
    return false;
  }
}
 
Example #11
Source File: KafkaTestConsumer.java    From attic-apex-malhar with Apache License 2.0
@Override
public void run()
{
  Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
  topicCountMap.put(topic, 1);
  Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);
  KafkaStream<byte[], byte[]> stream = consumerMap.get(topic).get(0);
  ConsumerIterator<byte[], byte[]> it = stream.iterator();
  logger.debug("Inside consumer::run receiveCount= {}", receiveCount);
  while (it.hasNext() && isAlive) {
    Message msg = new Message(it.next().message());
    if (latch != null) {
      latch.countDown();
    }
    if (getMessage(msg).equals(KafkaOperatorTestBase.END_TUPLE)) {
      break;
    }
    holdingBuffer.add(msg);
    receiveCount++;
    logger.debug("Consuming {}, receiveCount= {}", getMessage(msg), receiveCount);
    try {
      Thread.sleep(50);
    } catch (InterruptedException e) {
      break;
    }
  }
  logger.debug("DONE consuming");
}
 
Example #12
Source File: KafkaTestConsumer.java    From attic-apex-malhar with Apache License 2.0
public String getMessage(Message message)
{
  ByteBuffer buffer = message.payload();
  byte[] bytes = new byte[buffer.remaining()];
  buffer.get(bytes);
  return new String(bytes);
}
 
Example #13
Source File: KafkaSinglePortByteArrayInputOperator.java    From attic-apex-malhar with Apache License 2.0
/**
 * Implement abstract method of AbstractKafkaSinglePortInputOperator.
 *
 * @param message the Kafka message to convert
 * @return the message payload as a byte array
 */
@Override
public byte[] getTuple(Message message)
{
  byte[] bytes = null;
  try {
    ByteBuffer buffer = message.payload();
    bytes = new byte[buffer.remaining()];
    buffer.get(bytes);
  } catch (Exception ex) {
    // on a decode error, return whatever has been read so far (null by default)
    return bytes;
  }
  return bytes;
}
 
Example #14
Source File: KafkaLeaderReader.java    From arcusplatform with Apache License 2.0
SearchOffset(TopicAndPartition tap, Function<Message, V> factory, V target, long startOffset, long endOffset) {
   this.tap = tap;
   this.factory = factory;
   this.target = target;
   this.startOffset = startOffset;
   this.endOffset = endOffset;
}
 
Example #15
Source File: AbstractExactlyOnceKafkaOutputOperator.java    From attic-apex-malhar with Apache License 2.0
private void initializeLastProcessingOffset()
{
  // read last received kafka message
  TopicMetadata tm = KafkaMetadataUtil.getTopicMetadata(Sets.newHashSet((String)getConfigProperties().get(KafkaMetadataUtil.PRODUCER_PROP_BROKERLIST)), this.getTopic());

  if (tm == null) {
    throw new RuntimeException("Failed to retrieve topic metadata");
  }

  partitionNum = tm.partitionsMetadata().size();

  lastMsgs = new HashMap<Integer, Pair<byte[],byte[]>>(partitionNum);

  for (PartitionMetadata pm : tm.partitionsMetadata()) {

    String leadBroker = pm.leader().host();
    int port = pm.leader().port();
    String clientName = this.getClass().getName().replace('$', '.') + "_Client_" + tm.topic() + "_" + pm.partitionId();
    SimpleConsumer consumer = new SimpleConsumer(leadBroker, port, 100000, 64 * 1024, clientName);

    long readOffset = KafkaMetadataUtil.getLastOffset(consumer, tm.topic(), pm.partitionId(), kafka.api.OffsetRequest.LatestTime(), clientName);

    FetchRequest req = new FetchRequestBuilder().clientId(clientName).addFetch(tm.topic(), pm.partitionId(), readOffset - 1, 100000).build();

    FetchResponse fetchResponse = consumer.fetch(req);
    for (MessageAndOffset messageAndOffset : fetchResponse.messageSet(tm.topic(), pm.partitionId())) {

      Message m = messageAndOffset.message();

      ByteBuffer payload = m.payload();
      ByteBuffer key = m.key();
      // size the arrays from remaining(), not limit(), so a non-zero buffer
      // position cannot cause a BufferUnderflowException on get()
      byte[] valueBytes = new byte[payload.remaining()];
      byte[] keyBytes = new byte[key.remaining()];
      payload.get(valueBytes);
      key.get(keyBytes);
      lastMsgs.put(pm.partitionId(), new Pair<byte[], byte[]>(keyBytes, valueBytes));
    }
  }
}
 
Example #16
Source File: KafkaUtils.java    From storm-kafka-0.8-plus with Apache License 2.0
public static Iterable<List<Object>> generateTuples(KafkaConfig kafkaConfig, Message msg) {
    Iterable<List<Object>> tups;
    ByteBuffer payload = msg.payload();
    ByteBuffer key = msg.key();
    if (key != null && kafkaConfig.scheme instanceof KeyValueSchemeAsMultiScheme) {
        tups = ((KeyValueSchemeAsMultiScheme) kafkaConfig.scheme).deserializeKeyAndValue(Utils.toByteArray(key), Utils.toByteArray(payload));
    } else {
        tups = kafkaConfig.scheme.deserialize(Utils.toByteArray(payload));
    }
    return tups;
}
 
Example #17
Source File: JsonMessageHandlerTest.java    From elasticsearch-river-kafka with Apache License 2.0
public void testIt() throws Exception
{
	Client client = createMock(Client.class);
	IndexRequestBuilder irb = createMock(IndexRequestBuilder.class);
	JsonMessageHandler h = new JsonMessageHandler(client);
	byte[] json = toJson(rec).getBytes();
			
	expect(client.prepareIndex(anyObject(String.class), anyObject(String.class), anyObject(String.class))).andReturn(irb);
	replay(client);
	
	Message message = createMock(Message.class);
	expect(message.payload()).andReturn(ByteBuffer.wrap(json));
	replay(message);
	
	BulkRequestBuilder bulkRequestBuilder = createMock(BulkRequestBuilder.class);
	
	expect(bulkRequestBuilder.add(anyObject(IndexRequestBuilder.class))).andReturn(null);
	replay(bulkRequestBuilder);
	
	try {
		h.handle(bulkRequestBuilder, message);
	} catch (Exception e) {
		fail("This should not fail");
	}
	
	verify(client);
}
 
Example #18
Source File: KafkaStream.java    From arcusplatform with Apache License 2.0
private <C extends Comparable<? super C>> Function<Message, C> wrapMapGetter(Function<Map.Entry<K, V>, C> delegate) {
   return (message) -> {
      K key = keyDeserializer.apply(message.key());
      V value = valueDeserializer.apply(message.buffer());
      return delegate.apply(new ImmutableMapEntry<>(key, value));
   };
}
 
Example #19
Source File: PulsarMessageAndMetadata.java    From pulsar with Apache License 2.0
public PulsarMessageAndMetadata(String topic, int partition, Message rawMessage, long offset, Decoder<K> keyDecoder,
        Decoder<V> valueDecoder, K key, V value) {
    super(topic, partition, rawMessage, offset, keyDecoder, valueDecoder);
    this.topic = topic;
    this.partition = partition;
    this.offset = offset;
    this.keyDecoder = keyDecoder;
    this.valueDecoder = valueDecoder;
    this.key = key;
    this.value = value;
}
 
Example #20
Source File: KafkaPartitionReader.java    From Scribengin with GNU Affero General Public License v3.0
byte[] getCurrentMessagePayload() {
  while(currentMessageSetIterator.hasNext()) {
    MessageAndOffset messageAndOffset = currentMessageSetIterator.next();
    if (messageAndOffset.offset() < currentOffset) continue; //old offset, ignore
    Message message = messageAndOffset.message();
    ByteBuffer payload = message.payload();
    byte[] bytes = new byte[payload.remaining()]; // remaining(), not limit(), sizes the copy correctly
    payload.get(bytes);
    currentOffset = messageAndOffset.nextOffset();
    return bytes;
  }
  return null;
}
 
Example #21
Source File: TridentKafkaEmitter.java    From storm-kafka-0.8-plus with Apache License 2.0
private void emit(TridentCollector collector, Message msg) {
    Iterable<List<Object>> values = KafkaUtils.generateTuples(_config, msg);
    if (values != null) {
        for (List<Object> value : values) {
            collector.emit(value);
        }
    }
}
 
Example #22
Source File: KafkaConsumerTest.java    From pentaho-kafka-consumer with Apache License 2.0
private static MessageAndMetadata<byte[], byte[]> generateKafkaMessage() {
    byte[] message = "aMessage".getBytes();

    return new MessageAndMetadata<byte[], byte[]>("topic", 0, new Message(message),
            0, new DefaultDecoder(null), new DefaultDecoder(null));
}
 
Example #23
Source File: RawMessageHandler.java    From elasticsearch-river-kafka with Apache License 2.0
public void handle(BulkRequestBuilder bulkRequestBuilder, Message message) throws Exception
{
	byte[] data = getMessageData(message);
	bulkRequestBuilder.add(data, 0, data.length, false);
}
 
Example #24
Source File: KafkaSpout.java    From storm-kafka-0.8-plus with Apache License 2.0
public MessageAndRealOffset(Message msg, long offset) {
    this.msg = msg;
    this.offset = offset;
}
 
Example #25
Source File: JsonMessageHandler.java    From elasticsearch-river-kafka with Apache License 2.0
@Override
public void handle(BulkRequestBuilder bulkRequestBuilder, Message message) throws Exception {
	this.readMessage(message);
	bulkRequestBuilder.add( this.createIndexRequestBuilder() );
}
 
Example #26
Source File: JsonMessageHandler.java    From elasticsearch-river-kafka with Apache License 2.0
protected void readMessage(Message message) throws Exception {
	messageMap = reader.readValue(getMessageData(message));
}
 
Example #27
Source File: MessageHandler.java    From elasticsearch-river-kafka with Apache License 2.0
public static byte[] getMessageData(Message message) {
	ByteBuffer buf = message.payload();
	byte[] data = new byte[buf.remaining()];
	buf.get(data);
	return data;
}
 
Example #28
Source File: KafkaConsumer.java    From attic-apex-malhar with Apache License 2.0
public Message getMsg()
{
  return msg;
}
 
Example #29
Source File: KafkaLeaderReader.java    From arcusplatform with Apache License 2.0
public SearchOffsets(Function<Message, V> factory, V target) {
   this.comparisonBuilder = factory;
   this.target = target;
   this.offsets = new LinkedHashMap<>();
}
 
Example #30
Source File: KafkaLeaderReader.java    From arcusplatform with Apache License 2.0
public ScanSearch(Function<Message, V> getter, V target) {
   this.getter = getter;
   this.target = target;
}