org.apache.kafka.common.header.internals.RecordHeaders Java Examples

The following examples show how to use org.apache.kafka.common.header.internals.RecordHeaders. All of them are drawn from open source projects; the source file, project, and license are noted above each example.
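Before the project-specific examples, a minimal self-contained sketch of the core API may help: RecordHeaders implements org.apache.kafka.common.header.Headers, header values are raw byte arrays, and lastHeader(key) returns the most recently added header for a key (or null if absent). The classes and methods below are the standard Kafka client API; the header keys and values are illustrative only.

import java.nio.charset.StandardCharsets;

import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.header.Headers;
import org.apache.kafka.common.header.internals.RecordHeader;
import org.apache.kafka.common.header.internals.RecordHeaders;

public class RecordHeadersBasics {
    public static void main(String[] args) {
        Headers headers = new RecordHeaders();

        // Header values are raw bytes; callers choose and apply their own encoding.
        headers.add("trace-id", "abc-123".getBytes(StandardCharsets.UTF_8));
        headers.add(new RecordHeader("retry-count", "0".getBytes(StandardCharsets.UTF_8)));

        // lastHeader returns the most recently added header with that key, or null.
        Header traceId = headers.lastHeader("trace-id");
        System.out.println(new String(traceId.value(), StandardCharsets.UTF_8));

        // Headers extends Iterable<Header>, so all entries can be enumerated directly.
        for (Header header : headers) {
            System.out.println(header.key() + " -> " + header.value().length + " bytes");
        }
    }
}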
Example #1
Source File: KafkaAvroSerdesTest.java    From registry with Apache License 2.0
@Test
public void testToggleStoringSchemaInHeader() {
    TestRecord record = new TestRecord();
    record.setField1("Hello");
    record.setField2("World");
    String keySchemaHeaderName = KafkaAvroSerde.DEFAULT_KEY_SCHEMA_VERSION_ID;

    for (Boolean storeSchemaIdInHeader : Arrays.asList(true, false)) {
        Map<String, Object> configs = new HashMap<>();
        configs.put(KafkaAvroSerializer.STORE_SCHEMA_VERSION_ID_IN_HEADER, storeSchemaIdInHeader.toString());
        configs.put(AbstractAvroSnapshotDeserializer.SPECIFIC_AVRO_READER, true);

        KafkaAvroSerde serde = new KafkaAvroSerde(schemaRegistryClient);
        final Serializer<Object> serializer = serde.serializer();
        serializer.configure(configs, true);

        Headers headers = new RecordHeaders();
        final byte[] bytes = serializer.serialize(topic, headers, record);
        Assert.assertEquals(storeSchemaIdInHeader, headers.lastHeader(keySchemaHeaderName) != null);

        final Deserializer<Object> deserializer = serde.deserializer();
        deserializer.configure(configs, true);
        final TestRecord actual = (TestRecord) deserializer.deserialize(topic, headers, bytes);
        Assert.assertEquals(record, actual);
    }
}
 
Example #2
Source File: KafkaEventReceiverTest.java    From stream-registry with Apache License 2.0
@Test
public void typical() throws Exception {
  when(config.getTopic()).thenReturn(topic);
  when(consumer.partitionsFor(topic)).thenReturn(List.of(partitionInfo));
  when(consumer.beginningOffsets(topicPartitions)).thenReturn(Map.of(topicPartition, 0L));
  when(consumer.endOffsets(topicPartitions)).thenReturn(Map.of(topicPartition, 0L));
  when(consumer.poll(Duration.ofMillis(100))).thenReturn(new ConsumerRecords<>(Map.of(topicPartition, List.of(record))));
  when(record.key()).thenReturn(avroKey);
  when(record.value()).thenReturn(avroValue);
  when(converter.toModel(avroKey, avroValue)).thenReturn(event);
  when(record.headers()).thenReturn(new RecordHeaders(List.of(new RecordHeader(CORRELATION_ID, "foo".getBytes(UTF_8)))));

  underTest.receive(listener);
  Thread.sleep(100L);
  underTest.close();

  var inOrder = Mockito.inOrder(consumer, listener, correlator);
  inOrder.verify(consumer).assign(topicPartitions);
  inOrder.verify(consumer).seekToBeginning(topicPartitions);
  inOrder.verify(listener).onEvent(LOAD_COMPLETE);
  inOrder.verify(listener).onEvent(event);
  inOrder.verify(correlator).received("foo");
}
 
Example #3
Source File: HeaderUtilsTest.java    From extension-kafka with Apache License 2.0
@Test
public void testWritingNonNegativeValuesShouldBeWrittenAsNonNegativeValues() {
    RecordHeaders target = new RecordHeaders();
    short expectedShort = 1;
    int expectedInt = 200;
    long expectedLong = 300L;
    float expectedFloat = 300.f;
    double expectedDouble = 0.000;

    addHeader(target, "short", expectedShort);
    assertThat(shortValue(target)).isEqualTo(expectedShort);

    addHeader(target, "int", expectedInt);
    assertThat(intValue(target)).isEqualTo(expectedInt);

    addHeader(target, "long", expectedLong);
    assertThat(longValue(target)).isEqualTo(expectedLong);

    addHeader(target, "float", expectedFloat);
    assertThat(floatValue(target)).isEqualTo(expectedFloat);

    addHeader(target, "double", expectedDouble);
    assertThat(doubleValue(target)).isEqualTo(expectedDouble);
}
 
Example #4
Source File: IncomingKafkaRecordMetadata.java    From smallrye-reactive-messaging with Apache License 2.0
public IncomingKafkaRecordMetadata(KafkaConsumerRecord<K, T> record) {
    this.record = record;
    this.recordKey = record.key();
    this.topic = record.topic();
    this.partition = record.partition();
    this.timestamp = Instant.ofEpochMilli(record.timestamp());
    this.timestampType = record.timestampType();
    this.offset = record.offset();
    if (record.headers() == null) {
        this.headers = new RecordHeaders();
    } else {
        this.headers = new RecordHeaders(record.headers().stream()
                .map(kh -> new RecordHeader(kh.key(), kh.value().getBytes())).collect(
                        Collectors.toList()));
    }
}
 
Example #5
Source File: OutgoingKafkaRecord.java    From smallrye-reactive-messaging with Apache License 2.0
/**
 * Creates a new outgoing Kafka Message with a header added to the header list.
 *
 * @param key the header key
 * @param content the header value, must not be {@code null}
 * @param enc the encoding, must not be {@code null}
 * @return the updated Kafka Message.
 */
public OutgoingKafkaRecord<K, T> withHeader(String key, String content, Charset enc) {
    Headers headers = getHeaders();
    Headers copy = new RecordHeaders(headers);
    copy.add(new Header() {
        @Override
        public String key() {
            return key;
        }

        @Override
        public byte[] value() {
            return content.getBytes(enc);
        }
    });
    return new OutgoingKafkaRecord<>(getTopic(), getKey(), getPayload(), getTimestamp(), getPartition(),
            copy, getAck(), getNack(), getMetadata());
}
 
Example #6
Source File: OutgoingKafkaRecord.java    From smallrye-reactive-messaging with Apache License 2.0
/**
 * Creates a new outgoing Kafka Message with a header added to the header list.
 *
 * @param key the header key
 * @param content the header value, must not be {@code null}
 * @return the updated Kafka Message.
 */
public OutgoingKafkaRecord<K, T> withHeader(String key, String content) {
    Headers headers = getHeaders();
    Headers copy = new RecordHeaders(headers);
    copy.add(new Header() {
        @Override
        public String key() {
            return key;
        }

        @Override
        public byte[] value() {
            return content.getBytes();
        }
    });
    return new OutgoingKafkaRecord<>(getTopic(), getKey(), getPayload(), getTimestamp(), getPartition(),
            copy, getAck(), getNack(), getMetadata());
}
 
Example #7
Source File: OutgoingKafkaRecord.java    From smallrye-reactive-messaging with Apache License 2.0
/**
 * Creates a new outgoing Kafka Message with a header added to the header list.
 *
 * @param key the header key
 * @param content the header value, must not be {@code null}
 * @return the updated Kafka Message.
 */
public OutgoingKafkaRecord<K, T> withHeader(String key, byte[] content) {
    Headers headers = getHeaders();
    Headers copy = new RecordHeaders(headers);
    copy.add(new Header() {
        @Override
        public String key() {
            return key;
        }

        @Override
        public byte[] value() {
            return content;
        }
    });
    return new OutgoingKafkaRecord<>(getTopic(), getKey(), getPayload(), getTimestamp(), getPartition(),
            copy, getAck(), getNack(), getMetadata());
}
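Taken together, Examples #5-#7 expose three overloads for attaching a header to an outgoing record: a String value with an explicit Charset (#5), a String value using the platform default charset (#6), and raw bytes (#7). A short usage sketch follows; the record variable, header keys, and values are hypothetical, and the chaining works because each call returns a new OutgoingKafkaRecord:

// Hypothetical usage of the three withHeader overloads shown above;
// 'record' is assumed to be an existing OutgoingKafkaRecord<String, String>.
OutgoingKafkaRecord<String, String> enriched = record
        .withHeader("trace-id", "abc-123")                                // platform default charset
        .withHeader("trace-id-utf8", "abc-123", StandardCharsets.UTF_8)   // explicit charset
        .withHeader("raw-flag", new byte[] { 1 });                        // raw bytes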
 
Example #8
Source File: CloudEventMessageSerializerTest.java    From sdk-java with Apache License 2.0
@Test
public void serializerShouldWork() {
    String topic = "test";
    CloudEvent event = Data.V1_WITH_JSON_DATA;

    CloudEventMessageSerializer serializer = new CloudEventMessageSerializer();

    Headers headers = new RecordHeaders();

    MockBinaryMessageWriter inMessage = new MockBinaryMessageWriter();
    CloudEventUtils.toVisitable(event).read(inMessage);

    byte[] payload = serializer.serialize(topic, headers, inMessage);

    MessageReader outMessage = KafkaMessageFactory.createReader(headers, payload);

    assertThat(outMessage.getEncoding())
        .isEqualTo(Encoding.BINARY);
    assertThat(outMessage.toEvent())
        .isEqualTo(event);
}
 
Example #9
Source File: TracingKafkaUtilsTest.java    From java-kafka-client with Apache License 2.0
@Test
public void inject_two_contexts_and_extract() {
  MockSpan span = mockTracer.buildSpan("first").start();
  Headers headers = new RecordHeaders();
  assertEquals(0, headers.toArray().length);

  // inject first
  TracingKafkaUtils.inject(span.context(), headers, mockTracer);
  int headersLength = headers.toArray().length;
  assertTrue(headersLength > 0);

  // inject second
  MockSpan span2 = mockTracer.buildSpan("second").asChildOf(span.context()).start();
  TracingKafkaUtils.inject(span2.context(), headers, mockTracer);
  assertTrue(headers.toArray().length > headersLength);

  // check first
  MockSpan.MockContext spanContext = (MockSpan.MockContext) TracingKafkaUtils
      .extractSpanContext(headers, mockTracer);
  assertEquals(span2.context().spanId(), spanContext.spanId());
  assertEquals(span2.context().traceId(), spanContext.traceId());
}
 
Example #10
Source File: MirusSourceTaskTest.java    From mirus with BSD 3-Clause "New" or "Revised" License
@Test
public void testSourceRecordsWorksWithNoHeaders() {
  final String topic = "topica";
  final int partition = 0;
  final int offset = 123;
  final long timestamp = 314159;

  Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> records = new HashMap<>();
  records.put(
      new TopicPartition(topic, partition),
      Collections.singletonList(
          newConsumerRecord(topic, partition, offset, timestamp, new RecordHeaders())));
  ConsumerRecords<byte[], byte[]> pollResult = new ConsumerRecords<>(records);

  List<SourceRecord> result = mirusSourceTask.sourceRecords(pollResult);

  assertThat(result.get(0).topic(), is(topic));
  assertThat(
      result.get(0).sourcePartition(),
      is(TopicPartitionSerDe.asMap(new TopicPartition(topic, partition))));
  assertThat(result.get(0).sourceOffset(), is(MirusSourceTask.offsetMap(offset + 1L)));
  assertThat(result.get(0).timestamp(), is(timestamp));
  assertThat(result.get(0).headers().size(), is(0));
}
 
Example #11
Source File: KafkaDecoderTest.java    From synapse with Apache License 2.0
@Test
public void shouldDecodeCompoundKeys() {
    final KafkaDecoder decoder = new KafkaDecoder();
    final ConsumerRecord<String,String> record = new ConsumerRecord<>(
            "ch01",
            0,
            42L,
            1234L, TimestampType.CREATE_TIME,
            -1L, -1, -1,
            "key-1234",
            null,
            new RecordHeaders(asList(
                    new RecordHeader("_synapse_msg_partitionKey", "1234".getBytes(UTF_8)),
                    new RecordHeader("_synapse_msg_compactionKey", "key-1234".getBytes(UTF_8))
            ))
    );

    // when
    final TextMessage decodedMessage = decoder.apply(record);

    // then
    assertThat(decodedMessage.getKey().isCompoundKey(), is(true));
    assertThat(decodedMessage.getKey().compactionKey(), is("key-1234"));
    assertThat(decodedMessage.getKey().partitionKey(), is("1234"));
}
 
Example #12
Source File: KafkaDecoderTest.java    From synapse with Apache License 2.0
@Test
public void shouldDecodeBrokenCompoundKeysAsMessageKey() {
    final KafkaDecoder decoder = new KafkaDecoder();
    final ConsumerRecord<String,String> record = new ConsumerRecord<>(
            "ch01",
            0,
            42L,
            1234L, TimestampType.CREATE_TIME,
            -1L, -1, -1,
            "record-key",
            null,
            new RecordHeaders(asList(
                    new RecordHeader("_synapse_msg_partitionKey", "1234".getBytes(UTF_8)),
                    new RecordHeader("_synapse_msg_compactionKey", "key-1234".getBytes(UTF_8))
            ))
    );

    // when
    final TextMessage decodedMessage = decoder.apply(record);

    // then
    assertThat(decodedMessage.getKey().isCompoundKey(), is(false));
    assertThat(decodedMessage.getKey().compactionKey(), is("record-key"));
}
 
Example #13
Source File: ConsumerMockTestBase.java    From vertx-kafka-client with Apache License 2.0
@Test
public void testConsumeWithHeader(TestContext ctx) {
  MockConsumer<String, String> mock = new MockConsumer<>(OffsetResetStrategy.EARLIEST);
  KafkaReadStream<String, String> consumer = createConsumer(vertx, mock);
  Async doneLatch = ctx.async();
  consumer.handler(record -> {
    ctx.assertEquals("the_topic", record.topic());
    ctx.assertEquals(0, record.partition());
    ctx.assertEquals("abc", record.key());
    ctx.assertEquals("def", record.value());
    Header[] headers = record.headers().toArray();
    ctx.assertEquals(1, headers.length);
    Header header = headers[0];
    ctx.assertEquals("header_key", header.key());
    ctx.assertEquals("header_value", new String(header.value()));
    consumer.close(v -> doneLatch.complete());
  });
  consumer.subscribe(Collections.singleton("the_topic"), v -> {
    mock.schedulePollTask(() -> {
      mock.rebalance(Collections.singletonList(new TopicPartition("the_topic", 0)));
      mock.addRecord(new ConsumerRecord<>("the_topic", 0, 0L, 0L, TimestampType.NO_TIMESTAMP_TYPE, 0L, 0, 0, "abc", "def",
        new RecordHeaders(Collections.singletonList(new RecordHeader("header_key", "header_value".getBytes())))));
      mock.seek(new TopicPartition("the_topic", 0), 0L);
    });
  });
}
 
Example #14
Source File: KafkaAvroSerdesTest.java    From registry with Apache License 2.0
@Test
public void testGenericSerializedSpecificDeserialized() {
    Map<String, Object> config = new HashMap<>();
    config.put(AvroSnapshotDeserializer.SPECIFIC_AVRO_READER, true);
    KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
    kafkaAvroDeserializer.configure(config, false);

    KafkaAvroSerializer kafkaAvroSerializer = new KafkaAvroSerializer(schemaRegistryClient);
    kafkaAvroSerializer.configure(Collections.emptyMap(), false);

    GenericRecord record = new GenericRecordBuilder(schema).set("field1", "some value").set("field2", "some other value").build();

    byte[] payload = kafkaAvroSerializer.serialize(topic, record);
    Object o = kafkaAvroDeserializer.deserialize(topic, payload);
    checkGenericSerializedSpecificDeserializedEquals(record, o);

    Headers headers = new RecordHeaders();
    payload = kafkaAvroSerializer.serialize(topic, headers, record);
    o = kafkaAvroDeserializer.deserialize(topic, headers, payload);
    checkGenericSerializedSpecificDeserializedEquals(record, o);
}
 
Example #15
Source File: KafkaEventReceiverTest.java    From stream-registry with Apache License 2.0
@Test
public void listenerThrowsException() throws Exception {
  when(config.getTopic()).thenReturn(topic);
  when(consumer.partitionsFor(topic)).thenReturn(List.of(partitionInfo));
  when(consumer.beginningOffsets(topicPartitions)).thenReturn(Map.of(topicPartition, 0L));
  when(consumer.endOffsets(topicPartitions)).thenReturn(Map.of(topicPartition, 0L));
  when(consumer.poll(Duration.ofMillis(100))).thenReturn(new ConsumerRecords<>(Map.of(topicPartition, List.of(record))));
  when(record.key()).thenReturn(avroKey);
  when(record.value()).thenReturn(avroValue);
  when(converter.toModel(avroKey, avroValue)).thenReturn(event);
  when(record.headers()).thenReturn(new RecordHeaders(List.of(new RecordHeader(CORRELATION_ID, "foo".getBytes(UTF_8)))));
  doThrow(new RuntimeException("listener error")).when(listener).onEvent(event);

  underTest.receive(listener);
  Thread.sleep(100L);
  underTest.close();

  var inOrder = Mockito.inOrder(consumer, listener, correlator);
  inOrder.verify(consumer).assign(topicPartitions);
  inOrder.verify(consumer).seekToBeginning(topicPartitions);
  inOrder.verify(listener).onEvent(LOAD_COMPLETE);
  inOrder.verify(listener).onEvent(event);
  inOrder.verify(correlator).received("foo");
}
 
Example #16
Source File: KafkaSourceTaskTest.java    From MirrorTool-for-Kafka-Connect with Apache License 2.0
private ConsumerRecords<byte[], byte[]> createTestRecordsWithHeaders() {
  RecordHeader header = new RecordHeader("testHeader", new byte[0]);
  RecordHeaders headers = new RecordHeaders();
  headers.add(header);
  TimestampType timestampType = TimestampType.NO_TIMESTAMP_TYPE;

  byte testByte = 0;
  byte[] testKey = { testByte };
  byte[] testValue = { testByte };

  ConnectHeaders destinationHeaders = new ConnectHeaders();
  destinationHeaders.add(header.key(), header.value(), Schema.OPTIONAL_BYTES_SCHEMA);
  ConsumerRecord<byte[], byte[]> testConsumerRecord = new ConsumerRecord<byte[], byte[]>(FIRST_TOPIC, FIRST_PARTITION,
      FIRST_OFFSET, System.currentTimeMillis(), timestampType, 0L, 0, 0, testKey, testValue, headers);

  TopicPartition topicPartition = new TopicPartition(FIRST_TOPIC, FIRST_PARTITION);
  List<ConsumerRecord<byte[], byte[]>> consumerRecords = new ArrayList<>();
  consumerRecords.add(testConsumerRecord);

  Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> consumerRecordMap = new HashMap<>(1);
  consumerRecordMap.put(topicPartition, consumerRecords);
  ConsumerRecords<byte[], byte[]> testRecords = new ConsumerRecords<>(consumerRecordMap);
  return testRecords;
}
 
Example #17
Source File: KafkaAvroSerdesTest.java    From registry with Apache License 2.0
@Test
public void testSpecificSerializedGenericDeserialized() {
    Map<String, Object> config = new HashMap<>();
    config.put(AvroSnapshotDeserializer.SPECIFIC_AVRO_READER, false);
    KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
    kafkaAvroDeserializer.configure(config, false);

    KafkaAvroSerializer kafkaAvroSerializer = new KafkaAvroSerializer(schemaRegistryClient);
    kafkaAvroSerializer.configure(config, false);

    TestRecord record = new TestRecord();
    record.setField1("some value");
    record.setField1("some other value");

    byte[] bytes = kafkaAvroSerializer.serialize(topic, record);
    Object o = kafkaAvroDeserializer.deserialize(topic, bytes);
    checkSpecificSerializedGenericDeserializedEquals(record, o);

    Headers headers = new RecordHeaders();
    bytes = kafkaAvroSerializer.serialize(topic, headers, record);
    o = kafkaAvroDeserializer.deserialize(topic, headers, bytes);
    checkSpecificSerializedGenericDeserializedEquals(record, o);
}
 
Example #18
Source File: KafkaAvroSerdesTest.java    From registry with Apache License 2.0
@Test
public void testGenericSerializedGenericDeserialized() {
    String topic = "topic";
    Map<String, Object> config = new HashMap<>();
    config.put(AvroSnapshotDeserializer.SPECIFIC_AVRO_READER, false);
    KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
    kafkaAvroDeserializer.configure(config, false);

    KafkaAvroSerializer kafkaAvroSerializer = new KafkaAvroSerializer(schemaRegistryClient);
    kafkaAvroSerializer.configure(config, false);

    GenericRecord record = new GenericRecordBuilder(schema).set("field1", "some value").set("field2", "some other value").build();

    byte[] bytes = kafkaAvroSerializer.serialize(topic, record);
    Object o = kafkaAvroDeserializer.deserialize(topic, bytes);
    checkGenericSerializedGenericDeserializedEquals(record, o);

    Headers headers = new RecordHeaders();
    bytes = kafkaAvroSerializer.serialize(topic, headers, record);
    o = kafkaAvroDeserializer.deserialize(topic, headers, bytes);
    checkGenericSerializedGenericDeserializedEquals(record, o);
}
 
Example #19
Source File: CustomTimestampPolicyWithLimitedDelayTest.java    From beam with Apache License 2.0
private static List<Long> getTimestampsForRecords(
    TimestampPolicy<String, String> policy, Instant now, List<Long> timestampOffsets) {

  return timestampOffsets.stream()
      .map(
          ts -> {
            Instant result =
                policy.getTimestampForRecord(
                    null,
                    new KafkaRecord<>(
                        "topic",
                        0,
                        0,
                        now.getMillis() + ts,
                        KafkaTimestampType.CREATE_TIME,
                        new RecordHeaders(),
                        "key",
                        "value"));
            return result.getMillis() - now.getMillis();
          })
      .collect(Collectors.toList());
}
 
Example #20
Source File: MockKafkaTest.java    From jackdaw with BSD 3-Clause "New" or "Revised" License
@Test
public void testDefaultRecordMapping() {
  final MockKafka<Object, Object> mockKafka = new MockKafka<>();
  final RecordHeaders recordHeaders = new RecordHeaders(Collections.singleton(
                  new RecordHeader("headerKey", "headerValue".getBytes(StandardCharsets.UTF_8))));
  final RecordMetadata recordMetadata = new RecordMetadata(new TopicPartition("topic", 0),
          0, 0, 0, -1L, -1, -1);
  final ProducerRecord<Object, Object> producerRecord =
          new ProducerRecord<>("topic", 0, "key", "value", recordHeaders);

  final ConsumerRecord<Object, Object> consumerRecord = mockKafka.defaultRecordMapping(producerRecord, recordMetadata);

  assertEquals(producerRecord.topic(), consumerRecord.topic());
  assertEquals(producerRecord.partition().intValue(), consumerRecord.partition());
  assertEquals(producerRecord.key(), consumerRecord.key());
  assertEquals(producerRecord.value(), consumerRecord.value());
  assertEquals(producerRecord.headers(), consumerRecord.headers());
}
 
Example #21
Source File: CryptoDeserializerTest.java    From kafka-encryption with Apache License 2.0
@Test
public void testDeserializeWhenKeyRefIsSet() {
    byte[] encoded = "encoded".getBytes(StandardCharsets.UTF_8);
    byte[] keyRef = "keyref1".getBytes(StandardCharsets.UTF_8);

    // Layout: ENCRYPTED_PREFIX | keyRef length (int) | keyRef | encrypted payload
    ByteBuffer encodedValue = ByteBuffer.allocate(KafkaCryptoConstants.ENCRYPTED_PREFIX.length + Integer.BYTES + keyRef.length + encoded.length);
    encodedValue.put(KafkaCryptoConstants.ENCRYPTED_PREFIX);
    encodedValue.putInt(keyRef.length);
    encodedValue.put(keyRef);
    encodedValue.put(encoded);


    given(decryptor.decrypt(encoded, keyRef)).willReturn("decoded".getBytes(StandardCharsets.UTF_8));
    given(rawDeserializer.deserialize("topic1", new RecordHeaders(), "decoded".getBytes(StandardCharsets.UTF_8))).willReturn("deserialized value");

    RecordHeaders recordHeaders = new RecordHeaders();
    String value = cryptoDeserializer.deserialize("topic1", recordHeaders, encodedValue.array());

    assertThat(value).isEqualTo("deserialized value");

    assertThat(recordHeaders.lastHeader(KafkaCryptoConstants.KEY_REF_HEADER).value()).isEqualTo(keyRef);
}
 
Example #22
Source File: RecordTest.java    From kafka-backup with Apache License 2.0
@Test
public void equalsFalseBecauseHeadersStrictSubsetTest() {
    // GIVEN
    RecordHeaders aHeaders = new RecordHeaders();
    aHeaders.add("header0-key", Arrays.copyOf(HEADER_0_VALUE_BYTES, HEADER_0_VALUE_BYTES.length));
    aHeaders.add("header1-key", Arrays.copyOf(HEADER_1_VALUE_BYTES, HEADER_1_VALUE_BYTES.length));
    Record a = new Record(TOPIC, PARTITION, KEY_BYTES, VALUE_BYTES, OFFSET, TIMESTAMP, TIMESTAMP_TYPE, aHeaders);

    RecordHeaders bHeaders = new RecordHeaders();
    bHeaders.add("header0-key", Arrays.copyOf(HEADER_0_VALUE_BYTES, HEADER_0_VALUE_BYTES.length));
    Record b = new Record(TOPIC, PARTITION, KEY_BYTES, VALUE_BYTES, OFFSET, TIMESTAMP, TIMESTAMP_TYPE, bHeaders);


    RecordHeaders cHeaders = new RecordHeaders();
    cHeaders.add("header1-key", Arrays.copyOf(HEADER_0_VALUE_BYTES, HEADER_0_VALUE_BYTES.length));
    cHeaders.add("header1-key", Arrays.copyOf(HEADER_1_VALUE_BYTES, HEADER_1_VALUE_BYTES.length));
    Record c = new Record(TOPIC, PARTITION, KEY_BYTES, VALUE_BYTES, OFFSET, TIMESTAMP, TIMESTAMP_TYPE, cHeaders);

    // THEN
    assertNotEquals(a, b);
    assertNotEquals(b, a);
    assertNotEquals(a, c);
    assertNotEquals(b, c);
}
 
Example #23
Source File: RecordTest.java    From kafka-backup with Apache License 2.0
@Test
public void equalsValueTrueTest() {
    // GIVEN
    Record a = new Record(TOPIC, PARTITION, KEY_BYTES, VALUE_BYTES, OFFSET, TIMESTAMP, TIMESTAMP_TYPE, HEADERS);

    RecordHeaders bHeaders = new RecordHeaders();
    bHeaders.add("", new byte[0]);
    bHeaders.add("null", null);
    bHeaders.add("value0", Arrays.copyOf(HEADER_0_VALUE_BYTES, HEADER_0_VALUE_BYTES.length));
    bHeaders.add("value1", Arrays.copyOf(HEADER_1_VALUE_BYTES, HEADER_1_VALUE_BYTES.length));
    Record b = new Record(TOPIC, PARTITION, KEY_BYTES, VALUE_BYTES, OFFSET, TIMESTAMP, TIMESTAMP_TYPE, bHeaders);

    // THEN
    assertEquals(a, b);
    assertEquals(b, a);
}
 
Example #24
Source File: KafkaConsumerHelperTest.java    From zerocode with Apache License 2.0
@Test
public void should_read_json_with_headers_in_record() throws IOException {
    // given
    ConsumerRecord consumerRecord = Mockito.mock(ConsumerRecord.class);
    Mockito.when(consumerRecord.key()).thenReturn("key");
    Mockito.when(consumerRecord.value()).thenReturn("\"value\"");
    Mockito.when(consumerRecord.headers())
            .thenReturn(new RecordHeaders().add("headerKey", "headerValue".getBytes()));

    // when
    List<ConsumerJsonRecord> consumerJsonRecords = new ArrayList<>();
    KafkaConsumerHelper.readJson(consumerJsonRecords, Iterators.forArray(consumerRecord));

    // then
    Assert.assertEquals(1, consumerJsonRecords.size());
    ConsumerJsonRecord consumerJsonRecord = consumerJsonRecords.get(0);
    Assert.assertEquals("key", consumerJsonRecord.getKey());
    Assert.assertEquals("\"value\"", consumerJsonRecord.getValue().toString());
    Assert.assertEquals(Collections.singletonMap("headerKey", "headerValue"), consumerJsonRecord.getHeaders());
}
 
Example #25
Source File: HeaderUtilsTest.java    From extension-kafka with Apache License 2.0
@Test
public void testWritingTextValueShouldBeWrittenAsString() {
    RecordHeaders target = new RecordHeaders();
    String expectedKey = "foo";
    String expectedValue = "a";
    addHeader(target, expectedKey, expectedValue);

    assertThat(target.toArray().length).isEqualTo(1);
    assertThat(target.lastHeader(expectedKey).key()).isEqualTo(expectedKey);
    assertThat(valueAsString(target, expectedKey)).isEqualTo(expectedValue);
}
 
Example #26
Source File: HeaderUtilsTest.java    From extension-kafka with Apache License 2.0
@Test
public void testReadingValuesAsTextExistingKeyShouldReturnText() {
    RecordHeaders headers = new RecordHeaders();
    String expectedValue = "Şơм℮ śẩмρŀę ÅŚÇÍỈ-ťęҳť FFlETYeKU3H5QRqw";
    addHeader(headers, "foo", expectedValue);

    assertThat(valueAsString(headers, "foo")).isEqualTo(expectedValue);
    assertThat(valueAsString(headers, "foo", "default-value")).isEqualTo(expectedValue);
}
 
Example #27
Source File: HeaderUtilsTest.java    From extension-kafka with Apache License 2.0
@Test
public void testWritingCustomValueShouldBeWrittenAsRepresentedByToString() {
    RecordHeaders target = new RecordHeaders();
    Foo expectedValue = new Foo("someName", new Bar(100));
    addHeader(target, "object", expectedValue);

    assertThat(valueAsString(target, "object")).isEqualTo(expectedValue.toString());
}
 
Example #28
Source File: ProducerRecordCoderTest.java    From beam with Apache License 2.0
@Test
public void testProducerRecordStructuralValueWithHeadersApi() throws IOException {
  RecordHeaders headers = new RecordHeaders();
  headers.add("headerKey", "headerVal".getBytes(UTF_8));
  ProducerRecordCoder producerRecordCoder =
      ProducerRecordCoder.of(ByteArrayCoder.of(), ByteArrayCoder.of());
  ProducerRecord<byte[], byte[]> producerRecord =
      new ProducerRecord<>(
          "topic", 1, null, "key".getBytes(UTF_8), "value".getBytes(UTF_8), headers);

  ProducerRecord testProducerRecord =
      (ProducerRecord) producerRecordCoder.structuralValue(producerRecord);
  assertEquals(testProducerRecord.headers(), headers);
}
 
Example #29
Source File: HeaderUtilsTest.java    From extension-kafka with Apache License 2.0 5 votes vote down vote up
@Test
public void testWritingNullValueShouldBeWrittenAsNull() {
    RecordHeaders target = new RecordHeaders();
    addHeader(target, "baz", null);

    assertThat(value(target, "baz")).isNull();
}
 
Example #30
Source File: TracingKafkaUtilsTest.java    From java-kafka-client with Apache License 2.0
@Test
public void extract_no_context() {
  Headers headers = new RecordHeaders();

  MockSpan.MockContext spanContext = (MockSpan.MockContext) TracingKafkaUtils
      .extractSpanContext(headers, mockTracer);
  assertNull(spanContext);
}