org.apache.kafka.common.header.Headers Java Examples

The following examples show how to use org.apache.kafka.common.header.Headers. Each example is taken from an open-source project; the originating source file, project, and license are noted above each snippet.
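
Before looking at the project examples, here is a minimal sketch of the Headers API itself. It is illustrative only: the header keys and values are made up, and it assumes the usual companion classes (Header, RecordHeaders, and RecordHeader from org.apache.kafka.common.header and its internals package, plus java.nio.charset.StandardCharsets) are imported.

Headers headers = new RecordHeaders();

// add() appends a header; duplicate keys are allowed
headers.add("trace-id", "abc-123".getBytes(StandardCharsets.UTF_8));
headers.add(new RecordHeader("schema-version", "7".getBytes(StandardCharsets.UTF_8)));

// lastHeader() returns the most recently added header for a key, or null if absent
Header schemaVersion = headers.lastHeader("schema-version");

// Headers is Iterable<Header>, so it can be walked directly
for (Header header : headers) {
    System.out.println(header.key() + " = " + new String(header.value(), StandardCharsets.UTF_8));
}

// remove() deletes every header with the given key
headers.remove("trace-id");
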
Example #1
Source File: KafkaAvroSerdesTest.java    From registry with Apache License 2.0
@Test
public void testToggleStoringSchemaInHeader() {
    TestRecord record = new TestRecord();
    record.setField1("Hello");
    record.setField2("World");
    String keySchemaHeaderName = KafkaAvroSerde.DEFAULT_KEY_SCHEMA_VERSION_ID;

    for (Boolean storeSchemaIdInHeader : Arrays.asList(true, false)) {
        Map<String, Object> configs = new HashMap<>();
        configs.put(KafkaAvroSerializer.STORE_SCHEMA_VERSION_ID_IN_HEADER, storeSchemaIdInHeader.toString());
        configs.put(AbstractAvroSnapshotDeserializer.SPECIFIC_AVRO_READER, true);

        KafkaAvroSerde serde = new KafkaAvroSerde(schemaRegistryClient);
        final Serializer<Object> serializer = serde.serializer();
        serializer.configure(configs, true);

        Headers headers = new RecordHeaders();
        final byte[] bytes = serializer.serialize(topic, headers, record);
        Assert.assertEquals(storeSchemaIdInHeader, headers.lastHeader(keySchemaHeaderName) != null);

        final Deserializer<Object> deserializer = serde.deserializer();
        deserializer.configure(configs, true);
        final TestRecord actual = (TestRecord) deserializer.deserialize(topic, headers, bytes);
        Assert.assertEquals(record, actual);
    }
}
 
Example #2
Source File: KafkaProducerMessageWriterTest.java    From sdk-java with Apache License 2.0
@ParameterizedTest
@MethodSource("binaryTestArguments")
void testRequestWithBinary(CloudEvent event, Headers expectedHeaders, byte[] expectedBody) {
    String topic = "test";
    Integer partition = 10;
    Long timestamp = System.currentTimeMillis();
    String key = "aaa";

    ProducerRecord<String, byte[]> producerRecord = KafkaMessageFactory
        .createWriter(topic, partition, timestamp, key)
        .writeBinary(event);

    assertThat(producerRecord.topic())
        .isEqualTo(topic);
    assertThat(producerRecord.partition())
        .isEqualTo(partition);
    assertThat(producerRecord.timestamp())
        .isEqualTo(timestamp);
    assertThat(producerRecord.key())
        .isEqualTo(key);
    assertThat(producerRecord.headers())
        .containsExactlyInAnyOrder(expectedHeaders.toArray());
    assertThat(producerRecord.value())
        .isEqualTo(expectedBody);
}
 
Example #3
Source File: OutgoingKafkaRecord.java    From smallrye-reactive-messaging with Apache License 2.0
/**
 * Creates a new outgoing Kafka Message with a header added to the header list.
 *
 * @param key the header key
 * @param content the header value, must not be {@code null}
 * @return the updated Kafka Message.
 */
public OutgoingKafkaRecord<K, T> withHeader(String key, byte[] content) {
    Headers headers = getHeaders();
    Headers copy = new RecordHeaders(headers);
    copy.add(new Header() {
        @Override
        public String key() {
            return key;
        }

        @Override
        public byte[] value() {
            return content;
        }
    });
    return new OutgoingKafkaRecord<>(getTopic(), getKey(), getPayload(), getTimestamp(), getPartition(),
            copy, getAck(), getNack(), getMetadata());
}
 
Example #4
Source File: JsonSchemaKafkaDeserializer.java    From apicurio-registry with Apache License 2.0
/**
 * Gets the message type from the headers.  Throws if not found.
 *
 * @param headers the headers
 */
@SuppressWarnings("unchecked")
protected Class<T> getMessageType(Headers headers) {
    Header header = headers.lastHeader(JsonSchemaSerDeConstants.HEADER_MSG_TYPE);
    if (header == null) {
        throw new RuntimeException("Message Type not found in headers.");
    }
    String msgTypeName = IoUtil.toString(header.value());
    
    try {
        return (Class<T>) Thread.currentThread().getContextClassLoader().loadClass(msgTypeName);
    } catch (ClassNotFoundException ignored) {
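        // ignored on purpose: fall through and retry with Class.forName(msgTypeName) below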
    }
    try {
        return (Class<T>) Class.forName(msgTypeName);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
 
Example #5
Source File: TestInput.java    From fluent-kafka-streams-tests with MIT License
/**
 * <p>Constructor for the test input topic.</p>
 *
 * @param testDriver Kafka's {@link TopologyTestDriver} used in this test.
 * @param topic Name of input topic.
 * @param keySerde Serde for key type in topic.
 * @param valueSerde Serde for value type in topic.
 */
protected TestInput(final TopologyTestDriver testDriver, final String topic, final Serde<K> keySerde,
        final Serde<V> valueSerde) {
    this.testDriver = testDriver;
    this.topic = topic;
    this.keySerde = keySerde;
    this.valueSerde = valueSerde;

    this.consumerFactory = new ConsumerRecordFactory<>(topic,
            keySerde == null ? new UnspecifiedSerializer<K>() : keySerde.serializer(),
            valueSerde == null ? new UnspecifiedSerializer<V>() : valueSerde.serializer()) {
        @Override
        public ConsumerRecord<byte[], byte[]> create(final String topicName, final K key, final V value,
                final Headers headers, final long timestampMs) {
            final ConsumerRecord<byte[], byte[]> record = super.create(topicName, key, value, headers, timestampMs);
            testDriver.pipeInput(record);
            return record;
        }
    };
}
 
Example #6
Source File: CloudEventMessageSerializerTest.java    From sdk-java with Apache License 2.0
@Test
public void serializerShouldWork() {
    String topic = "test";
    CloudEvent event = Data.V1_WITH_JSON_DATA;

    CloudEventMessageSerializer serializer = new CloudEventMessageSerializer();

    Headers headers = new RecordHeaders();

    MockBinaryMessageWriter inMessage = new MockBinaryMessageWriter();
    CloudEventUtils.toVisitable(event).read(inMessage);

    byte[] payload = serializer.serialize(topic, headers, inMessage);

    MessageReader outMessage = KafkaMessageFactory.createReader(headers, payload);

    assertThat(outMessage.getEncoding())
        .isEqualTo(Encoding.BINARY);
    assertThat(outMessage.toEvent())
        .isEqualTo(event);
}
 
Example #7
Source File: BinderHeaderMapper.java    From spring-cloud-stream-binder-kafka with Apache License 2.0
@SuppressWarnings("unchecked")
@Nullable
private Map<String, String> decodeJsonTypes(Headers source) {
	Map<String, String> types = null;
	Header jsonTypes = source.lastHeader(JSON_TYPES);
	if (jsonTypes != null) {
		ObjectMapper headerObjectMapper = getObjectMapper();
		try {
			types = headerObjectMapper.readValue(jsonTypes.value(), Map.class);
		}
		catch (IOException e) {
			logger.error(e, () -> "Could not decode json types: " + new String(jsonTypes.value()));
		}
	}
	return types;
}
 
Example #8
Source File: ProducerRecordCoderTest.java    From beam with Apache License 2.0
private ProducerRecord<String, String> verifySerialization(
    Headers headers, Integer partition, Long timestamp) throws IOException {
  ProducerRecord<String, String> producerRecord =
      new ProducerRecord<>("topic", partition, timestamp, "key", "value", headers);

  ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
  ProducerRecordCoder producerRecordCoder =
      ProducerRecordCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of());

  producerRecordCoder.encode(producerRecord, outputStream);
  ProducerRecord<String, String> decodedRecord =
      producerRecordCoder.decode(new ByteArrayInputStream(outputStream.toByteArray()));

  assertEquals(producerRecord, decodedRecord);

  return decodedRecord;
}
 
Example #9
Source File: TracingKafkaUtilsTest.java    From java-kafka-client with Apache License 2.0
@Test
public void inject_two_contexts_and_extract() {
  MockSpan span = mockTracer.buildSpan("first").start();
  Headers headers = new RecordHeaders();
  assertEquals(0, headers.toArray().length);

  // inject first
  TracingKafkaUtils.inject(span.context(), headers, mockTracer);
  int headersLength = headers.toArray().length;
  assertTrue(headersLength > 0);

  // inject second
  MockSpan span2 = mockTracer.buildSpan("second").asChildOf(span.context()).start();
  TracingKafkaUtils.inject(span2.context(), headers, mockTracer);
  assertTrue(headers.toArray().length > headersLength);

  // extract: the most recently injected (second) context should be returned
  MockSpan.MockContext spanContext = (MockSpan.MockContext) TracingKafkaUtils
      .extractSpanContext(headers, mockTracer);
  assertEquals(span2.context().spanId(), spanContext.spanId());
  assertEquals(span2.context().traceId(), spanContext.traceId());
}
 
Example #10
Source File: LiKafkaClientsUtils.java    From li-apache-kafka-clients with BSD 2-Clause "Simplified" License
/**
 * Special header keys have a "_" prefix and are managed internally by the clients.
 * @param headers the Kafka headers object to scan
 * @return a map of any "special" headers contained in the given headers object
 */
public static Map<String, byte[]> fetchSpecialHeaders(Headers headers) {
  Map<String, byte[]> map = new HashMap<>();
  for (Header header : headers) {

    if (!header.key().startsWith("_")) {
      // skip any non special header
      continue;
    }

    if (map.containsKey(header.key())) {
      throw new IllegalStateException("Duplicate special header found " + header.key());
    }
    map.put(header.key(), header.value());
  }
  return map;
}
 
Example #11
Source File: KafkaRecord.java    From beam with Apache License 2.0
public KafkaRecord(
    String topic,
    int partition,
    long offset,
    long timestamp,
    KafkaTimestampType timestampType,
    @Nullable Headers headers,
    KV<K, V> kv) {
  this.topic = topic;
  this.partition = partition;
  this.offset = offset;
  this.timestamp = timestamp;
  this.timestampType = timestampType;
  this.headers = headers;
  this.kv = kv;
}
 
Example #12
Source File: KafkaAvroSerdesTest.java    From registry with Apache License 2.0
@Test
public void testSpecificSerializedGenericDeserialized() {
    Map<String, Object> config = new HashMap<>();
    config.put(AvroSnapshotDeserializer.SPECIFIC_AVRO_READER, false);
    KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
    kafkaAvroDeserializer.configure(config, false);

    KafkaAvroSerializer kafkaAvroSerializer = new KafkaAvroSerializer(schemaRegistryClient);
    kafkaAvroSerializer.configure(config, false);

    TestRecord record = new TestRecord();
    record.setField1("some value");
    record.setField1("some other value");

    byte[] bytes = kafkaAvroSerializer.serialize(topic, record);
    Object o = kafkaAvroDeserializer.deserialize(topic, bytes);
    checkSpecificSerializedGenericDeserializedEquals(record, o);

    Headers headers = new RecordHeaders();
    bytes = kafkaAvroSerializer.serialize(topic, headers, record);
    o = kafkaAvroDeserializer.deserialize(topic, headers, bytes);
    checkSpecificSerializedGenericDeserializedEquals(record, o);
}
 
Example #13
Source File: OutgoingKafkaRecord.java    From smallrye-reactive-messaging with Apache License 2.0
/**
 * Creates a new outgoing Kafka Message with a header added to the header list.
 *
 * @param key the header key
 * @param content the header value, must not be {@code null}
 * @param enc the encoding, must not be {@code null}
 * @return the updated Kafka Message.
 */
public OutgoingKafkaRecord<K, T> withHeader(String key, String content, Charset enc) {
    Headers headers = getHeaders();
    Headers copy = new RecordHeaders(headers);
    copy.add(new Header() {
        @Override
        public String key() {
            return key;
        }

        @Override
        public byte[] value() {
            return content.getBytes(enc);
        }
    });
    return new OutgoingKafkaRecord<>(getTopic(), getKey(), getPayload(), getTimestamp(), getPartition(),
            copy, getAck(), getNack(), getMetadata());
}
 
Example #14
Source File: OutgoingKafkaRecord.java    From smallrye-reactive-messaging with Apache License 2.0
/**
 * Creates a new outgoing Kafka Message with a header added to the header list.
 *
 * @param key the header key
 * @param content the header value, must not be {@code null}
 * @return the updated Kafka Message.
 */
public OutgoingKafkaRecord<K, T> withHeader(String key, String content) {
    Headers headers = getHeaders();
    Headers copy = new RecordHeaders(headers);
    copy.add(new Header() {
        @Override
        public String key() {
            return key;
        }

        @Override
        public byte[] value() {
            return content.getBytes();
        }
    });
    return new OutgoingKafkaRecord<>(getTopic(), getKey(), getPayload(), getTimestamp(), getPartition(),
            copy, getAck(), getNack(), getMetadata());
}
 
Example #15
Source File: KafkaAvroDeserializer.java    From registry with Apache License 2.0
@Override
public Object deserialize(String topic, Headers headers, byte[] data) {
    if (headers != null) {
        final Header header = headers.lastHeader(isKey ? keySchemaVersionIdHeaderName : valueSchemaVersionIdHeaderName);
        if (header != null) {
            return messageAndMetadataAvroDeserializer.deserialize(new MessageAndMetadata(header.value(), data), readerVersions.get(topic));
        }
    }
    return deserialize(topic, data);
}
 
Example #16
Source File: KafkaMessageFactoryTest.java    From sdk-java with Apache License 2.0
@ParameterizedTest()
@MethodSource("binaryTestArguments")
public void readBinary(Headers headers, byte[] body, CloudEvent event) {
    MessageReader message = KafkaMessageFactory.createReader(headers, body);

    assertThat(message.getEncoding())
        .isEqualTo(Encoding.BINARY);
    assertThat(message.toEvent())
        .isEqualTo(event);
}
 
Example #17
Source File: HeaderUtilsTest.java    From extension-kafka with Apache License 2.0
@Test
public void testGeneratingHeadersForEventMessageShouldGenerateEventHeaders() {
    String metaKey = "someHeaderKey";
    EventMessage<Object> evt = asEventMessage("SomePayload").withMetaData(
            MetaData.with(metaKey, "someValue")
    );
    SerializedObject<byte[]> so = serializedObject();
    Headers headers = toHeaders(evt, so, byteMapper());

    assertEventHeaders(metaKey, evt, so, headers);
}
 
Example #18
Source File: TracingKafkaUtilsTest.java    From java-kafka-client with Apache License 2.0
@Test
public void extract() {
  MockSpan span = mockTracer.buildSpan("test").start();
  Headers headers = new RecordHeaders();
  TracingKafkaUtils.inject(span.context(), headers, mockTracer);

  MockSpan.MockContext spanContext = (MockSpan.MockContext) TracingKafkaUtils
      .extractSpanContext(headers, mockTracer);

  assertEquals(span.context().spanId(), spanContext.spanId());
  assertEquals(span.context().traceId(), spanContext.traceId());
}
 
Example #19
Source File: KafkaStubMessages.java    From spring-cloud-contract with Apache License 2.0
private Map<String, Object> toMap(Headers headers) {
	Map<String, Object> map = new HashMap<>();
	for (Header header : headers) {
		map.put(header.key(), header.value());
	}
	return map;
}
 
Example #20
Source File: TracingKafkaUtilsTest.java    From java-kafka-client with Apache License 2.0
@Test
public void inject() {
  MockSpan span = mockTracer.buildSpan("test").start();
  Headers headers = new RecordHeaders();
  assertEquals(0, headers.toArray().length);

  TracingKafkaUtils.inject(span.context(), headers, mockTracer);

  assertTrue(headers.toArray().length > 0);
}
 
Example #21
Source File: PublisherServiceTest.java    From kafka-pubsub-emulator with Apache License 2.0
@Test
public void publish_withAttributes() {
  int messages = 3;
  PublishRequest request =
      PublishRequest.newBuilder()
          .setTopic("projects/project-1/topics/topic-2")
          .addAllMessages(generatePubsubMessagesWithHeader(messages))
          .build();

  MockProducer<String, ByteBuffer> producer = startPublishExecutor(messages);

  PublishResponse response = blockingStub.publish(request);
  assertThat(response.getMessageIdsList(), Matchers.contains("0-0", "0-1", "0-2"));

  List<Headers> headers =
      producer.history().stream().map(ProducerRecord::headers).collect(Collectors.toList());
  assertThat(
      headers,
      Matchers.contains(
          new RecordHeaders(
              Collections.singletonList(
                  new RecordHeader("some-key", "some-value".getBytes(UTF_8)))),
          new RecordHeaders(
              Collections.singletonList(
                  new RecordHeader("some-key", "some-value".getBytes(UTF_8)))),
          new RecordHeaders(
              Collections.singletonList(
                  new RecordHeader("some-key", "some-value".getBytes(UTF_8))))));

  verify(statisticsManager, times(3))
      .computePublish(
          eq("projects/project-1/topics/topic-2"),
          argThat(message -> message.toStringUtf8().matches(MESSAGE_CONTENT_REGEX)),
          anyLong());
  verify(statisticsManager, never()).computePublishError(anyString());
}
 
Example #22
Source File: PublisherService.java    From kafka-pubsub-emulator with Apache License 2.0
private Headers buildHeaders(Map<String, String> attributesMap) {
  if (attributesMap == null || attributesMap.isEmpty()) {
    return null;
  }
  return new RecordHeaders(
      attributesMap
          .entrySet()
          .parallelStream()
          .map(attribute -> new RecordHeader(attribute.getKey(), attribute.getValue().getBytes()))
          .collect(Collectors.toList()));
}
 
Example #23
Source File: SubscriptionManager.java    From kafka-pubsub-emulator with Apache License 2.0
private Map<String, String> buildAttributesMap(Headers headers) {
  if (Objects.isNull(headers)) {
    return null;
  }
  Map<String, String> attributesMap = new HashMap<>();
  headers.forEach(header -> attributesMap.put(header.key(), new String(header.value())));
  return attributesMap;
}
 
Example #24
Source File: ConsumerInterceptorTTL.java    From BigData-In-Practice with Apache License 2.0
@Override
public ConsumerRecords<String, String> onConsume(
        ConsumerRecords<String, String> records) {
    long now = System.currentTimeMillis();
    Map<TopicPartition, List<ConsumerRecord<String, String>>> newRecords
            = new HashMap<>();
    for (TopicPartition tp : records.partitions()) {
        List<ConsumerRecord<String, String>> tpRecords = records.records(tp);
        List<ConsumerRecord<String, String>> newTpRecords = new ArrayList<>();
        for (ConsumerRecord<String, String> record : tpRecords) {
            Headers headers = record.headers();
            long ttl = -1;
            for (Header header : headers) { // check whether the headers contain a header whose key is "ttl"
                if (header.key().equalsIgnoreCase("ttl")) {
                    ttl = BytesUtils.bytesToLong(header.value());
                }
            }
            // expiry check
            if (ttl > 0) {
                if (now - record.timestamp() < ttl * 1000) {
                    newTpRecords.add(record); // not yet expired, keep it
                }
                // records whose TTL has elapsed are dropped
            } else { // no TTL is set, so no expiry check is needed
                newTpRecords.add(record);
            }
        }
        if (!newTpRecords.isEmpty()) {
            newRecords.put(tp, newTpRecords);
        }
    }
    return new ConsumerRecords<>(newRecords);
}
 
Example #25
Source File: HeadersMapExtractAdapterTest.java    From java-kafka-client with Apache License 2.0
@Test
public void verifyNullHeaderHandled() {
  Headers headers = new RecordHeaders();
  headers.add("test_null_header", null);
  HeadersMapExtractAdapter headersMapExtractAdapter = new HeadersMapExtractAdapter(headers);
  Entry<String, String> header = headersMapExtractAdapter.iterator().next();
  assertNotNull(header);
  assertEquals("test_null_header", header.getKey());
  assertNull(header.getValue());
}
 
Example #26
Source File: DefaultKafkaMessageConverter.java    From extension-kafka with Apache License 2.0
private SerializedMessage<?> extractSerializedMessage(Headers headers, byte[] messageBody) {
    SimpleSerializedObject<byte[]> serializedObject = new SimpleSerializedObject<>(
            messageBody,
            byte[].class,
            valueAsString(headers, MESSAGE_TYPE),
            valueAsString(headers, MESSAGE_REVISION, null)
    );

    return new SerializedMessage<>(
            valueAsString(headers, MESSAGE_ID),
            new LazyDeserializingObject<>(serializedObject, serializer),
            new LazyDeserializingObject<>(MetaData.from(extractAxonMetadata(headers)))
    );
}
 
Example #27
Source File: GsonSerDeProvider.java    From zerocode with Apache License 2.0
@Override
public void write(JsonWriter writer, Headers value) throws IOException {
    if (value == null || !value.iterator().hasNext()) {
        writer.nullValue();
    } else {
        Map<String, String> headers = new HashMap<>();
        value.forEach(header -> headers.put(header.key(), new String(header.value())));
        gson.getAdapter(Map.class).write(writer, headers);
    }
}
 
Example #28
Source File: KafkaRecord.java    From beam with Apache License 2.0
public KafkaRecord(
    String topic,
    int partition,
    long offset,
    long timestamp,
    KafkaTimestampType timestampType,
    @Nullable Headers headers,
    K key,
    V value) {
  this(topic, partition, offset, timestamp, timestampType, headers, KV.of(key, value));
}
 
Example #29
Source File: HeaderUtils.java    From extension-kafka with Apache License 2.0
/**
 * Creates a new {@link org.apache.kafka.common.header.internals.RecordHeader} based on {@code key} and
 * {@code value} and adds it to {@code headers}. The {@code value} is converted to bytes and follows this logic:
 * <ul>
 * <li>Instant - calls {@link Instant#toEpochMilli()}</li>
 * <li>Number - calls {@link HeaderUtils#toBytes} </li>
 * <li>String/custom object - the UTF-8 bytes of its {@link Object#toString()} representation</li>
 * <li>null - <code>null</code> </li>
 * </ul>
 *
 * @param headers the Kafka {@code headers} to add a {@code key}/{@code value} pair to
 * @param key     the key you want to add to the {@code headers}
 * @param value   the value you want to add to the {@code headers}
 */
public static void addHeader(Headers headers, String key, Object value) {
    notNull(headers, () -> "headers may not be null");
    if (value instanceof Instant) {
        headers.add(key, toBytes((Number) ((Instant) value).toEpochMilli()));
    } else if (value instanceof Number) {
        headers.add(key, toBytes((Number) value));
    } else if (value instanceof String) {
        headers.add(key, ((String) value).getBytes(UTF_8));
    } else if (value == null) {
        headers.add(key, null);
    } else {
        headers.add(key, value.toString().getBytes(UTF_8));
    }
}
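
A short usage sketch for the helper above. The call site and header keys are hypothetical; it assumes the surrounding HeaderUtils class (and its toBytes helper) plus RecordHeaders are available.

Headers headers = new RecordHeaders();
HeaderUtils.addHeader(headers, "message-timestamp", Instant.now()); // stored as epoch-milli bytes
HeaderUtils.addHeader(headers, "retry-count", 3);                   // stored via toBytes(Number)
HeaderUtils.addHeader(headers, "message-revision", "1.0");          // stored as UTF-8 bytes
HeaderUtils.addHeader(headers, "correlation-id", null);             // stored with a null value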
 
Example #30
Source File: JsonSchemaKafkaSerializer.java    From apicurio-registry with Apache License 2.0
/**
 * @see org.apache.kafka.common.serialization.Serializer#serialize(java.lang.String, org.apache.kafka.common.header.Headers, java.lang.Object)
 */
@Override
public byte[] serialize(String topic, Headers headers, T data) {
    if (data == null) {
        return null;
    }

    // Now serialize the data
    try {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        JsonGenerator generator = mapper.getFactory().createGenerator(baos);
        if (isValidationEnabled()) {
            String artifactId = getArtifactId(topic, data);
            long globalId = getGlobalId(artifactId, topic, data);
            addSchemaHeaders(headers, artifactId, globalId);

            SchemaValidator schemaValidator = getSchemaCache().getSchema(globalId);
            generator = api.decorateJsonGenerator(schemaValidator, generator);
        }
        addTypeHeaders(headers, data);

        mapper.writeValue(generator, data);

        return baos.toByteArray();
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}