org.apache.pulsar.client.impl.TypedMessageBuilderImpl Java Examples

The following examples show how to use org.apache.pulsar.client.impl.TypedMessageBuilderImpl. The examples are taken from open source projects; the source file and license are noted above each example.
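Before the project-specific examples, here is a minimal sketch of the public API that TypedMessageBuilderImpl backs: Producer#newMessage() returns a TypedMessageBuilder, which is populated and then sent. The service URL, topic name, key, value, and property below are placeholders, not taken from any of the examples.

// Minimal sketch; checked PulsarClientExceptions are elided for brevity.
PulsarClient client = PulsarClient.builder()
        .serviceUrl("pulsar://localhost:6650")    // placeholder service URL
        .build();

Producer<String> producer = client.newProducer(Schema.STRING)
        .topic("my-topic")                        // placeholder topic
        .create();

// Producer#newMessage() hands back a TypedMessageBuilder (implemented by TypedMessageBuilderImpl)
MessageId id = producer.newMessage()
        .key("my-key")
        .eventTime(System.currentTimeMillis())
        .property("origin", "example")            // placeholder property
        .value("my-value")
        .send();

producer.close();
client.close();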
Example #1
Source File: ContextImplTest.java    From pulsar with Apache License 2.0
@BeforeMethod
public void setup() {
    config = new InstanceConfig();
    FunctionDetails functionDetails = FunctionDetails.newBuilder()
        .setUserConfig("")
        .build();
    config.setFunctionDetails(functionDetails);
    logger = mock(Logger.class);
    client = mock(PulsarClientImpl.class);
    when(client.newProducer()).thenReturn(new ProducerBuilderImpl(client, Schema.BYTES));
    when(client.createProducerAsync(any(ProducerConfigurationData.class), any(), any()))
            .thenReturn(CompletableFuture.completedFuture(producer));
    when(client.getSchema(anyString())).thenReturn(CompletableFuture.completedFuture(Optional.empty()));
    when(producer.sendAsync(anyString())).thenReturn(CompletableFuture.completedFuture(null));

    TypedMessageBuilder messageBuilder = spy(new TypedMessageBuilderImpl(mock(ProducerBase.class), Schema.STRING));
    doReturn(new CompletableFuture<>()).when(messageBuilder).sendAsync();
    when(producer.newMessage()).thenReturn(messageBuilder);
    context = new ContextImpl(
        config,
        logger,
        client,
        new EnvironmentBasedSecretsProvider(), new CollectorRegistry(), new String[0],
            FunctionDetails.ComponentType.FUNCTION, null, null);
    context.setCurrentMessageContext((Record<String>) () -> null);
}
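The stubs above exist so that anything the function Context publishes flows through the spied TypedMessageBuilderImpl. As a hedged illustration, a test method built on this setup could publish like this (the topic name and value are placeholders; newOutputMessage declares PulsarClientException):

// Placeholder usage after setup(): publishing through the Context reaches the stubbed builder,
// whose sendAsync() was overridden above to return an incomplete future.
context.newOutputMessage("output-topic", Schema.STRING)
        .value("hello")
        .sendAsync();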
 
Example #2
Source File: MessageRecordUtils.java    From kop with Apache License 2.0
public static MessageImpl<byte[]> recordToEntry(Record record) {
    @SuppressWarnings("unchecked")
    TypedMessageBuilderImpl<byte[]> builder = new TypedMessageBuilderImpl(null, Schema.BYTES);

    // key
    if (record.hasKey()) {
        byte[] key = new byte[record.keySize()];
        record.key().get(key);
        builder.keyBytes(key);
        // reuse the key bytes as the ordering key to avoid converting between String and bytes
        builder.orderingKey(key);
    }

    // value
    if (record.hasValue()) {
        byte[] value = new byte[record.valueSize()];
        record.value().get(value);
        builder.value(value);
    } else {
        builder.value(new byte[0]);
    }

    // sequence
    if (record.sequence() >= 0) {
        builder.sequenceId(record.sequence());
    }

    // timestamp
    if (record.timestamp() >= 0) {
        builder.eventTime(record.timestamp());
    }

    // header
    for (Header h : record.headers()) {
        builder.property(h.key(),
            new String(h.value(), UTF_8));
    }

    return (MessageImpl<byte[]>) builder.getMessage();
}
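Note that TypedMessageBuilderImpl#getMessage() materializes the message without sending it, which is what lets this code turn a Kafka record into a Pulsar entry offline. A stripped-down sketch of the same pattern (the key, value, and property below are illustrative):

TypedMessageBuilderImpl<byte[]> builder =
        new TypedMessageBuilderImpl<>(null, Schema.BYTES);    // no producer needed just to build
builder.keyBytes("k".getBytes(StandardCharsets.UTF_8));
builder.value("v".getBytes(StandardCharsets.UTF_8));
builder.eventTime(System.currentTimeMillis());
builder.property("trace-id", "abc");                          // illustrative property

MessageImpl<byte[]> msg = (MessageImpl<byte[]>) builder.getMessage();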
 
Example #3
Source File: KafkaProducerInterceptorWrapper.java    From pulsar with Apache License 2.0
/**
 * Convert a Kafka {@link ProducerRecord} to a Pulsar {@link Message}.
 *
 * @param producerRecord Kafka record to be converted.
 * @return Pulsar message.
 */
private Message<byte[]> toPulsarMessage(ProducerRecord<K, V> producerRecord) {
    TypedMessageBuilderImpl typedMessageBuilder = new TypedMessageBuilderImpl(null, scheme);
    typedMessageBuilder.key(serializeKey(topic, producerRecord.key()));
    if (valueSchema instanceof PulsarKafkaSchema) {
        ((PulsarKafkaSchema<V>) valueSchema).setTopic(topic);
    }
    typedMessageBuilder.value(valueSchema.encode(producerRecord.value()));
    typedMessageBuilder.eventTime(eventTime);
    typedMessageBuilder.property(KafkaMessageRouter.PARTITION_ID, partitionID);
    return typedMessageBuilder.getMessage();
}
 
Example #4
Source File: PulsarKafkaProducer.java    From pulsar with Apache License 2.0
private RecordMetadata getRecordMetadata(String topic, TypedMessageBuilder<byte[]> msgBuilder, MessageId messageId,
        int size) {
    MessageIdImpl msgId = (MessageIdImpl) messageId;

    // Combine ledger id and entry id to form offset
    long offset = MessageIdUtils.getOffset(msgId);
    int partition = msgId.getPartitionIndex();

    TopicPartition tp = new TopicPartition(topic, partition);
    TypedMessageBuilderImpl<byte[]> mb = (TypedMessageBuilderImpl<byte[]>) msgBuilder;
    return new RecordMetadata(tp, offset, 0L, mb.getPublishTime(), 0L, mb.hasKey() ? mb.getKey().length() : 0, size);
}
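MessageIdUtils.getOffset() is what turns a Pulsar (ledgerId, entryId) pair into the single long offset that Kafka clients expect. The exact packing is internal to the compatibility library; the sketch below only illustrates the general idea of bit-packing two ids into one long and is an assumption, not the actual implementation.

// Hypothetical sketch of packing a ledger id and an entry id into one long.
// The real MessageIdUtils.getOffset() may use a different layout.
static long toOffset(long ledgerId, long entryId) {
    // assumes entryId fits in the low 28 bits (an assumption made for illustration)
    return (ledgerId << 28) | entryId;
}

static long[] fromOffset(long offset) {
    return new long[] { offset >>> 28, offset & ((1L << 28) - 1) };
}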
 
Example #5
Source File: PulsarKafkaProducerTest.java    From pulsar with Apache License 2.0
@Test
public void testPulsarKafkaInterceptor() throws PulsarClientException {
    // Arrange
    PulsarClient mockClient = mock(PulsarClient.class);
    ProducerBuilder mockProducerBuilder = mock(ProducerBuilder.class);
    org.apache.pulsar.client.api.Producer mockProducer = mock(org.apache.pulsar.client.api.Producer.class);
    ClientBuilder mockClientBuilder = mock(ClientBuilder.class);
    CompletableFuture mockPartitionFuture = new CompletableFuture();
    CompletableFuture mockSendAsyncFuture = new CompletableFuture();
    TypedMessageBuilder mockTypedMessageBuilder = mock(TypedMessageBuilderImpl.class);

    mockPartitionFuture.complete(new ArrayList<>());
    mockSendAsyncFuture.complete(new MessageIdImpl(1, 1, 1));
    doReturn(mockClientBuilder).when(mockClientBuilder).serviceUrl(anyString());
    doReturn(mockClientBuilder).when(mockClientBuilder).keepAliveInterval(anyInt(), any(TimeUnit.class));
    doReturn(mockClient).when(mockClientBuilder).build();
    doReturn(mockPartitionFuture).when(mockClient).getPartitionsForTopic(anyString());
    doReturn(mockProducerBuilder).when(mockProducerBuilder).topic(anyString());
    doReturn(mockProducerBuilder).when(mockProducerBuilder).clone();
    doReturn(mockProducerBuilder).when(mockProducerBuilder).intercept(
            (org.apache.pulsar.client.api.ProducerInterceptor) any());
    doReturn(mockProducer).when(mockProducerBuilder).create();
    doReturn(mockTypedMessageBuilder).when(mockProducer).newMessage();
    doReturn(mockSendAsyncFuture).when(mockTypedMessageBuilder).sendAsync();
    PowerMockito.mockStatic(PulsarClientKafkaConfig.class);
    PowerMockito.mockStatic(PulsarProducerKafkaConfig.class);
    when(PulsarClientKafkaConfig.getClientBuilder(any(Properties.class))).thenReturn(mockClientBuilder);
    when(PulsarProducerKafkaConfig.getProducerBuilder(any(PulsarClient.class), any(Properties.class))).thenReturn(mockProducerBuilder);

    Properties properties = new Properties();
    List<String> interceptors = new ArrayList<>();
    interceptors.add("org.apache.kafka.clients.producer.PulsarKafkaProducerTest$PulsarKafkaProducerInterceptor");
    properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    properties.put(ProducerConfig.PARTITIONER_CLASS_CONFIG, DefaultPartitioner.class);
    properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, Arrays.asList("pulsar://localhost:6650"));
    properties.put(ProducerConfig.CONNECTIONS_MAX_IDLE_MS_CONFIG, "1000000");
    properties.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, "1000000");
    properties.put(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG, interceptors);

    // Act
    PulsarKafkaProducer<String, String> pulsarKafkaProducer = new PulsarKafkaProducer<>(properties);

    pulsarKafkaProducer.send(new ProducerRecord<>("topic", 1,"key", "value"));

    // Verify
    verify(mockProducerBuilder, times(1)).intercept(
            (org.apache.pulsar.client.api.ProducerInterceptor)any());
}
 
Example #6
Source File: PulsarKafkaProducerTest.java    From pulsar with Apache License 2.0
@Test
public void testPulsarKafkaSendAvro() throws PulsarClientException {
    // Arrange
    PulsarClient mockClient = mock(PulsarClient.class);
    ProducerBuilder mockProducerBuilder = mock(ProducerBuilder.class);
    org.apache.pulsar.client.api.Producer mockProducer = mock(org.apache.pulsar.client.api.Producer.class);
    ClientBuilder mockClientBuilder = mock(ClientBuilder.class);
    CompletableFuture mockPartitionFuture = new CompletableFuture();
    CompletableFuture mockSendAsyncFuture = new CompletableFuture();
    TypedMessageBuilder mockTypedMessageBuilder = mock(TypedMessageBuilderImpl.class);

    mockPartitionFuture.complete(new ArrayList<>());
    mockSendAsyncFuture.complete(new MessageIdImpl(1, 1, 1));
    doReturn(mockClientBuilder).when(mockClientBuilder).serviceUrl(anyString());
    doReturn(mockClientBuilder).when(mockClientBuilder).keepAliveInterval(anyInt(), any(TimeUnit.class));
    doReturn(mockClient).when(mockClientBuilder).build();
    doReturn(mockPartitionFuture).when(mockClient).getPartitionsForTopic(anyString());
    doReturn(mockProducerBuilder).when(mockProducerBuilder).topic(anyString());
    doReturn(mockProducerBuilder).when(mockProducerBuilder).clone();
    doReturn(mockProducerBuilder).when(mockProducerBuilder).intercept(
            (org.apache.pulsar.client.api.ProducerInterceptor) any());
    doReturn(mockProducer).when(mockProducerBuilder).create();
    doReturn(mockTypedMessageBuilder).when(mockProducer).newMessage();
    doReturn(mockSendAsyncFuture).when(mockTypedMessageBuilder).sendAsync();
    PowerMockito.mockStatic(PulsarClientKafkaConfig.class);
    PowerMockito.mockStatic(PulsarProducerKafkaConfig.class);
    when(PulsarClientKafkaConfig.getClientBuilder(any(Properties.class))).thenReturn(mockClientBuilder);
    when(PulsarProducerKafkaConfig.getProducerBuilder(any(PulsarClient.class), any(Properties.class))).thenReturn(mockProducerBuilder);

    Properties properties = new Properties();
    List<String> interceptors = new ArrayList<>();
    interceptors.add("org.apache.kafka.clients.producer.PulsarKafkaProducerTest$PulsarKafkaProducerInterceptor");
    properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    properties.put(ProducerConfig.PARTITIONER_CLASS_CONFIG, DefaultPartitioner.class);
    properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, Arrays.asList("pulsar://localhost:6650"));
    properties.put(ProducerConfig.CONNECTIONS_MAX_IDLE_MS_CONFIG, "1000000");
    properties.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, "1000000");
    properties.put(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG, interceptors);

    AvroSchema<Bar> barSchema = AvroSchema.of(SchemaDefinition.<Bar>builder().withPojo(Bar.class).build());
    AvroSchema<Foo> fooSchema = AvroSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build());
    // Act
    PulsarKafkaProducer<Foo, Bar> pulsarKafkaProducer = new PulsarKafkaProducer<>(properties, fooSchema, barSchema);

    Bar bar = new Bar();
    bar.setField1(true);

    Foo foo = new Foo();
    foo.setField1("field1");
    foo.setField2("field2");
    foo.setField3(3);

    pulsarKafkaProducer.send(new ProducerRecord<>("topic", 1,foo, bar));

    // Verify
    verify(mockTypedMessageBuilder, times(1)).sendAsync();
    verify(mockProducerBuilder, times(1)).intercept(
            (org.apache.pulsar.client.api.ProducerInterceptor) any());
}
 
Example #7
Source File: PulsarKafkaProducerTest.java    From pulsar with Apache License 2.0
@Test
public void testPulsarKafkaSendAvro() throws PulsarClientException {
    // Arrange
    PulsarClient mockClient = mock(PulsarClient.class);
    ProducerBuilder mockProducerBuilder = mock(ProducerBuilder.class);
    org.apache.pulsar.client.api.Producer mockProducer = mock(org.apache.pulsar.client.api.Producer.class);
    ClientBuilder mockClientBuilder = mock(ClientBuilder.class);
    CompletableFuture mockPartitionFuture = new CompletableFuture();
    CompletableFuture mockSendAsyncFuture = new CompletableFuture();
    TypedMessageBuilder mockTypedMessageBuilder = mock(TypedMessageBuilderImpl.class);

    mockPartitionFuture.complete(new ArrayList<>());
    mockSendAsyncFuture.complete(new MessageIdImpl(1, 1, 1));
    doReturn(mockClientBuilder).when(mockClientBuilder).serviceUrl(anyString());
    doReturn(mockClientBuilder).when(mockClientBuilder).keepAliveInterval(anyInt(), any(TimeUnit.class));
    doReturn(mockClient).when(mockClientBuilder).build();
    doReturn(mockPartitionFuture).when(mockClient).getPartitionsForTopic(anyString());
    doReturn(mockProducerBuilder).when(mockProducerBuilder).topic(anyString());
    doReturn(mockProducerBuilder).when(mockProducerBuilder).clone();
    doReturn(mockProducer).when(mockProducerBuilder).create();
    doReturn(mockTypedMessageBuilder).when(mockProducer).newMessage();
    doReturn(mockSendAsyncFuture).when(mockTypedMessageBuilder).sendAsync();
    PowerMockito.mockStatic(PulsarClientKafkaConfig.class);
    PowerMockito.mockStatic(PulsarProducerKafkaConfig.class);
    when(PulsarClientKafkaConfig.getClientBuilder(any(Properties.class))).thenReturn(mockClientBuilder);
    when(PulsarProducerKafkaConfig.getProducerBuilder(any(PulsarClient.class), any(Properties.class))).thenReturn(mockProducerBuilder);

    Properties properties = new Properties();
    properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    properties.put(ProducerConfig.PARTITIONER_CLASS_CONFIG, DefaultPartitioner.class);
    properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, Arrays.asList("pulsar://localhost:6650"));
    properties.put(ProducerConfig.CONNECTIONS_MAX_IDLE_MS_CONFIG, "1000000");
    properties.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, "1000000");

    AvroSchema<Bar> barSchema = AvroSchema.of(SchemaDefinition.<Bar>builder().withPojo(Bar.class).build());
    AvroSchema<Foo> fooSchema = AvroSchema.of(SchemaDefinition.<Foo>builder().withPojo(Foo.class).build());
    // Act
    PulsarKafkaProducer<Foo, Bar> pulsarKafkaProducer = new PulsarKafkaProducer<>(properties, fooSchema, barSchema);

    Bar bar = new Bar();
    bar.setField1(true);

    Foo foo = new Foo();
    foo.setField1("field1");
    foo.setField2("field2");
    foo.setField3(3);

    pulsarKafkaProducer.send(new ProducerRecord<>("topic", 1, foo, bar));

    // Verify
    verify(mockTypedMessageBuilder).sendAsync();
}
 
Example #8
Source File: PersistentTopicE2ETest.java    From pulsar with Apache License 2.0
@Test
public void testPayloadCorruptionDetection() throws Exception {
    final String topicName = "persistent://prop/ns-abc/topic1";

    // 1. producer connect
    Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName)
        .enableBatching(false)
        .messageRoutingMode(MessageRoutingMode.SinglePartition)
        .create();
    Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topicName).subscriptionName("my-sub").subscribe();

    CompletableFuture<MessageId> future1 = producer.newMessage().value("message-1".getBytes()).sendAsync();

    // Stop the broker and publish more messages. The messages accumulate in the producer's pending queue,
    // and their checksums have already been computed. If we change the message content at that point, the
    // broker should reject the message with a checksum validation error.
    stopBroker();


    byte[] a2 = "message-2".getBytes();
    TypedMessageBuilder<byte[]> msg2 = producer.newMessage().value(a2);


    CompletableFuture<MessageId> future2 = msg2.sendAsync();

    // corrupt the message, new content would be 'message-3'
    ((TypedMessageBuilderImpl<byte[]>) msg2).getContent().put(a2.length - 1, (byte) '3');

    // Restart the broker to have the messages published
    startBroker();

    future1.get();

    try {
        future2.get();
        fail("since we corrupted the message, it should be rejected by the broker");
    } catch (Exception e) {
        // ok
    }

    // We should only receive msg1
    Message<byte[]> msg = consumer.receive(1, TimeUnit.SECONDS);
    assertEquals(new String(msg.getData()), "message-1");

    while ((msg = consumer.receive(1, TimeUnit.SECONDS)) != null) {
        assertEquals(new String(msg.getData()), "message-1");
    }
}
 
Example #9
Source File: PulsarBolt.java    From pulsar with Apache License 2.0
@Override
public void execute(Tuple input) {
    if (TupleUtils.isTick(input)) {
        collector.ack(input);
        return;
    }
    try {
        if (producer != null) {
            // a message key can be provided in the mapper
            TypedMessageBuilder<byte[]> msgBuilder = pulsarBoltConf.getTupleToMessageMapper()
                    .toMessage(producer.newMessage(), input);
            if (msgBuilder == null) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("[{}] Cannot send null message, acking the collector", boltId);
                }
                collector.ack(input);
            } else {
                final long messageSizeToBeSent = ((TypedMessageBuilderImpl<byte[]>) msgBuilder).getContent()
                        .remaining();
                msgBuilder.sendAsync().handle((msgId, ex) -> {
                    synchronized (collector) {
                        if (ex != null) {
                            collector.reportError(ex);
                            collector.fail(input);
                            LOG.error("[{}] Message send failed", boltId, ex);

                        } else {
                            collector.ack(input);
                            ++messagesSent;
                            messageSizeSent += messageSizeToBeSent;
                            if (LOG.isDebugEnabled()) {
                                LOG.debug("[{}] Message sent with id {}", boltId, msgId);
                            }
                        }
                    }

                    return null;
                });
            }
        }
    } catch (Exception e) {
        LOG.error("[{}] Message processing failed", boltId, e);
        collector.reportError(e);
        collector.fail(input);
    }
}
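Message construction here is delegated to the TupleToMessageMapper configured in pulsarBoltConf: it receives the builder from producer.newMessage() together with the incoming tuple, and may return null to signal that nothing should be sent. A hedged sketch of such a mapping step (the tuple field names are assumptions, and the real mapper interface may declare additional methods not shown here):

// Illustrative only: fill the TypedMessageBuilder from a Storm tuple.
// The field names "key" and "payload" are assumptions made for this sketch.
TypedMessageBuilder<byte[]> toMessage(TypedMessageBuilder<byte[]> msgBuilder, Tuple tuple) {
    byte[] payload = tuple.getBinaryByField("payload");
    if (payload == null) {
        return null;                    // PulsarBolt acks and skips null builders (see above)
    }
    return msgBuilder
            .key(tuple.getStringByField("key"))
            .value(payload);
}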