com.alibaba.otter.canal.protocol.Message Java Examples

The following examples show how to use com.alibaba.otter.canal.protocol.Message. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: BaseCanalClientTest.java    From canal-1.1.3 with Apache License 2.0 6 votes vote down vote up
/**
 * Logs a one-line summary of a fetched batch: batch id, entry count, total
 * binlog event byte size, current timestamp, and the dump positions of the
 * first and last entries (null when the batch is empty).
 */
protected void printSummary(Message message, long batchId, int size) {
    long totalEventBytes = 0L;
    for (Entry entry : message.getEntries()) {
        totalEventBytes += entry.getHeader().getEventLength();
    }

    String startPosition = null;
    String endPosition = null;
    boolean hasEntries = !CollectionUtils.isEmpty(message.getEntries());
    if (hasEntries) {
        int lastIndex = message.getEntries().size() - 1;
        startPosition = buildPositionForDump(message.getEntries().get(0));
        endPosition = buildPositionForDump(message.getEntries().get(lastIndex));
    }

    SimpleDateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT);
    logger.info(context_format, new Object[] { batchId, size, totalEventBytes,
            dateFormat.format(new Date()), startPosition, endPosition });
}
 
Example #2
Source File: KafkaOffsetCanalConnector.java    From canal-1.1.3 with Apache License 2.0 6 votes vote down vote up
/**
 * Fetches Kafka messages without acknowledging them.
 *
 * @param timeout poll timeout
 * @param unit    time unit of {@code timeout}
 * @param offset  message offset to seek to first (-1 means no seek)
 * @return the fetched messages, or an empty list when not running or nothing polled
 * @throws CanalClientException on client errors
 */
public List<KafkaMessage> getListWithoutAck(Long timeout, TimeUnit unit, long offset) throws CanalClientException {
    waitClientRunning();
    if (!running) {
        return Lists.newArrayList();
    }

    // A non-negative offset means the caller wants to rewind/seek before polling.
    if (offset > -1) {
        int part = (partition == null) ? 0 : partition;
        kafkaConsumer.seek(new TopicPartition(topic, part), offset);
    }

    ConsumerRecords<String, Message> polled = kafkaConsumer.poll(unit.toMillis(timeout));
    if (polled.isEmpty()) {
        return Lists.newArrayList();
    }

    List<KafkaMessage> result = new ArrayList<>();
    for (ConsumerRecord<String, Message> record : polled) {
        result.add(new KafkaMessage(record.value(), record.offset()));
    }
    return result;
}
 
Example #3
Source File: KafkaOffsetCanalConnector.java    From canal with Apache License 2.0 6 votes vote down vote up
/**
 * Fetches Kafka messages without acknowledging them.
 *
 * @param timeout poll timeout
 * @param unit    time unit of {@code timeout}
 * @param offset  message offset to seek to first (-1 means no seek)
 * @return the fetched messages, or an empty list when not running or nothing polled
 * @throws CanalClientException on client errors
 */
public List<KafkaMessage> getListWithoutAck(Long timeout, TimeUnit unit, long offset) throws CanalClientException {
    waitClientRunning();
    if (!running) {
        return Lists.newArrayList();
    }

    // Seek first when the caller supplied an explicit (non-negative) offset.
    if (offset > -1) {
        TopicPartition targetPartition = new TopicPartition(topic, partition == null ? 0 : partition);
        kafkaConsumer.seek(targetPartition, offset);
    }

    ConsumerRecords<String, Message> batch = kafkaConsumer.poll(unit.toMillis(timeout));
    if (batch.isEmpty()) {
        return Lists.newArrayList();
    }

    List<KafkaMessage> collected = new ArrayList<>();
    for (ConsumerRecord<String, Message> record : batch) {
        collected.add(new KafkaMessage(record.value(), record.offset()));
    }
    return collected;
}
 
Example #4
Source File: ClusterCanalConnector.java    From canal-1.1.3 with Apache License 2.0 6 votes vote down vote up
/**
 * Fetches a batch without auto-ack, retrying up to {@code retryTimes} by
 * restarting the underlying connector after each failure.
 *
 * @throws CanalClientException when every retry attempt fails
 */
public Message getWithoutAck(int batchSize, Long timeout, TimeUnit unit) throws CanalClientException {
    int attempts = 0;
    while (attempts < retryTimes) {
        try {
            return currentConnector.getWithoutAck(batchSize, timeout, unit);
        } catch (Throwable t) {
            Object server = currentConnector != null ? currentConnector.getAddress() : "null";
            logger.warn(String.format("something goes wrong when getWithoutAck data from server:%s", server), t);
            attempts++;
            restart();
            logger.info("restart the connector for next round retry.");
        }
    }
    throw new CanalClientException("failed to fetch the data after " + attempts + " times retry");
}
 
Example #5
Source File: ClusterCanalConnector.java    From canal-1.1.3 with Apache License 2.0 6 votes vote down vote up
/**
 * Fetches a batch without auto-ack, retrying up to {@code retryTimes} by
 * restarting the underlying connector after each failure.
 *
 * @throws CanalClientException when every retry attempt fails
 */
public Message getWithoutAck(int batchSize) throws CanalClientException {
    int attempts = 0;
    while (attempts < retryTimes) {
        try {
            return currentConnector.getWithoutAck(batchSize);
        } catch (Throwable t) {
            Object server = currentConnector != null ? currentConnector.getAddress() : "null";
            logger.warn(String.format("something goes wrong when getWithoutAck data from server:%s", server), t);
            attempts++;
            restart();
            logger.info("restart the connector for next round retry.");
        }
    }
    throw new CanalClientException("failed to fetch the data after " + attempts + " times retry");
}
 
Example #6
Source File: ClusterCanalConnector.java    From canal-1.1.3 with Apache License 2.0 6 votes vote down vote up
/**
 * Fetches (and auto-acks) a batch, retrying up to {@code retryTimes} by
 * restarting the underlying connector after each failure.
 *
 * @throws CanalClientException when every retry attempt fails
 */
public Message get(int batchSize, Long timeout, TimeUnit unit) throws CanalClientException {
    int attempts = 0;
    while (attempts < retryTimes) {
        try {
            return currentConnector.get(batchSize, timeout, unit);
        } catch (Throwable t) {
            Object server = currentConnector != null ? currentConnector.getAddress() : "null";
            logger.warn(String.format("something goes wrong when getting data from server:%s", server), t);
            attempts++;
            restart();
            logger.info("restart the connector for next round retry.");
        }
    }
    throw new CanalClientException("failed to fetch the data after " + attempts + " times retry");
}
 
Example #7
Source File: ClusterCanalConnector.java    From canal-1.1.3 with Apache License 2.0 6 votes vote down vote up
/**
 * Fetches (and auto-acks) a batch, retrying up to {@code retryTimes} by
 * restarting the underlying connector after each failure.
 *
 * @throws CanalClientException when every retry attempt fails
 */
public Message get(int batchSize) throws CanalClientException {
    int attempts = 0;
    while (attempts < retryTimes) {
        try {
            return currentConnector.get(batchSize);
        } catch (Throwable t) {
            Object server = currentConnector != null ? currentConnector.getAddress() : "null";
            logger.warn(String.format("something goes wrong when getting data from server:%s", server), t);
            attempts++;
            restart();
            logger.info("restart the connector for next round retry.");
        }
    }
    throw new CanalClientException("failed to fetch the data after " + attempts + " times retry");
}
 
Example #8
Source File: RocketMQCanalConnector.java    From canal with Apache License 2.0 6 votes vote down vote up
/**
 * Takes the next batch from the internal blocking queue without acking it.
 * Only one outstanding (un-acked) batch is allowed at a time.
 *
 * @param timeout how long to wait for a batch
 * @param unit    time unit of {@code timeout}
 * @return the batch data, or an empty list when the poll timed out
 * @throws CanalClientException if a batch is already outstanding, or on interruption
 */
@Override
public List<Message> getListWithoutAck(Long timeout, TimeUnit unit) throws CanalClientException {
    try {
        if (this.lastGetBatchMessage != null) {
            throw new CanalClientException("mq get/ack not support concurrent & async ack");
        }

        ConsumerBatchMessage batchMessage = messageBlockingQueue.poll(timeout, unit);
        if (batchMessage != null) {
            // Remember the batch so a later ack/rollback can reference it.
            this.lastGetBatchMessage = batchMessage;
            return batchMessage.getData();
        }
    } catch (InterruptedException ex) {
        // BUGFIX: restore the interrupt flag so callers can observe the
        // interruption; swallowing it silently loses the signal.
        Thread.currentThread().interrupt();
        logger.warn("Get message timeout", ex);
        throw new CanalClientException("Failed to fetch the data after: " + timeout);
    }
    return Lists.newArrayList();
}
 
Example #9
Source File: BaseCanalServerWithEmbededTest.java    From canal-1.1.3 with Apache License 2.0 6 votes vote down vote up
/**
 * Pulls batches via auto-ack get() until the server reports empty
 * maxEmptyCount times in a row, counting the total entries received.
 */
@Test
public void testGet() {
    final int maxEmptyCount = 10;
    int emptyCount = 0;
    int totalCount = 0;
    server.subscribe(clientIdentity);
    while (emptyCount < maxEmptyCount) {
        Message message = server.get(clientIdentity, 11);
        if (!CollectionUtils.isEmpty(message.getEntries())) {
            // Data arrived: reset the empty streak and tally the entries.
            emptyCount = 0;
            totalCount += message.getEntries().size();
            continue;
        }
        emptyCount++;
        try {
            // Back off a little longer after each consecutive empty fetch.
            Thread.sleep(emptyCount * 300L);
        } catch (InterruptedException e) {
            Assert.fail();
        }
        System.out.println("empty count : " + emptyCount);
    }

    System.out.println("!!!!!! testGet totalCount : " + totalCount);
    server.unsubscribe(clientIdentity);
}
 
Example #10
Source File: CanalReaderMessageDumper.java    From DataLink with Apache License 2.0 6 votes vote down vote up
/**
 * Logs a one-line summary of a canal message batch: batch id, entry count,
 * total event byte size, current timestamp and first/last entry positions
 * (null when the batch has no entries).
 */
public static void dumpMessages(Message message, long batchId, int size) {
    long eventBytes = 0L;
    for (CanalEntry.Entry entry : message.getEntries()) {
        eventBytes += entry.getHeader().getEventLength();
    }

    String startPosition = null;
    String endPosition = null;
    int entryCount = (message.getEntries() == null) ? 0 : message.getEntries().size();
    if (entryCount > 0) {
        startPosition = buildPositionForDump(message.getEntries().get(0));
        endPosition = buildPositionForDump(message.getEntries().get(entryCount - 1));
    }

    SimpleDateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT);
    logger.info(context_format, new Object[] { batchId, size, eventBytes,
            dateFormat.format(new Date()), startPosition, endPosition });
}
 
Example #11
Source File: CanalReader.java    From DataLink with Apache License 2.0 6 votes vote down vote up
/**
 * Dumps the latest reader message (summary plus per-record detail) for
 * troubleshooting; does nothing unless info-level logging is enabled.
 */
void dump() {
    if (!logger.isInfoEnabled()) {
        return;
    }
    Message message = (Message) latestReaderMsg.getMetaData().get(MESSAGE_KEY);
    String startPosition = null;
    String endPosition = null;
    if (!CollectionUtils.isEmpty(message.getEntries())) {
        int lastIndex = message.getEntries().size() - 1;
        startPosition = CanalReaderRecordsDumper.buildPositionForDump(message.getEntries().get(0));
        endPosition = CanalReaderRecordsDumper.buildPositionForDump(message.getEntries().get(lastIndex));
    }

    // dump for troubleshooting problems
    CanalReaderMessageDumper.dumpMessages(message, latestReaderMsg.getBatchId(), message.getEntries().size());
    CanalReaderRecordsDumper.dumpRecords(latestReaderMsg.getBatchId(), latestReaderMsg.getRecords(),
            startPosition, endPosition, message.getEntries().size(), parameter.isDumpDetail());
}
 
Example #12
Source File: SelectorTask.java    From canal-elasticsearch with Apache License 2.0 6 votes vote down vote up
/**
 * Logs a one-line summary of a fetched batch: batch id, entry count, total
 * binlog event byte size, current time, and first/last entry dump positions
 * (null when the batch is empty).
 */
private void printSummary(Message message, long batchId, int size) {
    long memsize = 0;
    // Sum the event lengths to report the batch payload size.
    for (CanalEntry.Entry entry : message.getEntries()) {
        memsize += entry.getHeader().getEventLength();
    }

    String startPosition = null;
    String endPosition = null;
    if (!CollectionUtils.isEmpty(message.getEntries())) {
        startPosition = buildPositionForDump(message.getEntries().get(0));
        endPosition = buildPositionForDump(message.getEntries().get(message.getEntries().size() - 1));
    }

    // NOTE(review): unlike sibling implementations that build a local
    // SimpleDateFormat, this uses a shared 'format' field; if it is a
    // SimpleDateFormat it is not thread-safe — confirm single-threaded use.
    logger.info(context_format, new Object[]{batchId, size, memsize, format.format(new Date()), startPosition,
            endPosition});
}
 
Example #13
Source File: ClusterCanalConnector.java    From canal with Apache License 2.0 6 votes vote down vote up
/**
 * Fetches (and auto-acks) a batch, retrying up to {@code retryTimes} by
 * restarting the underlying connector after each failure.
 *
 * @throws CanalClientException when every retry attempt fails
 */
public Message get(int batchSize, Long timeout, TimeUnit unit) throws CanalClientException {
    int attempts = 0;
    while (attempts < retryTimes) {
        try {
            return currentConnector.get(batchSize, timeout, unit);
        } catch (Throwable t) {
            Object server = currentConnector != null ? currentConnector.getAddress() : "null";
            logger.warn(String.format("something goes wrong when getting data from server:%s", server), t);
            attempts++;
            restart();
            logger.info("restart the connector for next round retry.");
        }
    }
    throw new CanalClientException("failed to fetch the data after " + attempts + " times retry");
}
 
Example #14
Source File: ClusterCanalConnector.java    From canal with Apache License 2.0 6 votes vote down vote up
/**
 * Fetches a batch without auto-ack, retrying up to {@code retryTimes} by
 * restarting the underlying connector after each failure.
 *
 * @throws CanalClientException when every retry attempt fails
 */
public Message getWithoutAck(int batchSize, Long timeout, TimeUnit unit) throws CanalClientException {
    int attempts = 0;
    while (attempts < retryTimes) {
        try {
            return currentConnector.getWithoutAck(batchSize, timeout, unit);
        } catch (Throwable t) {
            Object server = currentConnector != null ? currentConnector.getAddress() : "null";
            logger.warn(String.format("something goes wrong when getWithoutAck data from server:%s", server), t);
            attempts++;
            restart();
            logger.info("restart the connector for next round retry.");
        }
    }
    throw new CanalClientException("failed to fetch the data after " + attempts + " times retry");
}
 
Example #15
Source File: CanalExecutor.java    From search-commons with Apache License 2.0 6 votes vote down vote up
/**
 * Consumes one canal message: filters for DML row-data entries, replays each
 * parsed row change through the handle, then acks the message id.
 * The handle's finishMessageHandle() always runs, even on failure.
 */
private void consumerMessage(Message message) {
    log.debug("canal instance: " + handle.instanceName() + " get message entry size " + message.getEntries().size());
    try {
        for (CanalEntry.Entry e : message.getEntries()) {
            // Only row-data entries carrying an actual payload are of interest.
            if (e.getEntryType() != CanalEntry.EntryType.ROWDATA || !e.hasStoreValue()) continue;
            CanalEntry.Header header = e.getHeader();
            // Skip events executed before this consumer started, event types
            // beyond DELETE (non-DML), and anything the handle declines.
            if (header.getExecuteTime() < startRtTime
                    || header.getEventType().getNumber() > CanalEntry.EventType.DELETE_VALUE
                    || !handle.startHandle(header)) continue;
            try {
                CanalEntry.RowChange rowChange = CanalEntry.RowChange.parseFrom(e.getStoreValue());
                if (rowChange.getIsDdl()) continue; // DDL statements are ignored
                handle.rowChangeHandle(rowChange);
            } catch (InvalidProtocolBufferException e1) {
                // A single unparsable entry is logged and skipped; the rest of
                // the batch is still processed.
                log.error("canal instance: " + handle.instanceName() + " parse store value have exception: ", e1);
            }
        }
        // Ack the whole message even if individual entries failed to parse.
        handle.ack(message.getId());
    } finally {
        handle.finishMessageHandle();
    }
}
 
Example #16
Source File: CanalKafkaProducer.java    From canal-1.1.3 with Apache License 2.0 5 votes vote down vote up
/**
 * Builds the Kafka producer(s) from the given MQ properties.
 * Flat-message mode serializes values as JSON strings (producer2); otherwise
 * the binary canal Message serializer is used (producer). When transactions
 * are enabled, initTransactions() is invoked after the producer is created.
 */
@Override
public void init(MQProperties kafkaProperties) {
    this.kafkaProperties = kafkaProperties;
    Properties properties = new Properties();
    properties.put("bootstrap.servers", kafkaProperties.getServers());
    properties.put("acks", kafkaProperties.getAcks());
    properties.put("compression.type", kafkaProperties.getCompressionType());
    properties.put("batch.size", kafkaProperties.getBatchSize());
    properties.put("linger.ms", kafkaProperties.getLingerMs());
    properties.put("max.request.size", kafkaProperties.getMaxRequestSize());
    properties.put("buffer.memory", kafkaProperties.getBufferMemory());
    properties.put("key.serializer", StringSerializer.class.getName());
    // One in-flight request at a time, presumably to preserve send ordering.
    properties.put("max.in.flight.requests.per.connection", 1);

    // User-supplied extra properties override the defaults set above.
    if (!kafkaProperties.getProperties().isEmpty()) {
        properties.putAll(kafkaProperties.getProperties());
    }

    // Transactional mode gets a fixed transactional.id; otherwise configure retries.
    if (kafkaProperties.getTransaction()) {
        properties.put("transactional.id", "canal-transactional-id");
    } else {
        properties.put("retries", kafkaProperties.getRetries());
    }
    if (!kafkaProperties.getFlatMessage()) {
        properties.put("value.serializer", MessageSerializer.class.getName());
        producer = new KafkaProducer<String, Message>(properties);
    } else {
        properties.put("value.serializer", StringSerializer.class.getName());
        producer2 = new KafkaProducer<String, String>(properties);
    }
    // initTransactions must run after the producer instance exists, hence the
    // second transaction check.
    if (kafkaProperties.getTransaction()) {
        if (!kafkaProperties.getFlatMessage()) {
            producer.initTransactions();
        } else {
            producer2.initTransactions();
        }
    }
}
 
Example #17
Source File: CanalMessageDeserializer.java    From canal-1.1.3 with Apache License 2.0 5 votes vote down vote up
/**
 * Deserializes a wire-format packet into a Message.
 *
 * @param data           raw packet bytes; null yields null
 * @param lazyParseEntry when true, keep raw ByteString entries for on-demand
 *                       parsing instead of eagerly building CanalEntry.Entry
 * @return the decoded Message, or null when data is null
 * @throws CanalClientException on unsupported compression, server ACK errors,
 *         unexpected packet types, or any parse failure
 */
public static Message deserializer(byte[] data, boolean lazyParseEntry) {
    try {
        if (data == null) {
            return null;
        } else {
            CanalPacket.Packet p = CanalPacket.Packet.parseFrom(data);
            switch (p.getType()) {
                case MESSAGES: {
                    // Only uncompressed payloads (or the proto2-compat marker) are supported.
                    if (!p.getCompression().equals(Compression.NONE)
                        && !p.getCompression().equals(Compression.COMPRESSIONCOMPATIBLEPROTO2)) {
                        throw new CanalClientException("compression is not supported in this connector");
                    }

                    CanalPacket.Messages messages = CanalPacket.Messages.parseFrom(p.getBody());
                    Message result = new Message(messages.getBatchId());
                    if (lazyParseEntry) {
                        // Keep the raw ByteStrings; the caller parses entries on demand.
                        result.setRawEntries(messages.getMessagesList());
                        result.setRaw(true);
                    } else {
                        // Eagerly parse every ByteString into a CanalEntry.Entry.
                        for (ByteString byteString : messages.getMessagesList()) {
                            result.addEntry(CanalEntry.Entry.parseFrom(byteString));
                        }
                        result.setRaw(false);
                    }
                    return result;
                }
                case ACK: {
                    // An ACK packet here carries a server-side error; surface it.
                    Ack ack = Ack.parseFrom(p.getBody());
                    throw new CanalClientException("something goes wrong with reason: " + ack.getErrorMessage());
                }
                default: {
                    throw new CanalClientException("unexpected packet type: " + p.getType());
                }
            }
        }
    } catch (Exception e) {
        // NOTE(review): this also re-wraps the CanalClientExceptions thrown
        // above into a generic "deserializer failed" — confirm intended.
        throw new CanalClientException("deserializer failed", e);
    }
}
 
Example #18
Source File: BaseCanalServerWithEmbededTest.java    From canal with Apache License 2.0 5 votes vote down vote up
/**
 * Pulls batches via getWithoutAck + explicit ack until the server reports
 * empty maxEmptyCount times in a row, counting total entries received.
 */
@Test
public void testGetWithoutAck() {
    final int maxEmptyCount = 10;
    int emptyCount = 0;
    int totalCount = 0;
    server.subscribe(clientIdentity);
    while (emptyCount < maxEmptyCount) {
        Message message = server.getWithoutAck(clientIdentity, 11);
        boolean empty = CollectionUtils.isEmpty(message.getEntries());
        if (empty) {
            emptyCount++;
            try {
                // Back off a little longer after each consecutive empty fetch.
                Thread.sleep(emptyCount * 300L);
            } catch (InterruptedException e) {
                Assert.fail();
            }
            System.out.println("empty count : " + emptyCount);
        } else {
            emptyCount = 0;
            totalCount += message.getEntries().size();
            server.ack(clientIdentity, message.getId());
        }
    }

    System.out.println("!!!!!! testGetWithoutAck totalCount : " + totalCount);
    server.unsubscribe(clientIdentity);
}
 
Example #19
Source File: AbstractCanalClientTest.java    From canal-1.1.3 with Apache License 2.0 5 votes vote down vote up
/**
 * Main client loop: connect, subscribe, then repeatedly fetch batches without
 * auto-ack, print non-empty batches, and ack them. On error the connector is
 * disconnected and the outer loop reconnects while still running.
 */
protected void process() {
    int batchSize = 5 * 1024;
    while (running) {
        try {
            MDC.put("destination", destination);
            connector.connect();
            connector.subscribe();
            while (running) {
                Message message = connector.getWithoutAck(batchSize); // fetch up to batchSize entries
                long batchId = message.getId();
                int size = message.getEntries().size();
                if (batchId != -1 && size > 0) {
                    printSummary(message, batchId, size);
                    printEntry(message.getEntries());
                }

                // BUGFIX: only ack real batches. The original acked even when
                // batchId == -1 (empty fetch), which is meaningless; this matches
                // the fixed variant of this client elsewhere in the canal project.
                if (batchId != -1) {
                    connector.ack(batchId); // commit ack
                    // connector.rollback(batchId); // on failure, roll back instead
                }
            }
        } catch (Exception e) {
            logger.error("process error!", e);
        } finally {
            connector.disconnect();
            MDC.remove("destination");
        }
    }
}
 
Example #20
Source File: CanalMessageSerializerUtil.java    From canal with Apache License 2.0 5 votes vote down vote up
/**
 * Deserializes a wire-format packet into a Message.
 *
 * @param data           raw packet bytes; null yields null
 * @param lazyParseEntry when true, keep raw ByteString entries for on-demand
 *                       parsing instead of eagerly building CanalEntry.Entry
 * @return the decoded Message, or null when data is null
 * @throws CanalClientException on unsupported compression, server ACK errors,
 *         unexpected packet types, or any parse failure
 */
public static Message deserializer(byte[] data, boolean lazyParseEntry) {
    try {
        if (data == null) {
            return null;
        } else {
            CanalPacket.Packet p = CanalPacket.Packet.parseFrom(data);
            switch (p.getType()) {
                case MESSAGES: {
                    // Only uncompressed payloads (or the proto2-compat marker) are supported.
                    if (!p.getCompression().equals(CanalPacket.Compression.NONE)
                        && !p.getCompression().equals(CanalPacket.Compression.COMPRESSIONCOMPATIBLEPROTO2)) {
                        throw new CanalClientException("compression is not supported in this connector");
                    }

                    CanalPacket.Messages messages = CanalPacket.Messages.parseFrom(p.getBody());
                    Message result = new Message(messages.getBatchId());
                    if (lazyParseEntry) {
                        // Keep the raw ByteStrings; the caller parses entries on demand.
                        result.setRawEntries(messages.getMessagesList());
                        result.setRaw(true);
                    } else {
                        // Eagerly parse every ByteString into a CanalEntry.Entry.
                        for (ByteString byteString : messages.getMessagesList()) {
                            result.addEntry(CanalEntry.Entry.parseFrom(byteString));
                        }
                        result.setRaw(false);
                    }
                    return result;
                }
                case ACK: {
                    // An ACK packet here carries a server-side error; surface it.
                    CanalPacket.Ack ack = CanalPacket.Ack.parseFrom(p.getBody());
                    throw new CanalClientException("something goes wrong with reason: " + ack.getErrorMessage());
                }
                default: {
                    throw new CanalClientException("unexpected packet type: " + p.getType());
                }
            }
        }
    } catch (Exception e) {
        // NOTE(review): this also re-wraps the CanalClientExceptions thrown
        // above into a generic "deserializer failed" — confirm intended.
        throw new CanalClientException("deserializer failed", e);
    }
}
 
Example #21
Source File: AbstractCanalClientTest.java    From canal with Apache License 2.0 5 votes vote down vote up
/**
 * Main client loop: connect, subscribe, then repeatedly fetch batches without
 * auto-ack, print non-empty batches, and ack only real batches (batchId != -1).
 * On error the loop sleeps briefly, disconnects, and reconnects while running.
 */
protected void process() {
    int batchSize = 5 * 1024;
    while (running) {
        try {
            MDC.put("destination", destination);
            connector.connect();
            connector.subscribe();
            while (running) {
                Message message = connector.getWithoutAck(batchSize); // fetch up to batchSize entries
                long batchId = message.getId();
                int size = message.getEntries().size();
                if (batchId == -1 || size == 0) {
                    // try {
                    // Thread.sleep(1000);
                    // } catch (InterruptedException e) {
                    // }
                } else {
                    printSummary(message, batchId, size);
                    printEntry(message.getEntries());
                }

                // Only ack real batches; an id of -1 denotes an empty fetch.
                if (batchId != -1) {
                    connector.ack(batchId); // commit ack
                    // connector.rollback(batchId); // on failure, roll back instead
                }
            }
        } catch (Exception e) {
            logger.error("process error!", e);
            try {
                // Brief back-off before the outer loop reconnects.
                Thread.sleep(1000L);
            } catch (InterruptedException e1) {
                // ignore
            }
        } finally {
            connector.disconnect();
            MDC.remove("destination");
        }
    }
}
 
Example #22
Source File: SimpleCanalConnector.java    From canal with Apache License 2.0 5 votes vote down vote up
/**
 * Requests a batch from the canal server without auto-ack by writing a GET
 * packet and then reading the response messages.
 *
 * @param batchSize requested batch size; non-positive falls back to 1000
 * @param timeout   fetch timeout; null or negative means no timeout control
 * @param unit      time unit for timeout; null defaults to milliseconds
 * @return the received Message, or null when the client is not running
 * @throws CanalClientException on I/O failure
 */
public Message getWithoutAck(int batchSize, Long timeout, TimeUnit unit) throws CanalClientException {
    waitClientRunning();
    if (!running) {
        return null;
    }
    try {
        int size = (batchSize <= 0) ? 1000 : batchSize;
        long time = (timeout == null || timeout < 0) ? -1 : timeout; // -1 means no timeout control
        if (unit == null) {
            unit = TimeUnit.MILLISECONDS;
        }

        // Build and send the GET request with autoAck disabled; the server
        // holds the batch until an explicit ack/rollback.
        writeWithHeader(Packet.newBuilder()
            .setType(PacketType.GET)
            .setBody(Get.newBuilder()
                .setAutoAck(false)
                .setDestination(clientIdentity.getDestination())
                .setClientId(String.valueOf(clientIdentity.getClientId()))
                .setFetchSize(size)
                .setTimeout(time)
                .setUnit(unit.ordinal())
                .build()
                .toByteString())
            .build()
            .toByteArray());
        return receiveMessages();
    } catch (IOException e) {
        throw new CanalClientException(e);
    }
}
 
Example #23
Source File: RabbitMQCanalConnector.java    From canal with Apache License 2.0 5 votes vote down vote up
/**
 * Fetches messages and acks them, but only when something was actually
 * received; an empty or null result is returned un-acked.
 */
@Override
public List<Message> getList(Long timeout, TimeUnit unit) throws CanalClientException {
    List<Message> fetched = getListWithoutAck(timeout, unit);
    boolean gotData = fetched != null && !fetched.isEmpty();
    if (gotData) {
        ack();
    }
    return fetched;
}
 
Example #24
Source File: CanalKafkaConsumer.java    From canal with Apache License 2.0 5 votes vote down vote up
/**
 * Creates the underlying KafkaConsumer and subscribes to the configured topic.
 * Flat mode deserializes values as plain strings; otherwise the canal-specific
 * KafkaMessageDeserializer produces Message objects.
 */
@Override
public void connect() {
    if (this.flatMessage) {
        kafkaProperties.put("value.deserializer", StringDeserializer.class);
        this.kafkaConsumer = new KafkaConsumer<String, String>(kafkaProperties);
    } else {
        kafkaProperties.put("value.deserializer", KafkaMessageDeserializer.class);
        this.kafkaConsumer = new KafkaConsumer<String, Message>(kafkaProperties);
    }
    kafkaConsumer.subscribe(Collections.singletonList(topic));
}
 
Example #25
Source File: BaseCanalServerWithEmbededTest.java    From canal-1.1.3 with Apache License 2.0 5 votes vote down vote up
/**
 * Pulls batches via getWithoutAck + explicit ack until the server reports
 * empty maxEmptyCount times in a row, counting total entries received.
 */
@Test
public void testGetWithoutAck() {
    final int maxEmptyCount = 10;
    int emptyCount = 0;
    int totalCount = 0;
    server.subscribe(clientIdentity);
    while (emptyCount < maxEmptyCount) {
        Message message = server.getWithoutAck(clientIdentity, 11);
        if (!CollectionUtils.isEmpty(message.getEntries())) {
            // Data arrived: reset the empty streak, tally entries, and ack.
            emptyCount = 0;
            totalCount += message.getEntries().size();
            server.ack(clientIdentity, message.getId());
            continue;
        }
        emptyCount++;
        try {
            // Back off a little longer after each consecutive empty fetch.
            Thread.sleep(emptyCount * 300L);
        } catch (InterruptedException e) {
            Assert.fail();
        }
        System.out.println("empty count : " + emptyCount);
    }

    System.out.println("!!!!!! testGetWithoutAck totalCount : " + totalCount);
    server.unsubscribe(clientIdentity);
}
 
Example #26
Source File: ProtocolTest.java    From canal-1.1.3 with Apache License 2.0 5 votes vote down vote up
/**
 * Round-trips a single-entry raw Message through the wire format: builds
 * packet bytes via buildData, parses them back as a Packet, and reassembles
 * the entries from the MESSAGES body.
 */
@Test
public void testSimple() throws IOException {
    // Build a minimal ROWDATA entry with a synthetic binlog header.
    Header.Builder headerBuilder = Header.newBuilder();
    headerBuilder.setLogfileName("mysql-bin.000001");
    headerBuilder.setLogfileOffset(1024);
    headerBuilder.setExecuteTime(1024);
    Entry.Builder entryBuilder = Entry.newBuilder();
    entryBuilder.setHeader(headerBuilder.build());
    entryBuilder.setEntryType(EntryType.ROWDATA);
    Entry entry = entryBuilder.build();
    // Raw message (batchId 3) carrying the entry as a serialized ByteString.
    Message message = new Message(3, true, Arrays.asList(entry.toByteString()));

    byte[] body = buildData(message);
    Packet packet = Packet.parseFrom(body);
    switch (packet.getType()) {
        case MESSAGES: {
            // This connector only handles uncompressed payloads.
            if (!packet.getCompression().equals(Compression.NONE)) {
                throw new CanalClientException("compression is not supported in this connector");
            }

            // Reparse each ByteString back into an Entry.
            Messages messages = Messages.parseFrom(packet.getBody());
            Message result = new Message(messages.getBatchId());
            for (ByteString byteString : messages.getMessagesList()) {
                result.addEntry(Entry.parseFrom(byteString));
            }

            System.out.println(result);
            break;
        }
        default: {
            throw new CanalClientException("unexpected packet type: " + packet.getType());
        }
    }
}
 
Example #27
Source File: KafkaCanalConnector.java    From canal with Apache License 2.0 5 votes vote down vote up
/**
 * Fetches messages and acks them, but only when something was actually
 * received. Returns an empty list when the client is not running.
 */
@Override
public List<Message> getList(Long timeout, TimeUnit unit) throws CanalClientException {
    waitClientRunning();
    if (!running) {
        return Lists.newArrayList();
    }

    List<Message> fetched = getListWithoutAck(timeout, unit);
    if (fetched == null || fetched.isEmpty()) {
        return fetched;
    }
    this.ack();
    return fetched;
}
 
Example #28
Source File: TotoroChannel.java    From canal-elasticsearch with Apache License 2.0 5 votes vote down vote up
/**
 * Enqueues a canal message for the selector stage. The message is accepted
 * only while rollBack.state() is true; otherwise it is discarded and the
 * discard is logged with its batch id.
 *
 * @param e the canal message to enqueue
 * @throws InterruptedException if interrupted while waiting for queue space
 */
public void putMessage(Message e) throws InterruptedException {
    // Idiom fix: compare booleans directly instead of "== true".
    if (rollBack.state()) {
        selectorMessageQueue.put(e);
    } else {
        logger.info("The rollback happened =============>  discard message , batchId :{}", e.getId());
    }
}
 
Example #29
Source File: AbstractBasicMessageTransponder.java    From spring-boot-starter-canal with MIT License 5 votes vote down vote up
/**
 * Dispatches every entry of a canal message to the registered listeners:
 * skips ignored entry types (e.g. transaction markers), routes DDL row
 * changes to processDdl, and distributes each row of DML changes to both
 * interface-based and annotation-based listeners.
 *
 * @throws CanalClientException when an entry's store value cannot be parsed
 */
@Override
protected void distributeEvent(Message message) {
    List<CanalEntry.Entry> entries = message.getEntries();
    for (CanalEntry.Entry entry : entries) {
        //ignore the transaction operations
        List<CanalEntry.EntryType> ignoreEntryTypes = getIgnoreEntryTypes();
        if (ignoreEntryTypes != null
                && ignoreEntryTypes.stream().anyMatch(t -> entry.getEntryType() == t)) {
            continue;
        }
        CanalEntry.RowChange rowChange;
        try {
            rowChange = CanalEntry.RowChange.parseFrom(entry.getStoreValue());
        } catch (Exception e) {
            // A parse failure aborts the whole distribution with context.
            throw new CanalClientException("ERROR ## parser of event has an error , data:" + entry.toString(),
                    e);
        }
        //ignore the ddl operation (handled separately, not row-distributed)
        if (rowChange.hasIsDdl() && rowChange.getIsDdl()) {
            processDdl(rowChange);
            continue;
        }
        for (CanalEntry.RowData rowData : rowChange.getRowDatasList()) {
            //distribute to listener interfaces
            distributeByImpl(rowChange.getEventType(), rowData);
            //distribute to annotation listener interfaces
            distributeByAnnotation(destination,
                    entry.getHeader().getSchemaName(),
                    entry.getHeader().getTableName(),
                    rowChange.getEventType(),
                    rowData);
        }
    }
}
 
Example #30
Source File: ProtocolTest.java    From canal with Apache License 2.0 5 votes vote down vote up
/**
 * Round-trips a single-entry raw Message through the wire format.
 * NOTE(review): unlike the canal-1.1.3 variant of this test, this one is
 * annotated expected = CanalClientException.class, so in this version the
 * round-trip is expected to throw — presumably buildData/parse rejects this
 * raw-message shape here; confirm against the project's buildData.
 */
@Test(expected = CanalClientException.class)
public void testSimple() throws IOException {
    // Build a minimal ROWDATA entry with a synthetic binlog header.
    Header.Builder headerBuilder = Header.newBuilder();
    headerBuilder.setLogfileName("mysql-bin.000001");
    headerBuilder.setLogfileOffset(1024);
    headerBuilder.setExecuteTime(1024);
    Entry.Builder entryBuilder = Entry.newBuilder();
    entryBuilder.setHeader(headerBuilder.build());
    entryBuilder.setEntryType(EntryType.ROWDATA);
    Entry entry = entryBuilder.build();
    // Raw message (batchId 3) carrying the entry as a serialized ByteString.
    Message message = new Message(3, true, Arrays.asList(entry.toByteString()));

    byte[] body = buildData(message);
    Packet packet = Packet.parseFrom(body);
    switch (packet.getType()) {
        case MESSAGES: {
            // This connector only handles uncompressed payloads.
            if (!packet.getCompression().equals(Compression.NONE)) {
                throw new CanalClientException("compression is not supported in this connector");
            }

            // Reparse each ByteString back into an Entry.
            Messages messages = Messages.parseFrom(packet.getBody());
            Message result = new Message(messages.getBatchId());
            for (ByteString byteString : messages.getMessagesList()) {
                result.addEntry(Entry.parseFrom(byteString));
            }

            System.out.println(result);
            break;
        }
        default: {
            throw new CanalClientException("unexpected packet type: " + packet.getType());
        }
    }
}