Python confluent_kafka.KafkaException() Examples

The following are 22 code examples of confluent_kafka.KafkaException(). You can go to the original project or source file by following the links above each example. You may also want to check out all available functions and classes of the confluent_kafka module.
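
Most of the examples below share one pattern: a KafkaException wraps a KafkaError, which is exposed as args[0] and carries the error code plus the fatal/retriable/transactional flags. A minimal, self-contained sketch of that pattern (assuming confluent-kafka >= 1.4, where KafkaError is directly constructible):

from confluent_kafka import KafkaError, KafkaException

try:
    # Construct and raise an error directly, as the test examples below do.
    raise KafkaException(KafkaError(KafkaError._TIMED_OUT))
except KafkaException as e:
    err = e.args[0]                      # the wrapped KafkaError
    print(err.code(), err.str())         # numeric code and description
    print(err.fatal(), err.retriable())  # classification flags
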
Example #1
Source File: worker.py    From incubator-spot with Apache License 2.0
def start(self):

        self._logger.info("Listening topic:{0}".format(self.kafka_consumer.Topic))
        consumer = self.kafka_consumer.start()

        try:
            while True:
                message = consumer.poll(timeout=1.0)
                if message is None:
                    continue
                if not message.error():
                    self._new_file(message.value().decode('utf-8'))
                elif message.error().code() == KafkaError._PARTITION_EOF:
                    # End of partition is not a real error; keep polling.
                    continue
                else:
                    raise KafkaException(message.error())

        except KeyboardInterrupt:
            sys.stderr.write('%% Aborted by user\n')

        consumer.close() 
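
Note: librdkafka 1.0 changed the default of the consumer setting enable.partition.eof to false, so the _PARTITION_EOF branch above is only reached when that setting is explicitly enabled.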
Example #2
Source File: asyncio.py    From confluent-kafka-python with Apache License 2.0
def produce2(self, topic, value, on_delivery):
        """
        A produce method in which delivery notifications are made available
        via both the returned future and on_delivery callback (if specified).
        """
        result = self._loop.create_future()

        def ack(err, msg):
            if err:
                self._loop.call_soon_threadsafe(
                    result.set_exception, KafkaException(err))
            else:
                self._loop.call_soon_threadsafe(
                    result.set_result, msg)
            if on_delivery:
                self._loop.call_soon_threadsafe(
                    on_delivery, err, msg)
        self._producer.produce(topic, value, on_delivery=ack)
        return result 
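
A hedged usage sketch for produce2() above (the aio_producer instance and the "items" topic are assumptions for illustration, not part of the original example): the returned future can be awaited while the same on_delivery callback serves fire-and-forget callers.

async def send_one(aio_producer):
    def on_delivery(err, msg):
        # Fires in addition to resolving the awaited future below.
        if err:
            print("delivery failed: {}".format(err))
        else:
            print("delivered to {} [{}]".format(msg.topic(), msg.partition()))

    # Awaiting the future raises KafkaException if delivery fails.
    msg = await aio_producer.produce2("items", b"payload", on_delivery)
    return msg.offset()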
Example #3
Source File: test_KafkaError.py    From confluent-kafka-python with Apache License 2.0
def test_fatal():
    """ Test fatal exceptions """

    # Create a Producer with an error callback; the fatal error below is
    # constructed directly rather than received from a broker.
    p = Producer({'error_cb': error_cb})

    with pytest.raises(KafkaException) as exc:
        raise KafkaException(KafkaError(KafkaError.MEMBER_ID_REQUIRED,
                                        fatal=True))
    err = exc.value.args[0]
    assert isinstance(err, KafkaError)
    assert err.fatal()
    assert not err.retriable()
    assert not err.txn_requires_abort()

    p.poll(0)  # Need some p use to avoid flake8 unused warning 
Example #4
Source File: adminapi.py    From confluent-kafka-python with Apache License 2.0
def example_describe_configs(a, args):
    """ describe configs """

    resources = [ConfigResource(restype, resname) for
                 restype, resname in zip(args[0::2], args[1::2])]

    fs = a.describe_configs(resources)

    # Wait for operation to finish.
    for res, f in fs.items():
        try:
            configs = f.result()
            for config in iter(configs.values()):
                print_config(config, 1)

        except KafkaException as e:
            print("Failed to describe {}: {}".format(res, e))
        except Exception:
            raise 
Example #5
Source File: worker.py    From incubator-spot with Apache License 2.0
def start(self):

        self._logger.info("Listening topic:{0}".format(self.kafka_consumer.Topic))
        consumer = self.kafka_consumer.start()
        try:
            while True:
                message = consumer.poll(timeout=1.0)
                if message is None:
                    continue
                if not message.error():
                    self._new_file(message.value().decode('utf-8'))
                elif message.error().code() == KafkaError._PARTITION_EOF:
                    # End of partition is not a real error; keep polling.
                    continue
                else:
                    raise KafkaException(message.error())

        except KeyboardInterrupt:
            sys.stderr.write('%% Aborted by user\n')

        consumer.close() 
Example #6
Source File: soakclient.py    From confluent-kafka-python with Apache License 2.0
def create_topic(self, topic, conf):
        """ Create the topic if it doesn't already exist """
        admin = AdminClient(conf)
        fs = admin.create_topics([NewTopic(topic, num_partitions=2, replication_factor=3)])
        f = fs[topic]
        try:
            res = f.result()  # noqa unused variable
        except KafkaException as ex:
            if ex.args[0].code() == KafkaError.TOPIC_ALREADY_EXISTS:
                self.logger.info("Topic {} already exists: good".format(topic))
            else:
                raise 
Example #7
Source File: test_KafkaError.py    From confluent-kafka-python with Apache License 2.0
def test_kafkaException_unknown_KafkaError_with_subclass():
    class MyException(KafkaException):
        def __init__(self, error_code):
            super(MyException, self).__init__(KafkaError(error_code))

    with pytest.raises(KafkaException, match="Err-12345?") as e:
        raise MyException(12345)
    assert not e.value.args[0].fatal()
    assert not e.value.args[0].retriable()
    assert not e.value.args[0].txn_requires_abort() 
Example #8
Source File: test_KafkaError.py    From confluent-kafka-python with Apache License 2.0
def test_kafkaError_unknown_error():
    with pytest.raises(KafkaException, match="Err-12345?") as e:
        raise KafkaError(12345)
    assert not e.value.args[0].fatal()
    assert not e.value.args[0].retriable()
    assert not e.value.args[0].txn_requires_abort() 
Example #9
Source File: test_KafkaError.py    From confluent-kafka-python with Apache License 2.0
def test_abortable():
    """ Test abortable exceptions """

    with pytest.raises(KafkaException) as exc:
        raise KafkaException(KafkaError(KafkaError.MEMBER_ID_REQUIRED,
                                        txn_requires_abort=True))
    err = exc.value.args[0]
    assert isinstance(err, KafkaError)
    assert not err.fatal()
    assert not err.retriable()
    assert err.txn_requires_abort() 
Example #10
Source File: test_KafkaError.py    From confluent-kafka-python with Apache License 2.0
def test_retriable():
    """ Test retriable exceptions """

    with pytest.raises(KafkaException) as exc:
        raise KafkaException(KafkaError(KafkaError.MEMBER_ID_REQUIRED,
                                        retriable=True))
    err = exc.value.args[0]
    assert isinstance(err, KafkaError)
    assert not err.fatal()
    assert err.retriable()
    assert not err.txn_requires_abort() 
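
Taken together, the three tests above (Examples #3, #9 and #10) show that fatal(), txn_requires_abort() and retriable() are independent flags on a KafkaError: each test sets exactly one of them at construction time and asserts that the other two remain false.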
Example #11
Source File: test_Admin.py    From confluent-kafka-python with Apache License 2.0
def test_alter_configs_api():
    """ alter_configs() tests, these wont really do anything since there
        is no broker configured. """

    a = AdminClient({"socket.timeout.ms": 10})
    fs = a.alter_configs([ConfigResource(confluent_kafka.admin.RESOURCE_BROKER, "3",
                                         set_config={"some": "config"})])
    # ignore the result

    with pytest.raises(Exception):
        a.alter_configs(None)

    with pytest.raises(Exception):
        a.alter_configs("something")

    with pytest.raises(ValueError):
        a.alter_configs([])

    fs = a.alter_configs([ConfigResource("topic", "mytopic",
                                         set_config={"set": "this",
                                                     "and": "this"}),
                          ConfigResource(confluent_kafka.admin.RESOURCE_GROUP,
                                         "mygroup")],
                         request_timeout=0.123)

    with pytest.raises(KafkaException):
        for f in concurrent.futures.as_completed(iter(fs.values())):
            f.result(timeout=1) 
Example #12
Source File: test_Admin.py    From confluent-kafka-python with Apache License 2.0
def test_describe_configs_api():
    """ describe_configs() tests, these wont really do anything since there
        is no broker configured. """

    a = AdminClient({"socket.timeout.ms": 10})
    fs = a.describe_configs([ConfigResource(confluent_kafka.admin.RESOURCE_BROKER, "3")])
    # ignore the result

    with pytest.raises(Exception):
        a.describe_configs(None)

    with pytest.raises(Exception):
        a.describe_configs("something")

    with pytest.raises(Exception):
        a.describe_configs([])

    with pytest.raises(ValueError):
        a.describe_configs([None, ConfigResource(confluent_kafka.admin.RESOURCE_TOPIC, "mytopic")])

    fs = a.describe_configs([ConfigResource(confluent_kafka.admin.RESOURCE_TOPIC, "mytopic"),
                             ConfigResource(confluent_kafka.admin.RESOURCE_GROUP, "mygroup")],
                            request_timeout=0.123)
    with pytest.raises(KafkaException):
        for f in concurrent.futures.as_completed(iter(fs.values())):
            f.result(timeout=1) 
Example #13
Source File: test_Admin.py    From confluent-kafka-python with Apache License 2.0
def test_delete_topics_api():
    """ delete_topics() tests, these wont really do anything since there is no
        broker configured. """

    a = AdminClient({"socket.timeout.ms": 10})
    fs = a.delete_topics(["mytopic"])
    # ignore the result

    with pytest.raises(Exception):
        a.delete_topics(None)

    with pytest.raises(Exception):
        a.delete_topics("mytopic")

    with pytest.raises(Exception):
        a.delete_topics([])

    with pytest.raises(ValueError):
        a.delete_topics([None, "mytopic"])

    fs = a.delete_topics(["mytopic", "other"])
    with pytest.raises(KafkaException):
        for f in concurrent.futures.as_completed(iter(fs.values())):
            f.result(timeout=1)

    fs = a.delete_topics(["mytopic", "othertopic", "third"],
                         request_timeout=0.5,
                         operation_timeout=300.1)
    for f in concurrent.futures.as_completed(iter(fs.values())):
        e = f.exception(timeout=1)
        assert isinstance(e, KafkaException)
        assert e.args[0].code() == KafkaError._TIMED_OUT

    with pytest.raises(TypeError):
        a.delete_topics(["mytopic"],
                        validate_only="maybe") 
Example #14
Source File: test_Admin.py    From confluent-kafka-python with Apache License 2.0
def test_basic_api():
    """ Basic API tests, these wont really do anything since there is no
        broker configured. """

    with pytest.raises(TypeError):
        a = AdminClient()

    a = AdminClient({"socket.timeout.ms": 10})

    a.poll(0.001)

    try:
        a.list_topics(timeout=0.2)
    except KafkaException as e:
        assert e.args[0].code() in (KafkaError._TIMED_OUT, KafkaError._TRANSPORT) 
Example #15
Source File: test_proto_serializers.py    From confluent-kafka-python with Apache License 2.0
def test_protobuf_serializer_type_mismatch(kafka_cluster):
    """
    Ensures an exception is raised when serializing an unexpected type.

    """
    pb2_1 = TestProto_pb2.TestMessage
    pb2_2 = NestedTestProto_pb2.NestedMessage

    sr = kafka_cluster.schema_registry({'url': 'http://localhost:8081'})
    topic = kafka_cluster.create_topic("serialization-proto-refs")
    serializer = ProtobufSerializer(pb2_1, sr)

    producer = kafka_cluster.producer(key_serializer=serializer)

    with pytest.raises(KafkaException,
                       match=r"message must be of type <class"
                             r" 'TestProto_pb2.TestMessage'\> not \<class"
                             r" 'NestedTestProto_pb2.NestedMessage'\>"):
        producer.produce(topic, key=pb2_2()) 
Example #16
Source File: asyncio.py    From confluent-kafka-python with Apache License 2.0
def create_item3(item: Item):
    try:
        producer.produce("items", item.name, on_delivery=ack)
        return {"timestamp": time()}
    except KafkaException as ex:
        raise HTTPException(status_code=500, detail=ex.args[0].str()) 
Example #17
Source File: asyncio.py    From confluent-kafka-python with Apache License 2.0
def create_item2(item: Item):
    try:
        aio_producer.produce2("items", item.name, on_delivery=ack)
        return {"timestamp": time()}
    except KafkaException as ex:
        raise HTTPException(status_code=500, detail=ex.args[0].str()) 
Example #18
Source File: asyncio.py    From confluent-kafka-python with Apache License 2.0
async def create_item1(item: Item):
    try:
        result = await aio_producer.produce("items", item.name)
        return {"timestamp": result.timestamp()}
    except KafkaException as ex:
        raise HTTPException(status_code=500, detail=ex.args[0].str()) 
Example #19
Source File: asyncio.py    From confluent-kafka-python with Apache License 2.0
def produce(self, topic, value):
        """
        An awaitable produce method.
        """
        result = self._loop.create_future()

        def ack(err, msg):
            if err:
                self._loop.call_soon_threadsafe(result.set_exception, KafkaException(err))
            else:
                self._loop.call_soon_threadsafe(result.set_result, msg)
        self._producer.produce(topic, value, on_delivery=ack)
        return result 
Example #20
Source File: kafka_source_extractor.py    From amundsendatabuilder with Apache License 2.0
def consume(self):
        # type: () -> Any
        """
        Consume messages from a given list of topics.

        :return:
        """
        records = []
        start = datetime.now()
        try:
            while True:
                msg = self.consumer.poll(timeout=self.consumer_poll_timeout)
                end = datetime.now()

                # Stop once the total consume timeout has elapsed
                if (end - start) > timedelta(seconds=self.consumer_total_timeout):
                    break

                if msg is None:
                    continue

                if msg.error():
                    # Hit the EOF of partition
                    if msg.error().code() == KafkaError._PARTITION_EOF:
                        continue
                    else:
                        raise KafkaException(msg.error())
                else:
                    records.append(msg.value())

        except Exception as e:
            LOGGER.exception(e)
        finally:
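            # NOTE: returning from a finally block suppresses any pending
            # exception (even KeyboardInterrupt); partial results are returned.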
            return records 
Example #21
Source File: kafka_streaming_client.py    From agogosml with MIT License
def handle_kafka_error(self, msg):  # pragma: no cover
        """Handle an error in kafka."""
        if msg.error().code() == KafkaError._PARTITION_EOF:
            # End of partition event
            self.logger.info('%% %s [%d] reached end at offset %d\n',
                             msg.topic(), msg.partition(), msg.offset())
        else:
            # Error
            raise KafkaException(msg.error()) 
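
A hedged sketch of how handle_kafka_error() is typically driven (the run_loop() helper, client object and process() callable are illustrative assumptions, not part of agogosml):

def run_loop(client, process):
    # client is assumed to expose the confluent_kafka Consumer as
    # client.consumer alongside the handle_kafka_error() method above.
    while True:
        msg = client.consumer.poll(timeout=1.0)
        if msg is None:
            continue
        if msg.error():
            client.handle_kafka_error(msg)  # raises on real errors
        else:
            process(msg.value())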
Example #22
Source File: verifiable_consumer.py    From confluent-kafka-python with Apache License 2.0
def do_commit(self, immediate=False, asynchronous=None):
        """ Commit every 1000 messages or whenever there is a consume timeout
            or immediate. """
        if (self.use_auto_commit
                or self.consumed_msgs_at_last_commit + (0 if immediate else 1000) >
                self.consumed_msgs):
            return

        # Make sure we report consumption before commit,
        # otherwise tests may fail because of commit > consumed
        if self.consumed_msgs_at_last_commit < self.consumed_msgs:
            self.send_records_consumed(immediate=True)

        if asynchronous is None:
            async_mode = self.use_async_commit
        else:
            async_mode = asynchronous

        self.dbg('Committing %d messages (Async=%s)' %
                 (self.consumed_msgs - self.consumed_msgs_at_last_commit,
                  async_mode))

        retries = 3
        while True:
            try:
                self.dbg('Commit')
                offsets = self.consumer.commit(asynchronous=async_mode)
                self.dbg('Commit done: offsets %s' % offsets)

                if not async_mode:
                    self.on_commit(None, offsets)

                break

            except KafkaException as e:
                if e.args[0].code() == KafkaError._NO_OFFSET:
                    self.dbg('No offsets to commit')
                    break
                elif e.args[0].code() in (KafkaError.REQUEST_TIMED_OUT,
                                          KafkaError.NOT_COORDINATOR,
                                          KafkaError._WAIT_COORD):
                    self.dbg('Commit failed: %s (%d retries)' % (str(e), retries))
                    if retries <= 0:
                        raise
                    retries -= 1
                    time.sleep(1)
                    continue
                else:
                    raise

        self.consumed_msgs_at_last_commit = self.consumed_msgs