Python kombu.Connection() Examples

The following are 30 code examples of kombu.Connection(), taken from open-source projects. Each example notes its source file, the project it comes from, and that project's license.
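Before the project snippets, here is a minimal, self-contained sketch of the kombu.Connection workflow they all build on; the broker URL and queue name are placeholders, not values taken from any of the projects below.

from kombu import Connection

# Placeholder broker URL; kombu also accepts redis://, sqs://, memory:// and other schemes.
with Connection('amqp://guest:guest@localhost:5672//') as conn:
    simple_queue = conn.SimpleQueue('example_queue')    # declares the queue on first use
    simple_queue.put({'hello': 'world'})                # publish a message
    message = simple_queue.get(block=True, timeout=5)   # fetch it back
    print(message.payload)                              # -> {'hello': 'world'}
    message.ack()
    simple_queue.close()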
Example #1
Source File: kombu_queue.py    From pyspider with Apache License 2.0
def __init__(self, name, url="amqp://", maxsize=0, lazy_limit=True):
        """
        Constructor for KombuQueue

        url:        http://kombu.readthedocs.org/en/latest/userguide/connections.html#urls
        maxsize:    an integer that sets the upper bound on the number of
                    items that can be placed in the queue.
        """
        self.name = name
        self.conn = Connection(url)
        self.queue = self.conn.SimpleQueue(self.name, no_ack=True, serializer='umsgpack')

        self.maxsize = maxsize
        self.lazy_limit = lazy_limit
        if self.lazy_limit and self.maxsize:
            self.qsize_diff_limit = int(self.maxsize * 0.1)
        else:
            self.qsize_diff_limit = 0
        self.qsize_diff = 0 
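The serializer='umsgpack' above is not one of kombu's built-in serializers; a hedged sketch of the kind of registration pyspider performs before this constructor can work (the content-type string is an assumption):

import umsgpack
from kombu.serialization import register

# Make serializer='umsgpack' resolvable by kombu.
register('umsgpack', umsgpack.packb, umsgpack.unpackb,
         content_type='application/x-msgpack', content_encoding='binary')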
Example #2
Source File: task_management.py    From arches with GNU Affero General Public License v3.0
def check_if_celery_available():
    result = None
    try:
        conn = Connection(settings.CELERY_BROKER_URL)
        conn.ensure_connection(max_retries=2)
    except Exception as e:
        logger.warning(_("Unable to connect to a celery broker"))
        return False
    inspect = app.control.inspect()
    for i in range(4):
        try:
            result = inspect.ping()
            break
        except BrokenPipeError as e:
            time.sleep(0.10)
            logger.warning(_("Celery worker connection failed. Reattempting"))
            if i == 3:
                logger.warning(_("Failed to connect to celery due to a BrokenPipeError"))
                logger.exception(e)
    if result is None:
        logger.info(_("A celery broker is running, but a celery worker is not available"))
        result = False  # ping returns True or None, assigning False here so we return only a boolean value
    else:
        result = True
    return result 
Example #3
Source File: suite_helper.py    From MozDef with Mozilla Public License 2.0
def setup_rabbitmq_client(options):
    global RABBITMQ_CLIENT
    try:
        RABBITMQ_CLIENT
    except NameError:
        mqConnString = 'amqp://{0}:{1}@{2}:{3}//'.format(
            options.mquser,
            options.mqpassword,
            options.mqalertserver,
            options.mqport
        )
        mqAlertConn = Connection(mqConnString)
        alertExchange = Exchange(name=options.alertExchange, type='topic', durable=True, delivery_mode=1)
        alertExchange(mqAlertConn).declare()

        alertQueue = Queue(options.queueName,
                           exchange=alertExchange,
                           routing_key=options.alerttopic,
                           durable=False,
                           no_ack=(not options.mqack))
        alertQueue(mqAlertConn).declare()

        RABBITMQ_CLIENT = mqAlertConn.Consumer(alertQueue, accept=['json'])
    return RABBITMQ_CLIENT 
Example #4
Source File: check.py    From data_integration_celery with GNU General Public License v3.0
def test(url):
    from kombu import Exchange, Queue, Connection, Consumer, Producer
    task_queue = Queue('tasks', exchange=Exchange('celery', type='direct'), routing_key='tasks')
    # producer
    with Connection(url) as conn:
        with conn.channel() as channel:
            producer = Producer(channel)
            producer.publish({'hello': 'world'},
                             retry=True,
                             exchange=task_queue.exchange,
                             routing_key=task_queue.routing_key,
                             declare=[task_queue])

    def get_message(body, message):
        print("receive message: %s" % body)
        # message.ack()

    # consumer
    with Connection(url) as conn:
        with conn.channel() as channel:
            consumer = Consumer(channel, queues=task_queue, callbacks=[get_message, ], prefetch_count=10)
            consumer.consume(no_ack=True) 
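Note that consume() only registers the callback; get_message fires while events are drained. A hedged, self-contained completion of the consuming side (the broker URL is a placeholder, the queue and callback mirror the snippet above):

import socket
from kombu import Connection, Consumer, Exchange, Queue

def get_message(body, message):
    print("receive message: %s" % body)
    message.ack()

url = 'amqp://guest:guest@localhost:5672//'   # placeholder
task_queue = Queue('tasks', exchange=Exchange('celery', type='direct'), routing_key='tasks')

with Connection(url) as conn:
    with Consumer(conn, queues=task_queue, callbacks=[get_message], prefetch_count=10):
        try:
            conn.drain_events(timeout=5)      # dispatches pending messages to get_message
        except socket.timeout:
            pass                              # nothing arrived within the timeout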
Example #5
Source File: main.py    From banzai with GNU General Public License v3.0
def start_listener(runtime_context):
    # Need to keep the amqp logger level at least as high as INFO,
    # or else it sends heartbeat check messages every second
    logging.getLogger('amqp').setLevel(max(logger.level, getattr(logging, 'INFO')))
    logger.info('Starting pipeline listener')

    fits_exchange = Exchange(runtime_context.FITS_EXCHANGE, type='fanout')
    listener = RealtimeModeListener(runtime_context)

    with Connection(runtime_context.broker_url) as connection:
        listener.connection = connection.clone()
        listener.queue = Queue(runtime_context.queue_name, fits_exchange)
        try:
            listener.run()
        except listener.connection.connection_errors:
            listener.connection = connection.clone()
            listener.ensure_connection(max_retries=10)
        except KeyboardInterrupt:
            logger.info('Shutting down pipeline listener.') 
Example #6
Source File: mailbox.py    From mochi with MIT License
def __init__(self,
                 address,
                 name,
                 transport_options,
                 ssl=False,
                 no_ack=True,
                 queue_opts=None,
                 exchange_opts=None):
        from kombu import Connection
        self._address = address
        self._conn = Connection(address,
                                transport_options=transport_options,
                                ssl=ssl)
        self._queue = self._conn.SimpleQueue(name, no_ack, queue_opts, exchange_opts)
        self._no_ack = no_ack
        self._last_msg = None 
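The transport_options passed here are transport-specific; a hedged standalone illustration for kombu's Redis transport (the URL and option value are placeholders):

from kombu import Connection

# visibility_timeout: seconds before an unacknowledged message is redelivered
# (understood by the Redis and SQS transports, ignored by AMQP brokers).
conn = Connection(
    'redis://localhost:6379/0',
    transport_options={'visibility_timeout': 3600},
)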
Example #7
Source File: stream.py    From memex-explorer with BSD 2-Clause "Simplified" License
def __init__(self, crawl_name, num_urls=DEFAULT_NUM_URLS):
        """
        Create a NutchUrlTrails instance for visualizing a running Nutch crawl in real-time using Bokeh
        :param crawl_name: The name of the crawl (as identified by the queue)
        :param num_urls: The number of URLs to display in the visualization
        :return: A NutchUrlTrails instance
        """
        self.crawl_name = crawl_name
        self.num_urls = num_urls
        self.open_urls = {}
        self.closed_urls = {}
        self.old_segments = None
        self.old_circles = None
        
        self.session = Session()
        self.session.use_doc(self.crawl_name)
        self.document = Document()

        con = Connection()

        exchange = Exchange(EXCHANGE_NAME, 'direct', durable=False)
        queue = Queue(crawl_name, exchange=exchange, routing_key=crawl_name)
        self.queue = con.SimpleQueue(name=queue)  # SimpleQueue also accepts a Queue declaration in place of a name
Example #8
Source File: controller.py    From umbra with Apache License 2.0
def start(self):
        self._browsing_threads = set()
        self._browsing_threads_lock = threading.Lock()

        self._exchange = kombu.Exchange(name=self.exchange_name, type='direct',
                durable=True)

        self._reconnect_requested = False

        self._producer = None
        self._producer_lock = threading.Lock()
        with self._producer_lock:
            self._producer_conn = kombu.Connection(self.amqp_url)
            self._producer = self._producer_conn.Producer(serializer='json')

        self._consumer_thread = threading.Thread(target=self._consume_amqp, name='AmqpConsumerThread')
        self._consumer_stop = threading.Event()
        self._consumer_thread.start() 
Example #9
Source File: utils.py    From st2 with Apache License 2.0
def get_connection(urls=None, connection_kwargs=None):
    """
    Retrieve a kombu "Connection" class instance configured with all the correct
    options using values from the config and the provided values.

    :param connection_kwargs: Any additional connection keyword arguments passed directly to the
                              Connection class constructor.
    :type connection_kwargs: ``dict``
    """
    urls = urls or get_messaging_urls()
    connection_kwargs = connection_kwargs or {}

    kwargs = {}

    ssl_kwargs = _get_ssl_kwargs(ssl=cfg.CONF.messaging.ssl,
                                 ssl_keyfile=cfg.CONF.messaging.ssl_keyfile,
                                 ssl_certfile=cfg.CONF.messaging.ssl_certfile,
                                 ssl_cert_reqs=cfg.CONF.messaging.ssl_cert_reqs,
                                 ssl_ca_certs=cfg.CONF.messaging.ssl_ca_certs,
                                 login_method=cfg.CONF.messaging.login_method)

    # NOTE: "connection_kwargs" argument passed to this function has precedence over config values
    if len(ssl_kwargs) == 1 and ssl_kwargs['ssl'] is True:
        kwargs.update({'ssl': True})
    elif len(ssl_kwargs) >= 2:
        ssl_kwargs.pop('ssl')
        kwargs.update({'ssl': ssl_kwargs})

    kwargs['login_method'] = cfg.CONF.messaging.login_method

    kwargs.update(connection_kwargs)

    # NOTE: This line contains no secret values so it's OK to log it
    LOG.debug('Using SSL context for RabbitMQ connection: %s' % (ssl_kwargs))

    connection = Connection(urls, **kwargs)
    return connection 
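get_connection forwards the list of URLs straight to kombu, which supports broker failover; a hedged standalone illustration (the hosts and strategy are placeholders):

from kombu import Connection

# Several broker URLs: ensure_connection() cycles to the next one on failure.
connection = Connection(
    ['amqp://user:pass@rabbit-1:5672//', 'amqp://user:pass@rabbit-2:5672//'],
    failover_strategy='round-robin',
)
connection.ensure_connection(max_retries=3)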
Example #10
Source File: check.py    From data_integration_celery with GNU General Public License v3.0
def receiver(url):
    logger.info("start receiver")
    with Connection(url) as conn:
        C(conn, 'queuetest').run() 
Example #11
Source File: check.py    From data_integration_celery with GNU General Public License v3.0
def sender(url):
    logger.info("start sender")
    with Connection(url) as conn:
        with conn.channel() as channel:
            # producer = Producer(channel)
            producer = channel.Producer()

            while True:
                message = time.strftime('%H:%M:%S', time.localtime())
                producer.publish(
                    body=message,
                    retry=True,
                    exchange='celery',
                    routing_key='rkeytest'
                )
                logger.info('send message: %s' % message)

                while True:
                    # check the queue to refresh the message count
                    queue = channel.queue_declare(queue='queuetest', passive=True)
                    messageCount = queue.message_count
                    logger.info('messageCount: %d' % messageCount)
                    if messageCount < 100:
                        time.sleep(0.5)
                        break
                    time.sleep(1) 
Example #12
Source File: file_utils.py    From banzai with GNU General Public License v3.0
def post_to_archive_queue(image_path, broker_url, exchange_name='fits_files'):
    exchange = Exchange(exchange_name, type='fanout')
    with Connection(broker_url) as conn:
        producer = conn.Producer(exchange=exchange)
        producer.publish({'path': image_path})
        producer.release() 
Example #13
Source File: controller.py    From umbra with Apache License 2.0
def _consume_amqp(self):
        # XXX https://webarchive.jira.com/browse/ARI-3811
        # After running for some amount of time (3 weeks in the latest case),
        # consumer looks normal but doesn't consume any messages. Not clear if
        # it's hanging in drain_events() or not. As a temporary measure for
        # mitigation (if it works) or debugging (if it doesn't work), close and
        # reopen the connection every 2.5 hours
        RECONNECT_AFTER_SECONDS = 150 * 60

        url_queue = kombu.Queue(self.queue_name, exchange=self._exchange, routing_key=self.routing_key)

        while not self._consumer_stop.is_set():
            try:
                self.logger.info("connecting to amqp exchange={} at {}".format(self._exchange.name, self.amqp_url))
                self._reconnect_requested = False
                with kombu.Connection(self.amqp_url) as conn:
                    conn.default_channel.basic_qos(
                            prefetch_count=self.max_active_browsers,
                            prefetch_size=0, a_global=False)
                    with conn.Consumer(url_queue) as consumer:
                        self._wait_for_and_browse_urls(
                                conn, consumer, timeout=RECONNECT_AFTER_SECONDS)

                    # need to wait for browsers to finish here, before closing
                    # the amqp connection,  because they use it to do
                    # message.ack() after they finish browsing a page
                    self._wait_for_active_browsers()
            except BaseException as e:
                self.logger.error("caught exception {}".format(e), exc_info=True)
                time.sleep(0.5)
                self.logger.error("attempting to reopen amqp connection") 
Example #14
Source File: listener.py    From bugbug with Mozilla Public License 2.0
def __init__(self, user, password, callback):
        self.connection = Connection(CONNECTION_URL.format(user, password))
        self.queues = [
            Queue(
                name="queue/{}/pushes".format(user),
                exchange=Exchange(
                    "exchange/hgpushes/v2", type="topic", no_declare=True,
                ),
                routing_key="#",
                durable=True,
                auto_delete=True,
            )
        ]
        self.consumer = _GenericConsumer(self.connection, self.queues, callback) 
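_GenericConsumer is defined elsewhere in bugbug; a hedged sketch of what such a consumer can look like using kombu's ConsumerMixin (the real class may differ):

from kombu.mixins import ConsumerMixin

class _GenericConsumer(ConsumerMixin):
    def __init__(self, connection, queues, callback):
        self.connection = connection           # attribute required by ConsumerMixin
        self.queues = queues
        self.callback = callback

    def get_consumers(self, Consumer, channel):
        return [Consumer(queues=self.queues, callbacks=[self.callback])]

Calling run() on the mixin then consumes until should_stop is set, reconnecting on connection errors.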
Example #15
Source File: rabbit.py    From sfm-ui with MIT License
def get_connection():
        return Connection(transport="librabbitmq",
                          hostname=settings.RABBITMQ_HOST,
                          userid=settings.RABBITMQ_USER,
                          password=settings.RABBITMQ_PASSWORD) 
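The transport keyword above forces the C librabbitmq client; the transport can equally be selected through the URL scheme, as in this hedged illustration (hosts are placeholders):

from kombu import Connection

Connection('pyamqp://guest:guest@localhost//')       # pure-Python py-amqp transport
Connection('librabbitmq://guest:guest@localhost//')  # C librabbitmq transport, if installed
Connection('memory://')                              # in-memory transport, handy for tests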
Example #16
Source File: pulse.py    From jx-sqlite with Mozilla Public License 2.0
def connect(self):
        if not self.connection:
            self.connection = Connection(
                hostname=self.settings.host,
                port=self.settings.port,
                userid=self.settings.user,
                password=self.settings.password,
                virtual_host=self.settings.vhost,
                ssl=self.settings.ssl
            ) 
Example #17
Source File: base.py    From celery-message-consumer with Apache License 2.0
def setUp(self):
        super(BaseConsumerIntegrationTest, self).setUp()

        # NOTE:
        # must be a real rabbitmq instance, we rely on rabbitmq
        # features (dead-letter exchange) for our retry queue logic
        self.connection = kombu.Connection(
            settings.BROKER_URL,
            connect_timeout=1,
        )
        self.connection.ensure_connection()
        self.connection.connect()
        self.channel = self.connection.channel() 
Example #18
Source File: utils.py    From st2 with Apache License 2.0
def _get_ssl_kwargs(ssl=False, ssl_keyfile=None, ssl_certfile=None, ssl_cert_reqs=None,
                    ssl_ca_certs=None, login_method=None):
    """
    Return SSL keyword arguments to be used with the kombu.Connection class.
    """
    ssl_kwargs = {}

    # NOTE: If "ssl" is not set to True we don't pass "ssl=False" argument to the constructor
    # because the user could still enable SSL by including the "?ssl=true" query param at the
    # end of the connection URL string
    if ssl is True:
        ssl_kwargs['ssl'] = True

    if ssl_keyfile:
        ssl_kwargs['ssl'] = True
        ssl_kwargs['keyfile'] = ssl_keyfile

    if ssl_certfile:
        ssl_kwargs['ssl'] = True
        ssl_kwargs['certfile'] = ssl_certfile

    if ssl_cert_reqs:
        if ssl_cert_reqs == 'none':
            ssl_cert_reqs = ssl_lib.CERT_NONE
        elif ssl_cert_reqs == 'optional':
            ssl_cert_reqs = ssl_lib.CERT_OPTIONAL
        elif ssl_cert_reqs == 'required':
            ssl_cert_reqs = ssl_lib.CERT_REQUIRED
        ssl_kwargs['cert_reqs'] = ssl_cert_reqs

    if ssl_ca_certs:
        ssl_kwargs['ssl'] = True
        ssl_kwargs['ca_certs'] = ssl_ca_certs

    return ssl_kwargs 
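A hedged sketch of where the returned dictionary typically ends up, passed as the ssl argument of kombu.Connection (the paths and URL are placeholders, not st2 defaults):

import ssl as ssl_lib
from kombu import Connection

ssl_options = {
    'keyfile': '/etc/ssl/private/client.key',     # placeholder paths
    'certfile': '/etc/ssl/certs/client.crt',
    'ca_certs': '/etc/ssl/certs/ca-bundle.crt',
    'cert_reqs': ssl_lib.CERT_REQUIRED,
}
connection = Connection('amqp://user:pass@rabbitmq:5671//', ssl=ssl_options)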
Example #19
Source File: kombu_manager.py    From python-socketio with MIT License
def _connection(self):
        return kombu.Connection(self.url, **self.connection_options) 
Example #20
Source File: kombu_manager.py    From python-socketio with MIT License
def _listen(self):
        reader_queue = self._queue()

        while True:
            connection = self._connection().ensure_connection(
                errback=self.__error_callback)
            try:
                with connection.SimpleQueue(reader_queue) as queue:
                    while True:
                        message = queue.get(block=True)
                        message.ack()
                        yield message.payload
            except connection.connection_errors:
                self._get_logger().exception("Connection error "
                                             "while reading from queue") 
Example #21
Source File: alert_actions_worker.py    From MozDef with Mozilla Public License 2.0
def main():
    # connect and declare the message queue/kombu objects.
    # Event server/exchange/queue
    mqConnString = 'amqp://{0}:{1}@{2}:{3}//'.format(
        options.mquser,
        options.mqpassword,
        options.mqalertserver,
        options.mqport
    )
    mqAlertConn = Connection(mqConnString)

    # Exchange for alerts we pass to actions
    alertExchange = Exchange(name=options.alertExchange,
                             type='topic',
                             durable=True,
                             delivery_mode=1)

    alertExchange(mqAlertConn).declare()

    # Queue for the exchange
    alertQueue = Queue(options.queueName,
                       exchange=alertExchange,
                       routing_key=options.alerttopic,
                       durable=False,
                       no_ack=(not options.mqack))
    alertQueue(mqAlertConn).declare()

    # consume our alerts.
    alertConsumer(mqAlertConn, alertQueue, alertExchange).run() 
Example #22
Source File: alerttask.py    From MozDef with Mozilla Public License 2.0
def _configureKombu(self):
        """
        Configure kombu for amqp or sqs
        """
        try:
            connString = self.__build_conn_string()
            self.mqConn = kombu.Connection(connString)
            if connString.find('sqs') == 0:
                self.mqConn.transport_options['region'] = os.getenv('DEFAULT_AWS_REGION', 'us-west-2')
                self.mqConn.transport_options['is_secure'] = True
                self.alertExchange = kombu.Exchange(
                    name=RABBITMQ["alertexchange"], type="topic", durable=True
                )
                self.alertExchange(self.mqConn).declare()
                alertQueue = kombu.Queue(
                    os.getenv('OPTIONS_ALERTSQSQUEUEURL').split('/')[4], exchange=self.alertExchange
                )
            else:
                self.alertExchange = kombu.Exchange(
                    name=RABBITMQ["alertexchange"], type="topic", durable=True
                )
                self.alertExchange(self.mqConn).declare()
                alertQueue = kombu.Queue(
                    RABBITMQ["alertqueue"], exchange=self.alertExchange
                )
            alertQueue(self.mqConn).declare()
            self.mqproducer = self.mqConn.Producer(serializer="json")
            self.log.debug("Kombu configured")
        except Exception as e:
            self.log.error(
                "Exception while configuring kombu for alerts: {0}".format(e)
            ) 
Example #23
Source File: mozdefbot.py    From MozDef with Mozilla Public License 2.0
def consume_alerts(bot):
    # connect and declare the message queue/kombu objects.
    # server/exchange/queue
    mq_conn_str = 'amqp://{0}:{1}@{2}:{3}//'.format(
        options.mq_user,
        options.mq_password,
        options.mq_alert_server,
        options.mq_port
    )
    mq_alert_conn = Connection(mq_conn_str)

    # Exchange for alerts we pass to plugins
    alert_exchange = Exchange(
        name=options.alert_exchange,
        type='topic',
        durable=True,
        delivery_mode=1
    )

    alert_exchange(mq_alert_conn).declare()

    # Queue for the exchange
    alert_queue = Queue(
        options.queue_name,
        exchange=alert_exchange,
        routing_key=options.alerttopic,
        durable=False,
        no_ack=(not options.mq_ack)
    )
    alert_queue(mq_alert_conn).declare()

    # consume our alerts.
    AlertConsumer(mq_alert_conn, alert_queue, alert_exchange, bot).run() 
Example #24
Source File: amqp_source.py    From RackHD with Apache License 2.0
def __init__(self, amqp_url, logs):
        self.__logs = logs
        self.__amqp_url = amqp_url
        self.__monitors = {}
        self.__connection = Connection(self.__amqp_url)
        self.__connection.connect()
        self.__running = True
        self.__consumer_gl = gevent.spawn(self.__consumer_greenlet_main)
        self.__consumer_gl.greenlet_name = 'amqp-consumer-gl'  # allowing flogging to print a nice name
        gevent.sleep(0.0) 
Example #25
Source File: rackhd_amqp_od.py    From RackHD with Apache License 2.0
def __setup_rackhd_style_amqp(self):
        """
        Need to make exchanges and named queues to make this
        look like a RackHD instance's amqp setup.
        """
        # A freshly spun up on-demand docker likes to say it's there, but will
        # then reset the connection. So, catch that scenario w/ a few retries.
        con = None
        done_time = time.time() + 30.0
        while con is None:
            con = Connection(hostname=self.host, port=self.ssl_port, ssl=False)
            try:
                con.connect()
            except Exception as ex:
                if time.time() > done_time:
                    raise ex
                con = None
            if con is None:
                time.sleep(0.1)

        on_task = self.__assure_exchange(con, 'on.task', 'topic')
        self.__assure_named_queue(con, on_task, 'ipmi.command.sel.result')
        self.__assure_named_queue(con, on_task, 'ipmi.command.sdr.result')
        self.__assure_named_queue(con, on_task, 'ipmi.command.chassis.result')

        on_events = self.__assure_exchange(con, 'on.events', 'topic')
        self.__assure_named_queue(con, on_events, 'graph.finished')
        self.__assure_named_queue(con, on_events, 'polleralert.sel.updated', '#') 
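__assure_exchange and __assure_named_queue are RackHD test helpers not shown here; a hedged sketch of what they could look like with kombu primitives (the signatures are assumptions inferred from the calls above):

from kombu import Exchange, Queue

def assure_exchange(connection, exchange_name, exchange_type):
    exchange = Exchange(exchange_name, type=exchange_type, durable=True)
    exchange(connection).declare()   # bind to the connection, then declare on the broker
    return exchange

def assure_named_queue(connection, exchange, name, routing_key=None):
    queue = Queue(name, exchange=exchange, routing_key=routing_key or name)
    queue(connection).declare()
    return queue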
Example #26
Source File: worker.py    From daenerys with Apache License 2.0
def main():
    disconnect()
    connect('zhihulive')
    with Connection(BROKER_URI) as conn:
        consumer(conn, [process_task]) 
Example #27
Source File: redis.py    From dino with Apache License 2.0
def __init__(self, env, is_external_queue: bool):
        super().__init__(env, is_external_queue, queue_type='redis', logger=logger)

        conf = env.config

        bind_port = self.get_port()
        if bind_port is None:
            logger.info('skipping pubsub setup, no port specified')
            return

        queue_host = conf.get(ConfigKeys.HOST, domain=self.domain_key, default=None)
        exchange = conf.get(ConfigKeys.EXCHANGE, domain=self.domain_key, default='node_exchange')
        queue_db = conf.get(ConfigKeys.DB, domain=self.domain_key, default=0)
        queue_name = conf.get(ConfigKeys.QUEUE, domain=self.domain_key, default=None)

        if queue_name is None or len(queue_name.strip()) == 0:
            queue_name = 'node_queue_%s_%s_%s' % (
                conf.get(ConfigKeys.ENVIRONMENT),
                self.get_host(),
                bind_port
            )

        if self.is_external_queue:
            self.exchange = Exchange(exchange, type='direct')
        else:
            self.exchange = Exchange(exchange, type='fanout')

        self.queue_connection = Connection(queue_host, transport_options={'db': queue_db})
        logger.info('queue connection: {}'.format(str(self.queue_connection)))
        self.queue_name = queue_name
        self.queue = Queue(self.queue_name, self.exchange) 
Example #28
Source File: kafka_to_rabbitmq.py    From dino with Apache License 2.0
def __init__(self, _conf):
        amqp_conf = conf.get(ConfigKeys.AMQP)
        queue_host = amqp_conf.get(ConfigKeys.HOST)
        if queue_host is None or len(queue_host.strip()) == 0:
            return

        queue_port = amqp_conf.get(ConfigKeys.PORT)
        queue_vhost = amqp_conf.get(ConfigKeys.VHOST)
        queue_user = amqp_conf.get(ConfigKeys.USER)
        queue_pass = amqp_conf.get(ConfigKeys.PASSWORD)

        queue_host = ';'.join(['amqp://%s' % host for host in queue_host.split(';')])
        queue_exchange = '%s_%s' % (
            amqp_conf.get(ConfigKeys.EXCHANGE),
            amqp_conf.get(ConfigKeys.ENVIRONMENT)
        )

        queue_name = amqp_conf.get(ConfigKeys.QUEUE)
        self.exchange = Exchange(queue_exchange, type='direct')

        self.queue_connection = Connection(
            hostname=queue_host,
            port=queue_port,
            virtual_host=queue_vhost,
            userid=queue_user,
            password=queue_pass
        )
        self.queue = Queue(queue_name, self.exchange)
        logger.info('setting up pubsub for host(s) "{}"'.format(queue_host)) 
Example #29
Source File: rabbitmqdriver.py    From sniffer with Apache License 2.0
def start_sync(self):
        exchange = Exchange(self.exchange_name, self.exchange_type, durable=self.durable)
        queue = Queue(self.queue_name, exchange=exchange, routing_key=self.routing_key)
        with Connection(self.amqp_url) as conn:
            # producer = conn.Producer(serializer='json')
            # producer.publish({'name': '/tmp/lolcat1.avi', 'size': 1301013},
            #                  exchange=exchange, routing_key=self.routing_key,
            #                  declare=[queue])
            # producer.publish({'name': '/tmp/lolcat1.avi', 'size': 1301013},
            #                  exchange=exchange, routing_key=self.routing_key,
            #                  declare=[queue])
            with conn.Consumer(queue, callbacks=[self.rabbitmq_callback]) as consumer:
                # Process messages and handle events on all channels
                while True:
                    conn.drain_events() 
Example #30
Source File: logger.py    From KubeOperator with Apache License 2.0
def __init__(self):
        self.connection = Connection(settings.CELERY_LOG_BROKER_URL)