Python celery.Celery() Examples

The following are 30 code examples of celery.Celery(), collected from open-source projects. Each example notes its original source file, project, and license. You may also want to check out all available functions and classes of the celery module.
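As a baseline before the project examples, here is a minimal sketch of the constructor in use (the app name, broker URL, and task are illustrative):

from celery import Celery

# Illustrative broker URL; substitute your own Redis or RabbitMQ instance.
app = Celery('demo', broker='redis://localhost:6379/0')

@app.task
def add(x, y):
    return x + y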
Example #1
Source File: flask_celery.py    From flaskapp with MIT License
def make_celery(self, backend=None, broker=None):
    if backend:
        self.config['CELERY_RESULT_BACKEND'] = backend
    if broker:
        self.config['CELERY_BROKER_URL'] = broker
    celery = Celery(
        self.import_name,
        backend=self.config['CELERY_RESULT_BACKEND'],
        broker=self.config['CELERY_BROKER_URL']
    )
    celery.conf.update(self.config)

    class ContextTask(celery.Task):
        # `s` is the task instance; `self` (the Flask app) is captured
        # from the enclosing make_celery scope.
        def __call__(s, *args, **kwargs):
            with self.app_context():
                return s.run(*args, **kwargs)

    celery.Task = ContextTask
    return celery 
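Usage note: make_celery above takes self, which suggests it is defined as a method on a Flask subclass. A hypothetical way to wire it up (the class name, method binding, and URLs are assumptions, not from the flaskapp source):

from celery import Celery  # needed by make_celery above
from flask import Flask

class FlaskApp(Flask):
    # Hypothetical subclass binding the function above as a method.
    make_celery = make_celery

app = FlaskApp(__name__)
celery = app.make_celery(
    backend='redis://localhost:6379/0',  # illustrative URLs
    broker='redis://localhost:6379/0',
)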
Example #2
Source File: application.py    From appointment-reminders-flask with MIT License
def celery(self):
        app = self.flask_app
        celery = Celery(app.import_name, broker=app.config[
                        'CELERY_BROKER_URL'])
        celery.conf.update(app.config)

        TaskBase = celery.Task

        class ContextTask(TaskBase):
            abstract = True

            def __call__(self, *args, **kwargs):
                with app.app_context():
                    return TaskBase.__call__(self, *args, **kwargs)
        celery.Task = ContextTask

        return celery 
Example #3
Source File: make_app.py    From dagster with Apache License 2.0
def make_app_with_task_routes(task_routes, app_args=None):
    app_ = Celery('dagster', **(app_args if app_args else {}))

    if app_args is None:
        app_.config_from_object('dagster_celery.defaults', force=True)

        if is_module_available('dagster_celery_config'):
            # pylint: disable=protected-access
            obj = force_mapping(app_.loader._smart_import('dagster_celery_config'))
            app_.conf.update(obj)

    app_.loader.import_module('celery.contrib.testing.tasks')

    app_.conf.task_queues = [
        Queue('dagster', routing_key='dagster.#', queue_arguments={'x-max-priority': 10})
    ]
    app_.conf.task_routes = task_routes
    app_.conf.task_queue_max_priority = 10
    app_.conf.task_default_priority = 5
    return app_ 
Example #4
Source File: celery.py    From mqttwarn with Eclipse Public License 2.0
def plugin(srv, item):
    srv.logging.debug("*** MODULE=%s: service=%s, target=%s", __file__, item.service, item.target)

    config = item.config

    app = celery.Celery(
        config['app_name'],
        broker=config['broker_url']
    )

    for target in item.addrs:
        message = item.message
        try:
            if target['message_format'] == 'json':
                message = json.loads(message)
            app.send_task(target['task'], [message])
        except Exception as e:
            srv.logging.warning("Error: %s" % e)
            return False

    return True 
Example #5
Source File: __init__.py    From netinfo with MIT License
def check_asndb(f):
    """Check if the ASN database should be updated.

    This wraps any call to the API to ensure the version of the database is
    always the most current. The PyASN database remains in a global variable
    exposed by Flask. Celery updates the configuration file after a new
    RIB has been downloaded and processed, and that update serves as the
    trigger for deciding whether to reload the database.
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        with open('%s/resources/config.json' % APP_BASE) as fobj:
            config = json.load(fobj)
        delta = (now_time() - load_time(config['asn']['last_update'])).seconds
        if delta > REFRESH_TIME or not app.config['ASNDB']:
            try:
                app.config['ASNDB'] = pyasn.pyasn('%s/resources/asn/current' % APP_BASE,
                                                  as_names_file='%s/resources/asn/as_names.json' % APP_BASE)
                app.config['ASNDB'].loaded = config['asn']['last_rib_file']
            except Exception as e:
                raise Exception("Database has not been initialized.") from e
        return f(*args, **kwargs)
    return decorated_function 
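A hypothetical route showing how this decorator might be applied (the route, lookup call, and response shape are illustrative; pyasn's lookup() returns an (asn, prefix) tuple):

from flask import jsonify

@app.route('/lookup/<ip>')
@check_asndb
def lookup(ip):
    # The decorator guarantees ASNDB is loaded and reasonably fresh here.
    asn, prefix = app.config['ASNDB'].lookup(ip)
    return jsonify({'ip': ip, 'asn': asn, 'prefix': prefix})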
Example #6
Source File: app.py    From timesketch with Apache License 2.0
def create_celery_app():
    """Create a Celery app instance."""
    app = create_app()
    celery = Celery(app.import_name, broker=app.config['CELERY_BROKER_URL'])
    celery.conf.update(app.config)
    TaskBase = celery.Task

    # pylint: disable=no-init
    class ContextTask(TaskBase):
        """Add Flask context to the Celery tasks created."""
        abstract = True

        def __call__(self, *args, **kwargs):
            """Return Task within a Flask app context.

            Returns:
                A Task (instance of Celery.celery.Task)
            """
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
    return celery 
Example #7
Source File: test_celery.py    From pylogctx with BSD 2-Clause "Simplified" License
def test_old_task():
    from pylogctx import context

    app = Celery(task_cls=OldLoggingTask)

    @app.task
    def my_task():
        logger = get_task_logger(current_task.name)
        logger.info("I log!")
        return context.as_dict()

    result = my_task.apply()
    if VERSION.major < 4:
        result.maybe_reraise()
    else:
        result.maybe_throw()
    fields = result.result
    assert 'taskField' in fields
    assert not context.as_dict() 
Example #8
Source File: celery_prometheus_exporter.py    From celery-prometheus-exporter with MIT License
def _process_event(self, evt):
        # Events might arrive in parallel. Celery already has a lock
        # that deals with this exact situation, so we'll use that for now.
        with self._state._mutex:
            if celery.events.group_from(evt['type']) == 'task':
                evt_state = evt['type'][5:]
                try:
                    # Celery 4
                    state = celery.events.state.TASK_EVENT_TO_STATE[evt_state]
                except AttributeError:  # pragma: no cover
                    # Celery 3
                    task = celery.events.state.Task()
                    task.event(evt_state)
                    state = task.state
                if state == celery.states.STARTED:
                    self._observe_latency(evt)
                self._collect_tasks(evt, state) 
Example #9
Source File: test_celery.py    From pylogctx with BSD 2-Clause "Simplified" License
def test_task():
    from pylogctx import context

    app = Celery(task_cls=LoggingTask)

    @app.task
    def my_task():
        context.update(taskField='RUNNED')
        logger = get_task_logger(current_task.name)
        logger.info("I log!")
        return context.as_dict()

    result = my_task.apply()
    if VERSION.major < 4:
        result.maybe_reraise()
    else:
        result.maybe_throw()
    fields = result.result
    assert 'taskField' in fields
    assert not context.as_dict() 
Example #10
Source File: celery.py    From turbinia with Apache License 2.0
def setup(self):
    """Set up Celery"""
    config.LoadConfig()
    self.app = celery.Celery(
        'turbinia', broker=config.CELERY_BROKER, backend=config.CELERY_BACKEND)
    self.app.conf.update(
        task_default_queue=config.INSTANCE_ID,
        accept_content=['json'],
        # TODO(ericzinnikas): Without task_acks_late Celery workers will start
        # on one task and prefetch another (i.e. can result in 1 worker getting
        # 2 plaso jobs while another worker is free). But enabling this causes
        # problems with certain Celery brokers (duplicated work).
        task_acks_late=False,
        task_track_started=True,
        worker_concurrency=1,
        worker_prefetch_multiplier=1,
    ) 
Example #11
Source File: app.py    From flask-celery-example with MIT License
def index():
    if request.method == 'GET':
        return render_template('index.html', email=session.get('email', ''))
    email = request.form['email']
    session['email'] = email

    # send the email
    email_data = {
        'subject': 'Hello from Flask',
        'to': email,
        'body': 'This is a test email sent from a background Celery task.'
    }
    if request.form['submit'] == 'Send':
        # send right away
        send_async_email.delay(email_data)
        flash('Sending email to {0}'.format(email))
    else:
        # send in one minute
        send_async_email.apply_async(args=[email_data], countdown=60)
        flash('An email will be sent to {0} in one minute'.format(email))

    return redirect(url_for('index')) 
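The send_async_email task referenced above is defined elsewhere in the project; a plausible sketch using Flask-Mail (the mail instance and Message usage are assumptions based on the email_data keys):

from flask_mail import Message

@celery.task
def send_async_email(email_data):
    # Hypothetical background task; `mail` is assumed to be a
    # flask_mail.Mail instance bound to the Flask app.
    msg = Message(email_data['subject'], recipients=[email_data['to']])
    msg.body = email_data['body']
    with app.app_context():
        mail.send(msg)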
Example #12
Source File: __init__.py    From celery-scheduler with MIT License
def make_celery(app):
    # create context tasks in celery
    celery = Celery(app.import_name, broker=app.config['BROKER_URL'])
    celery.config_from_object(celeryconfig)
    # celery.conf.update(app.config)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask

    return celery 
Example #13
Source File: test_celery.py    From sentry-python with BSD 2-Clause "Simplified" License
def test_no_stackoverflows(celery):
    """We used to have a bug in the Celery integration where its monkeypatching
    was repeated for every task invocation, leading to stackoverflows.

    See https://github.com/getsentry/sentry-python/issues/265
    """

    results = []

    @celery.task(name="dummy_task")
    def dummy_task():
        with configure_scope() as scope:
            scope.set_tag("foo", "bar")

        results.append(42)

    for _ in range(10000):
        dummy_task.delay()

    assert results == [42] * 10000

    with configure_scope() as scope:
        assert not scope._tags 
Example #14
Source File: celery.py    From lemur with Apache License 2.0
def report_failed_task(**kwargs):
    """
    Report a generic failure metric as tasks to our metrics broker every time a task fails.
    This metric can be used for alerting.
    https://docs.celeryproject.org/en/latest/userguide/signals.html#task-failure
    """
    with flask_app.app_context():
        log_data = {
            "function": f"{__name__}.{sys._getframe().f_code.co_name}",
            "Message": "Celery Task Failure",
        }

        # Add traceback if exception info is in the kwargs
        einfo = kwargs.get("einfo")
        if einfo:
            log_data["traceback"] = einfo.traceback

        error_tags = get_celery_request_tags(**kwargs)

        log_data.update(error_tags)
        current_app.logger.error(log_data)
        metrics.send("celery.failed_task", "TIMER", 1, metric_tags=error_tags) 
Example #15
Source File: celery_executor.py    From airflow with Apache License 2.0
def fetch_celery_task_state(async_result: AsyncResult) -> \
        Tuple[str, Union[str, ExceptionWithTraceback], Any]:
    """
    Fetch and return the state of the given celery task. The scope of this function is
    global so that it can be called by subprocesses in the pool.

    :param async_result: a tuple of the Celery task key and the async Celery object used
        to fetch the task's state
    :type async_result: tuple(str, celery.result.AsyncResult)
    :return: a tuple of the Celery task key and the Celery state and the celery info
        of the task
    :rtype: tuple[str, str, str]
    """

    try:
        with timeout(seconds=OPERATION_TIMEOUT):
            # Accessing state property of celery task will make actual network request
            # to get the current state of the task
            info = async_result.info if hasattr(async_result, 'info') else None
            return async_result.task_id, async_result.state, info
    except Exception as e:  # pylint: disable=broad-except
        exception_traceback = f"Celery Task ID: {async_result}\n{traceback.format_exc()}"
        return async_result.task_id, ExceptionWithTraceback(e, exception_traceback), None 
Example #16
Source File: celery_executor.py    From airflow with Apache License 2.0
def update_task_state(self, key: TaskInstanceKeyType, state: str, info: Any) -> None:
        """Updates state of a single task."""
        # noinspection PyBroadException
        try:
            if self.last_state[key] != state:
                if state == celery_states.SUCCESS:
                    self.success(key, info)
                    del self.tasks[key]
                    del self.last_state[key]
                elif state == celery_states.FAILURE:
                    self.fail(key, info)
                    del self.tasks[key]
                    del self.last_state[key]
                elif state == celery_states.REVOKED:
                    self.fail(key, info)
                    del self.tasks[key]
                    del self.last_state[key]
                else:
                    self.log.info("Unexpected state: %s", state)
                    self.last_state[key] = state
        except Exception:  # pylint: disable=broad-except
            self.log.exception("Error syncing the Celery executor, ignoring it.") 
Example #17
Source File: celery_executor.py    From airflow with Apache License 2.0
def _process_tasks(self, task_tuples_to_send: List[TaskInstanceInCelery]) -> None:
        first_task = next(t[4] for t in task_tuples_to_send)

        # Celery state queries will get stuck if the tasks do not all share
        # the same backend.
        cached_celery_backend = first_task.backend

        key_and_async_results = self._send_tasks_to_celery(task_tuples_to_send)
        self.log.debug('Sent all tasks.')

        for key, _, result in key_and_async_results:
            if isinstance(result, ExceptionWithTraceback):
                self.log.error(  # pylint: disable=logging-not-lazy
                    CELERY_SEND_ERR_MSG_HEADER + ":%s\n%s\n", result.exception, result.traceback
                )
            elif result is not None:
                # Only pops when enqueued successfully, otherwise keep it
                # and expect scheduler loop to deal with it.
                self.queued_tasks.pop(key)
                result.backend = cached_celery_backend
                self.running.add(key)
                self.tasks[key] = result
                self.last_state[key] = celery_states.PENDING 
Example #18
Source File: core.py    From celery-exporter with MIT License
def __init__(
        self,
        broker_url,
        listen_address,
        max_tasks=10000,
        namespace="celery",
        transport_options=None,
        enable_events=False,
    ):
        self._listen_address = listen_address
        self._max_tasks = max_tasks
        self._namespace = namespace
        self._enable_events = enable_events

        self._app = celery.Celery(broker=broker_url)
        self._app.conf.broker_transport_options = transport_options or {} 
Example #19
Source File: taskman.py    From lost with MIT License
def make_celery(app):
    celery = Celery(
        app.import_name,
        backend=app.config['CELERY_RESULT_BACKEND'],
        broker=app.config['CELERY_BROKER_URL']
    )
    celery.conf.update(app.config)
    celery.config_from_object(celeryconfig)

    class ContextTask(celery.Task):
        def __call__(self, *args, **kwargs):
            with app.app_context():
                return self.run(*args, **kwargs)

    celery.Task = ContextTask
    return celery 
Example #20
Source File: app.py    From celery-once with BSD 2-Clause "Simplified" License
def make_celery(app):
    celery = Celery(
        app.import_name,
        backend=app.config['CELERY_RESULT_BACKEND'],
        broker=app.config['CELERY_BROKER_URL']
    )
    celery.conf.update(app.config)

    class ContextTask(celery.Task):
        def __call__(self, *args, **kwargs):
            with app.app_context():
                return self.run(*args, **kwargs)
    
    class ContextQueueOnce(QueueOnce):
        def __call__(self, *args, **kwargs):
            with app.app_context():
                return super(ContextQueueOnce, self).__call__(*args, **kwargs)

    celery.Task = ContextTask
    celery.QueueOnce = ContextQueueOnce
    return celery 
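With the app built this way, a task can opt into the context-aware QueueOnce base; a hypothetical example using celery-once's documented once options:

@celery.task(base=celery.QueueOnce, once={'graceful': True})
def import_large_file(path):
    # Only one instance of this task can be queued at a time;
    # graceful=True ignores duplicates instead of raising AlreadyQueued.
    ...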
Example #21
Source File: celery.py    From lemur with Apache License 2.0
def make_celery(app):
    celery = Celery(
        app.import_name,
        backend=app.config.get("CELERY_RESULT_BACKEND"),
        broker=app.config.get("CELERY_BROKER_URL"),
    )
    celery.conf.update(app.config)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
    return celery 
Example #22
Source File: celery.py    From lemur with Apache License 2.0
def report_revoked_task(**kwargs):
    """
    Report a generic failure metric to our metrics broker every time a task is revoked.
    This metric can be used for alerting.
    https://docs.celeryproject.org/en/latest/userguide/signals.html#task-revoked
    """
    with flask_app.app_context():
        log_data = {
            "function": f"{__name__}.{sys._getframe().f_code.co_name}",
            "Message": "Celery Task Revoked",
        }

        error_tags = get_celery_request_tags(**kwargs)

        log_data.update(error_tags)
        current_app.logger.error(log_data)
        metrics.send("celery.revoked_task", "TIMER", 1, metric_tags=error_tags) 
Example #23
Source File: celery_runner.py    From incepiton-mysql with MIT License
def make_celery(app):
    celery = Celery(
        app.import_name,
        broker=app.config['CELERY_BROKER_URL'],
        backend=app.config['CELERY_RESULT_BACKEND']
    )
    celery.conf.update(app.config)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask

    return celery 
Example #24
Source File: celery.py    From cadasta-platform with GNU Affero General Public License v3.0
def get_app(conf):
    """ App setup, placed in function to make testing easier """
    app = Celery(amqp=CircuitBreakerAMQP)
    app.config_from_object(conf)
    try:
        breakers.celery.call(setup_app, app, throw=True)
    except breakers.celery.expected_errors:
        logger.exception("Failed to setup celery app.")
    return app 
Example #25
Source File: app.py    From flask-boilerplate with MIT License
def create_celery_app(app=None):
    app = app or create_app()
    celery = Celery(app.name, broker=app.config['CELERY_BROKER_URL'])
    celery.conf.update(app.config)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
    return celery 
Example #26
Source File: celery_test_utils.py    From celery-exporter with MIT License
def get_celery_app():
    return celery.Celery(broker="memory://", backend="cache+memory://") 
Example #27
Source File: test_copr_build.py    From packit-service with MIT License
def test_copr_build_success_gitlab(gitlab_mr_event):
    # status is set for each build-target (4x):
    #  - Building SRPM ...
    #  - Starting RPM build...
    helper = build_helper(event=gitlab_mr_event)
    flexmock(GitProject).should_receive("set_commit_status").and_return().times(8)
    flexmock(GitProject).should_receive("get_pr").and_return(flexmock())
    flexmock(SRPMBuildModel).should_receive("create").and_return(
        SRPMBuildModel(success=True)
    )
    flexmock(CoprBuildModel).should_receive("get_or_create").and_return(
        CoprBuildModel(id=1)
    )
    flexmock(MergeRequestGitlabEvent).should_receive("db_trigger").and_return(
        flexmock()
    )

    flexmock(PackitAPI).should_receive("create_srpm").and_return("my.srpm")

    # copr build
    flexmock(CoprHelper).should_receive("create_copr_project_if_not_exists").and_return(
        None
    )
    flexmock(CoprHelper).should_receive("get_copr_client").and_return(
        flexmock(
            config={"copr_url": "https://copr.fedorainfracloud.org/"},
            build_proxy=flexmock()
            .should_receive("create_from_file")
            .and_return(
                flexmock(id=2, projectname="the-project-name", ownername="the-owner")
            )
            .mock(),
        )
    )

    flexmock(Celery).should_receive("send_task").once()
    assert helper.run_copr_build()["success"] 
Example #28
Source File: test_copr_build.py    From packit-service with MIT License
def test_copr_build_success(github_pr_event):
    # status is set for each build-target (4x):
    #  - Building SRPM ...
    #  - Starting RPM build...
    helper = build_helper(event=github_pr_event)
    flexmock(GitProject).should_receive("set_commit_status").and_return().times(8)
    flexmock(GitProject).should_receive("get_pr").and_return(flexmock())
    flexmock(SRPMBuildModel).should_receive("create").and_return(
        SRPMBuildModel(success=True)
    )
    flexmock(CoprBuildModel).should_receive("get_or_create").and_return(
        CoprBuildModel(id=1)
    )
    flexmock(PullRequestGithubEvent).should_receive("db_trigger").and_return(flexmock())

    flexmock(PackitAPI).should_receive("create_srpm").and_return("my.srpm")

    # copr build
    flexmock(CoprHelper).should_receive("create_copr_project_if_not_exists").and_return(
        None
    )
    flexmock(CoprHelper).should_receive("get_copr_client").and_return(
        flexmock(
            config={"copr_url": "https://copr.fedorainfracloud.org/"},
            build_proxy=flexmock()
            .should_receive("create_from_file")
            .and_return(
                flexmock(id=2, projectname="the-project-name", ownername="the-owner")
            )
            .mock(),
        )
    )

    flexmock(Celery).should_receive("send_task").once()
    assert helper.run_copr_build()["success"] 
Example #29
Source File: celerizer.py    From packit-service with MIT License
def celery_app(self):
        if self._celery_app is None:
            redis_host = getenv("REDIS_SERVICE_HOST", "localhost")
            redis_port = getenv("REDIS_SERVICE_PORT", "6379")
            redis_db = getenv("REDIS_SERVICE_DB", "0")
            redis_url = "redis://{host}:{port}/{db}".format(
                host=redis_host, port=redis_port, db=redis_db
            )
            # https://docs.celeryproject.org/en/stable/userguide/configuration.html#database-url-examples
            postgres_url = f"db+{get_pg_url()}"

            # http://docs.celeryproject.org/en/latest/reference/celery.html#celery.Celery
            self._celery_app = Celery(backend=postgres_url, broker=redis_url)
        return self._celery_app 
Example #30
Source File: celery.py    From figures with MIT License
def celery_check(self, msg):
    """Basic system check to check Celery results in devsite

    Returns a value so that we can test Celery results backend configuration
    """
    print('Called devsite.celery.celery.check with message "{}"'.format(msg))
    return '{prefix}:{msg}'.format(prefix=CELERY_CHECK_MSG_PREFIX, msg=msg)
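Since celery_check takes self, it is presumably registered with @app.task(bind=True); a hypothetical round trip through the results backend:

result = celery_check.delay('hello')  # hypothetical invocation
print(result.get(timeout=10))         # expected: CELERY_CHECK_MSG_PREFIX + ':hello'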