Python celery.group() Examples

The following are 29 code examples of celery.group(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module celery, or try the search function.
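Before the project examples, here is a minimal, self-contained sketch of the basic group() pattern. It is illustrative only: the app name, broker/backend URL, and the add task are assumptions, not taken from any project below.

from celery import Celery, group

# Assumed broker/backend URL; point this at your own Redis or RabbitMQ instance.
app = Celery('demo', broker='redis://localhost:6379/0',
             backend='redis://localhost:6379/0')

@app.task
def add(x, y):
    return x + y

if __name__ == '__main__':
    # A group runs its signatures in parallel; results come back in the
    # same order the signatures were listed. A worker must be running
    # for the results to arrive.
    job = group(add.s(i, i) for i in range(10))
    result = job.apply_async()
    print(result.get(timeout=30))  # [0, 2, 4, ..., 18]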
Example #1
Source File: signature.py    From osspolice with GNU General Public License v3.0 6 votes
def extract_functionnames_regex(file_path, exported_only=False):
    rproc = r"(^|;|};?|public:|private:)"  # group 1: beginning of source line
    rproc += r"\s*"  # optional whitespace(s)
    rproc += r"([\w_][\w\s\d_,<>\[\].&:\*]*)"  # group 2: return type (includes associated class)
    rproc += r"\s+"  # mandatory whitespace(s)
    rproc += r"(\*|[\*|\w_][\w\d_<>\*\[\]&]*::)?"  # group 3: optional class/pointer type
    rproc += r"([\w_][\w\d_]*)"  # group 4: function name
    rproc += r"\s*"  # optional whitespace(s)
    rproc += r"\("  # '(' start of parameters
    rproc += r"([\w\s,<>\[\].=&':/*]*)"  # group 5: parameters
    rproc += r"\)"  # ')' end of parameters
    rproc += r"\s*"  # optional whitespace(s)
    rproc += r"([\w\s\d_]*)"  # group 6: optional attribute
    rproc += r"\s*"  # optional whitespace(s)
    rproc += r"{"  # '{' function start

    p = re.compile(rproc)
    exclude = ['if', 'while', 'do', 'for', 'switch']
    for x in p.finditer(loadtxt(file_path)):
        if x.group(4) in exclude or (exported_only and 'static' in x.group(2)):
            continue
        yield x.group(4) 
Example #2
Source File: middleware.py    From aerial_wildlife_detection with MIT License 6 votes
def _get_inference_job_signature(self, imageIDs, maxNumWorkers=-1):
        '''
            Assembles (but does not submit) an inference job based on the provided parameters.
        '''
        # setup
        if maxNumWorkers != 1:
            # only query the number of available workers if more than one is specified to save time
            num_available = self._get_num_available_workers()
            if maxNumWorkers == -1:
                maxNumWorkers = num_available   #TODO: more than one process per worker?
            else:
                maxNumWorkers = min(maxNumWorkers, num_available)

        # distribute across workers
        images_subset = array_split(imageIDs, max(1, len(imageIDs) // maxNumWorkers))
        jobs = []
        for subset in images_subset:
            job = celery_interface.call_inference.si(imageIDs=subset)
            jobs.append(job)

        jobGroup = group(jobs)
        return jobGroup 
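Since the group is returned as an unsubmitted signature, the caller decides when it runs. A hypothetical caller (the variable names here are assumptions) might submit it like this:

# Hypothetical caller: submit the assembled group and wait for per-subset results.
job_group = self._get_inference_job_signature(imageIDs=image_ids, maxNumWorkers=4)
async_result = job_group.apply_async()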
Example #3
Source File: tasks.py    From micromasters with BSD 3-Clause "New" or "Revised" License 6 votes
def authorize_exam_runs():
    """
    Check for outstanding exam runs
    """
    for exam_run in ExamRun.objects.filter(
            authorized=False,
            date_first_schedulable__lte=now_in_utc(),
    ):
        enrollment_ids_qset = ProgramEnrollment.objects.filter(
            program=exam_run.course.program).values_list('id', flat=True)
        # create a group of subtasks
        job = group(
            authorize_enrollment_for_exam_run.s(enrollment_ids, exam_run.id)
            for enrollment_ids in chunks(enrollment_ids_qset)
        )
        job.apply_async()
        exam_run.authorized = True
        exam_run.save() 
Example #4
Source File: intrinsic_results_sweep_thresh.py    From opensurfaces with MIT License 6 votes
def compute_threshold(self, thresh):
        # split task into 1024 chunks (too many subtasks causes a backlog
        # error); guard against a chunksize of 0 when there are fewer ids
        chunksize = max(1, len(self.decomp_ids) // 1024)
        print('Queuing %s items (chunksize %s) for threshold %s...' % (
            len(self.decomp_ids), chunksize, thresh))

        job = group([
            evaluate_decompositions_task.subtask(kwargs={
                'decomposition_ids': ids,
                'delete_failed_open': False,
                'thresh': thresh,
            })
            for ids in chunk_list_generator(self.decomp_ids, chunksize)
        ])
        result = job.apply_async()

        print('Waiting on %s subtasks with chunksize %s...' % (
            len(self.decomp_ids) // chunksize, chunksize))

        result.join()
Example #5
Source File: tasks.py    From uspto-opendata-python with MIT License 6 votes
def run(self, query, options=None):
        """
        https://celery.readthedocs.io/en/latest/userguide/canvas.html#groups
        """

        logger.info('Starting download with query=%s, options=%s', query, options)

        # http://docs.celeryproject.org/en/latest/userguide/calling.html

        if isinstance(query, str):
            query = {'number': query}

        if isinstance(query, dict):
            self.task = self.task_function.delay(query, options)

        elif isinstance(query, list):
            tasks = [self.task_function.s(item, options) for item in query]
            task_group = celery.group(tasks)
            self.task = task_group.delay()

        else:
            raise TypeError('Unknown type for query {}. type={}'.format(query, type(query)))

        return self.task 
Example #6
Source File: celery_mandelbrot.py    From Software-Architecture-with-Python with MIT License 6 votes
def mandelbrot_main(w, h, max_iterations=1000, output='mandelbrot_celery.png'):
    """ Main function for mandelbrot program with celery """

    job = group([mandelbrot_calc_row.s(y, w, h, max_iterations) for y in range(h)])
    result = job.apply_async()

    image = Image.new('RGB', (w, h))

    for image_rows in result.join():
        for k, v in image_rows.items():
            # keys are flat pixel indexes; recover (x, y) from the image width
            k = int(k)
            v = tuple(map(int, v))
            x, y = k % w, k // w
            image.putpixel((x, y), v)

    image.save(output, 'PNG')
    print('Saved to', output)
Example #7
Source File: tasks.py    From toptal-blog-celery-toy-ex with MIT License 6 votes
def build_report_task(results, ref_date):
    all_repos = []
    for repos in results:
        all_repos += [Repository(repo) for repo in repos]

    # 3. group by language
    grouped_repos = {}
    for repo in all_repos:
        if repo.language in grouped_repos:
            grouped_repos[repo.language].append(repo.name)
        else:
            grouped_repos[repo.language] = [repo.name]

    # 4. create csv
    lines = []
    for lang in sorted(grouped_repos.keys()):
        lines.append([lang] + grouped_repos[lang])

    filename = '{media}/github-hot-repos-{date}.csv'.format(media=settings.MEDIA_ROOT, date=ref_date)
    return make_csv(filename, lines) 
Example #8
Source File: call.py    From vk_friends with GNU General Public License v3.0 6 votes
def getDeep():
    result = {}
    for i in range(deep):
        if result:
            # take the ids that are not yet keys of result; also skip id: None
            lst = list(set([item for sublist in result.values() if sublist for item in sublist]) - set(result.keys()))
            d_friends = group(deep_friends.s(chunk) for chunk in parts(lst, 75))().get()
        else:
            all_friends = friends(my_id)
            d_friends = group(deep_friends.s(chunk) for chunk in parts(list(all_friends[0].keys()), 75))().get()
        # merge this level's friends into the accumulated result
        new_level = {k: v for d in d_friends for k, v in d.items()}
        result.update(new_level)

    return cleaner(result)
Example #9
Source File: virtualhost.py    From ws-backend-community with GNU General Public License v3.0 5 votes
def fingerprint_virtual_hosts(
        self,
        org_uuid=None,
        network_service_uuid=None,
        network_service_scan_uuid=None,
        use_ssl=None,
        order_uuid=None,
):
    """
    Perform fingerprinting for virtual hosts for the given network service.
    :param org_uuid: The UUID of the organization to perform fingerprinting on behalf of.
    :param network_service_uuid: The UUID of the network service to fingerprint.
    :param network_service_scan_uuid: The UUID of the network service scan that this fingerprinting is a part
    of.
    :param use_ssl: Whether or not to use SSL to connect to the remote endpoint.
    :return: None
    """
    logger.info(
        "Now starting to fingerprint virtual hosts for service %s. Organization is %s."
        % (network_service_uuid, org_uuid)
    )
    domain_names = get_all_domains_for_organization(org_uuid=org_uuid, db_session=self.db_session)
    task_sigs = []
    for domain_name in domain_names:
        task_sigs.append(fingerprint_virtual_host.si(
            org_uuid=org_uuid,
            network_service_uuid=network_service_uuid,
            network_service_scan_uuid=network_service_scan_uuid,
            use_ssl=use_ssl,
            hostname=domain_name,
        ))
    logger.info(
        "Now kicking off a total of %s tasks to fingerprint service %s."
        % (len(task_sigs), network_service_uuid)
    )
    canvas_sig = group(task_sigs)
    self.finish_after(signature=canvas_sig)


#USED 
Example #10
Source File: imaging.py    From ws-backend-community with GNU General Public License v3.0 5 votes
def screenshot_web_service(
        self,
        web_service_uuid=None,
        org_uuid=None,
        web_service_scan_uuid=None,
        order_uuid=None,
):
    """
    Take screenshots of all the relevant endpoints for the given web service.
    :param web_service_uuid: The UUID of the web service to take screenshots for.
    :param org_uuid: The UUID of the organization that owns the web service.
    :param web_service_scan_uuid: The UUID of the scan that this screenshotting is being done in.
    :return: None
    """
    logger.info(
        "Now taking screenshots of all relevant endpoints for web service %s. Organization is %s, scan is %s."
        % (web_service_uuid, org_uuid, web_service_scan_uuid)
    )
    url_paths = get_url_paths_to_screenshot(
        service_uuid=web_service_uuid,
        db_session=self.db_session,
        scan_uuid=web_service_scan_uuid,
    )
    logger.info(
        "A total of %s URL paths remain to be screenshotted for web service %s."
        % (len(url_paths), web_service_uuid)
    )
    task_sigs = []
    for url_path in url_paths:
        task_sigs.append(screenshot_web_service_url.si(
            web_service_uuid=web_service_uuid,
            org_uuid=org_uuid,
            web_service_scan_uuid=web_service_scan_uuid,
            url_path=url_path,
            order_uuid=order_uuid,
        ))
    canvas_sig = group(task_sigs)
    self.finish_after(signature=canvas_sig)


#USED 
Example #11
Source File: ssl.py    From ws-backend-community with GNU General Public License v3.0 5 votes
def redo_ssl_support_inspection_for_organization(self, org_uuid=None):
    """
    Perform SSL support inspection for all of the network services associated with the given organization
    again.
    :param org_uuid: The UUID of the organization to re-do SSL support inspection for.
    :return: None
    """
    logger.info(
        "Now redo'ing SSL support inspection for organization %s."
        % (org_uuid,)
    )
    network_service_scan_uuids = get_latest_network_service_scan_uuids_for_organization(
        org_uuid=org_uuid,
        db_session=self.db_session,
    )
    task_sigs = []
    for network_service_scan_uuid in network_service_scan_uuids:
        task_sigs.append(redo_ssl_support_inspection_for_network_service_scan.si(
            network_service_scan_uuid=network_service_scan_uuid,
        ))
    canvas_sig = group(task_sigs)
    logger.info(
        "Now kicking off %s tasks to redo SSL inspection for organization %s."
        % (len(task_sigs), org_uuid)
    )
    self.finish_after(signature=canvas_sig) 
Example #12
Source File: ssl.py    From ws-backend-community with GNU General Public License v3.0 5 votes
def enumerate_vulnerabilities_for_ssl_service(
        self,
        org_uuid=None,
        network_service_uuid=None,
        network_service_scan_uuid=None,
        order_uuid=None,
):
    """
    Enumerate all of the SSL-based vulnerabilities for the given SSL/TLS service.
    :param org_uuid: The UUID of the organization to enumerate SSL vulnerabilities on behalf of.
    :param network_service_uuid: The UUID of the network service that is being scanned.
    :param network_service_scan_uuid: The UUID of the network service scan that this enumeration is
    a part of.
    :return: None
    """
    logger.info(
        "Now enumerating SSL vulnerabilities for network service %s."
        % (network_service_uuid,)
    )
    task_sigs = []
    command_map = get_ssl_vulnerabilities_command_map()
    for command_name in command_map.keys():
        task_sigs.append(test_ssl_service_for_ssl_vulnerability.si(
            org_uuid=org_uuid,
            network_service_uuid=network_service_uuid,
            network_service_scan_uuid=network_service_scan_uuid,
            vulnerability_name=command_name,
            order_uuid=order_uuid,
        ))
    canvas_sig = group(task_sigs)
    logger.info(
        "Now kicking off %s tasks to inspect network service %s for SSL vulnerabilities."
        % (len(task_sigs), network_service_uuid)
    )
    self.finish_after(signature=canvas_sig)


#USED 
Example #13
Source File: prfetcher.py    From gm_pr with Apache License 2.0 5 votes
def get_prs(self):
        """
        fetch the prs from github

        return a list of { 'name' : repo_name, 'pr_list' : pr_list }
        pr_list is a list of PullRequest
        """
        # Parallelisation strategy: get_fragments_for_repo will extract every urls
        # needed for every open PR on a repo. Those urls are stored in a GithubFragmentUrl
        # Each GithubFragmentUrl is distributed to a celery worker to do the HTTP request
        # and retrieve the data (GithubFragment).
        # Then we merge (addfragment) all the GithubFragment from the same PR (same prid)
        # and parse the result. The merge and parsing is done by django
        res = group((get_fragments_for_repo.s(repo.name, self.__url, self.__org,
                                              self.__current_user) | \
                     dmap.s(fetch_githubfragmenturl.s()))
                    for repo in self.__repos)()

        githubpr_by_id = {}
        for groupres in res.get():
            for fragment in groupres.get():
                prid = fragment.prid
                if prid not in githubpr_by_id:
                    githubpr_by_id[prid] = GithubPr(prid)
                githubpr_by_id[prid].addfragment(fragment)

        prlist = [githubPr.parsepr(self.__current_user) for githubPr in githubpr_by_id.values()]

        repo_pr = {}
        for pullrequest in prlist:
            if pullrequest.repo not in repo_pr:
                repo_pr[pullrequest.repo] = []
            repo_pr[pullrequest.repo].append(pullrequest)

        return repo_pr 
Example #14
Source File: network.py    From ws-backend-community with GNU General Public License v3.0 5 votes
def zmap_scan_order(self, order_uuid=None):
    """
    Perform Zmap scans for all necessary ports for the given order.
    :param order_uuid: The UUID of the order to scan.
    :return: None
    """
    port_tuples = get_ports_to_scan_for_scan_config(
        config_uuid=self.scan_config.uuid,
        db_session=self.db_session,
    )
    logger.info(
        "Now scanning order %s for %s total ports."
        % (order_uuid, len(port_tuples))
    )
    task_signatures = []
    scan_signatures = []
    network_scan = create_network_scan_for_organization(
        db_session=self.db_session,
        org_uuid=self.org_uuid,
    )
    self.commit_session()
    for port, protocol in port_tuples:
        scan_signatures.append(zmap_scan_order_for_port.si(
            port=port,
            protocol=protocol,
            order_uuid=order_uuid,
            network_scan_uuid=network_scan.uuid,
        ))
    task_signatures.append(group(scan_signatures))
    task_signatures.append(update_zmap_scan_completed.si(
        scan_uuid=network_scan.uuid,
        org_uuid=self.org_uuid,
        order_uuid=order_uuid,
    ))
    logger.info("Kicking off Zmap subtasks now.")
    canvas_sig = chain(task_signatures)
    canvas_sig.apply_async()


#USED 
Example #15
Source File: fingerprinting.py    From ws-backend-community with GNU General Public License v3.0 5 votes
def enumerate_user_agent_fingerprints_for_web_service(
        self,
        org_uuid=None,
        web_service_uuid=None,
        web_service_scan_uuid=None,
        order_uuid=None,
):
    """
    Perform fingerprinting for the given web service to determine if different user agents result in different
    responses being returned.
    :param org_uuid: The UUID of the organization to fingerprint the web service on behalf of.
    :param web_service_uuid: The UUID of the web service to gather fingerprints for.
    :param web_service_scan_uuid: The UUID of the web service scan to perform fingerprinting for.
    :return: None
    """
    logger.info(
        "Now enumerating user agent fingerprints for web service scan %s."
        % (web_service_scan_uuid,)
    )
    user_agents_file = UserAgentCsvFileWrapper.from_default_file()
    task_sigs = []
    for user_agent in user_agents_file.user_agents:
        task_sigs.append(get_user_agent_fingerprint_for_web_service.si(
            org_uuid=org_uuid,
            web_service_uuid=web_service_uuid,
            web_service_scan_uuid=web_service_scan_uuid,
            user_agent_type=user_agent.agent_type,
            user_agent_name=user_agent.agent_name,
            user_agent_string=user_agent.user_agent,
            order_uuid=order_uuid,
        ))
    canvas_sig = group(task_sigs)
    self.finish_after(signature=canvas_sig)


#USED 
Example #16
Source File: analysis.py    From ws-backend-community with GNU General Public License v3.0 5 votes
def update_latest_web_service_reports_for_organization(self, org_uuid=None):
    """
    Update all of the web service reports for the given organization based on the current state of the web
    service inspector.
    :param org_uuid: The UUID of the organization to update web service reports for.
    :return: None
    """
    logger.info(
        "Now updating all web service reports for organization %s."
        % (org_uuid,)
    )
    report_ids = get_latest_web_service_report_ids(org_uuid)
    logger.info(
        "Total of %s web service reports found for organization %s."
        % (len(report_ids), org_uuid)
    )
    task_sigs = []
    for report_id in report_ids:
        task_sigs.append(update_web_service_report_for_organization.si(
            doc_id=report_id,
            org_uuid=org_uuid,
            is_latest=True,
        ))
    canvas_sig = group(task_sigs)
    logger.info(
        "Kicking off a total of %s tasks to update web service reports for organization %s."
        % (len(task_sigs), org_uuid)
    )
    self.finish_after(signature=canvas_sig) 
Example #17
Source File: analysis.py    From ws-backend-community with GNU General Public License v3.0 5 votes
def update_latest_ssl_support_reports_for_organization(self, org_uuid=None):
    """
    Update all of the ssl support reports for the given organization based on the current state of the SSL support
    inspector.
    :param org_uuid: The UUID of the organization to update SSL support reports for.
    :return: None
    """
    logger.info(
        "Now updating all of the latest SSL support reports for organization %s."
        % (org_uuid,)
    )
    report_ids = get_latest_ssl_support_report_ids(org_uuid)
    logger.info(
        "Total of %s SSL support reports found for organization %s."
        % (len(report_ids), org_uuid)
    )
    task_sigs = []
    for report_id in report_ids:
        task_sigs.append(update_latest_ssl_support_report_for_organization.si(
            doc_id=report_id,
            org_uuid=org_uuid,
            is_latest=True,
        ))
    canvas_sig = group(task_sigs)
    logger.info(
        "Now kicking off %s tasks to update SSL support reports for organization %s."
        % (len(task_sigs), org_uuid)
    )
    self.finish_after(signature=canvas_sig) 
Example #18
Source File: ip.py    From ws-backend-community with GNU General Public License v3.0 5 votes
def get_historic_dns_data_for_ip_address(
        self,
        org_uuid=None,
        ip_address_uuid=None,
        ip_address_scan_uuid=None,
        order_uuid=None,
):
    """
    Get historic DNS data related to the given IP address.
    :param org_uuid: The UUID of the organization to perform data retrieval on behalf of.
    :param ip_address_uuid: The UUID of the IP address to retrieve data about.
    :param ip_address_scan_uuid: The UUID of the IP address scan to associate retrieved data with.
    :return: None
    """
    logger.info(
        "Now getting historic DNS data for IP address %s."
        % (ip_address_uuid,)
    )
    task_sigs = []
    task_kwargs = {
        "org_uuid": org_uuid,
        "ip_address_uuid": ip_address_uuid,
        "ip_address_scan_uuid": ip_address_scan_uuid,
        "order_uuid": order_uuid,
    }
    task_sigs.append(get_historic_dns_data_for_ip_address_from_dnsdb.si(**task_kwargs))
    if len(task_sigs) > 1:
        collection_sig = group(task_sigs)
    else:
        collection_sig = task_sigs[0]
    self.finish_after(signature=collection_sig)


#USED 
Example #19
Source File: orders.py    From ws-backend-community with GNU General Public License v3.0 5 votes
def handle_placed_order(self, order_uuid=None):
    """
    Handle the placement of the given order.
    :param order_uuid: The UUID of the order that was placed.
    :return: None
    """
    logger.info(
        "Now handling the placement of order %s."
        % (order_uuid,)
    )
    task_sigs = []
    scan_config = self.scan_config
    if scan_config.scan_domain_names:
        domain_count = count_domains_for_order(db_session=self.db_session, order_uuid=order_uuid)
        logger.info(
            "Domain count for order %s is %s."
            % (order_uuid, domain_count)
        )
        if domain_count > 0:
            task_sigs.append(initiate_domain_scans_for_order.si(order_uuid=order_uuid, scan_endpoints=True))
    if scan_config.scan_network_ranges:
        network_count = count_networks_for_order(db_session=self.db_session, order_uuid=order_uuid)
        logger.info(
            "Networks count for order %s is %s."
            % (order_uuid, network_count)
        )
        if network_count > 0:
            task_sigs.append(initiate_network_scans_for_order.si(order_uuid=order_uuid, requeue=False))
    if len(task_sigs) > 0:
        task_sigs.append(handle_order_completion.si(order_uuid=order_uuid))
        canvas_sig = group(task_sigs)
        canvas_sig.apply_async()
        logger.info(
            "All scanning tasks for order %s kicked off successfully."
            % (order_uuid,)
        )
    else:
        logger.warning("No tasks were created as a result of call to handle_placed_order.")


#USED 
Example #20
Source File: orders.py    From ws-backend-community with GNU General Public License v3.0 5 votes
def initiate_domain_scans_for_order(self, order_uuid=None, scan_endpoints=True):
    """
    Initiate all of the domain name scans for the given order.
    :param order_uuid: The UUID of the order to initiate scans for.
    :param scan_endpoints: Whether or not to scan discovered endpoints for network services.
    :return: None
    """
    logger.info(
        "Now initiating all domain name scans for order %s. Scan endpoints is %s."
        % (order_uuid, scan_endpoints)
    )
    domain_uuids = get_monitored_domain_uuids_from_order(db_session=self.db_session, order_uuid=order_uuid)
    logger.info(
        "There are a total of %s domains associated with order %s."
        % (len(domain_uuids), order_uuid)
    )
    task_sigs = []
    org_uuid = get_org_uuid_from_order(order_uuid=order_uuid, db_session=self.db_session)
    for domain_uuid in domain_uuids:
        task_sigs.append(scan_domain_name.si(
            org_uuid=org_uuid,
            domain_uuid=domain_uuid,
            order_uuid=order_uuid,
        ))
    logger.info(
        "Now kicking off %s tasks as a group to scan domains for organization %s."
        % (len(task_sigs), org_uuid)
    )
    canvas_sig = group(task_sigs)
    canvas_sig.apply_async()


#USED 
Example #21
Source File: tasks.py    From micromasters with BSD 3-Clause "New" or "Revised" License 5 votes
def batch_update_user_data():
    """
    Create sub tasks to update user data like enrollments,
    certificates and grades from edX platform.
    """
    expiration = now_in_utc() + timedelta(hours=5)
    lock = Lock(LOCK_ID, expiration)
    if not lock.acquire():
        # Lock should have expired by now
        log.error("Unable to acquire lock for batch_update_user_data")
        return

    users_to_refresh = calculate_users_to_refresh_in_bulk()

    jobs = release_batch_update_user_data_lock.s(token=lock.token.decode())
    try:
        if len(users_to_refresh) > 0:
            user_id_chunks = chunks(users_to_refresh)

            job = group(
                batch_update_user_data_subtasks.s(user_id_chunk, expiration.timestamp())
                for user_id_chunk in user_id_chunks
            )
            jobs = job | jobs
    finally:
        jobs.delay() 
Example #22
Source File: tasks.py    From HELPeR with GNU General Public License v3.0 5 votes
def dmap(it, callback):
    # Map a callback over an iterator and return the applied group
    callback = subtask(callback)
    return group(callback.clone([arg]) for arg in it)()
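This helper is usually chained after a task that returns a list, so each element fans out to its own subtask (the Stack Overflow link in Example #27 describes the pattern). A minimal usage sketch, assuming dmap is itself registered as a task and that fetch_ids and process_id are hypothetical tasks on your app:

from celery import chain

# fetch_ids returns a list; dmap then clones process_id.s() once per element,
# so every element is handled by its own worker task.
workflow = chain(fetch_ids.s(), dmap.s(process_id.s()))
workflow.delay()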
Example #23
Source File: test_lib_celery.py    From build-relengapi with Mozilla Public License 2.0 5 votes
def test_group(app):
    with running_worker(app):
        with app.app_context():
            task_group = group(test_task.s(i, i) for i in range(10))
            eq_(task_group.delay().get(interval=0.01),
                [0, 2, 4, 6, 8, 10, 12, 14, 16, 18]) 
Example #24
Source File: test_lib_celery.py    From build-relengapi with Mozilla Public License 2.0 5 votes
def test_group_in_chain_json(app):
    """ This test protects against an issue with nested chains and groups when encoding with json.
        The error appears as 'EncodeError: keys must be a string', as described
        in https://github.com/celery/celery/issues/2033.
        Fixed by installing simplejson.
    """
    with running_worker(app):
        with app.app_context():
            task_group = group(task_json.s(i) for i in range(10))
            task_chain = chain(task_json.s(1, 2), task_json.s(4), task_group)
            eq_(task_chain.delay().get(interval=0.01),
                [7, 8, 9, 10, 11, 12, 13, 14, 15, 16]) 
Example #25
Source File: tasks.py    From toptal-blog-celery-toy-ex with MIT License 5 votes
def produce_hot_repo_report(period, ref_date=None):
    # 1. parse date
    ref_date_str = strf_date(period, ref_date=ref_date)

    # 2. fetch and join
    fetch_jobs = group([
        fetch_hot_repos.s(ref_date_str, 100, 1),
        fetch_hot_repos.s(ref_date_str, 100, 2),
        fetch_hot_repos.s(ref_date_str, 100, 3),
        fetch_hot_repos.s(ref_date_str, 100, 4),
        fetch_hot_repos.s(ref_date_str, 100, 5)
    ])
    # 3. group by language and
    # 4. create csv
    return chord(fetch_jobs)(build_report_task.s(ref_date_str)).get() 
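For reference, the same fan-out/fan-in can be written with the pipe operator, since chaining a group into another signature implicitly upgrades the pair to a chord. A sketch using the names from the example above:

# Equivalent canvas (sketch): a group piped into a callback becomes a chord.
workflow = fetch_jobs | build_report_task.s(ref_date_str)
result = workflow.delay().get()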
Example #26
Source File: tasks.py    From videoSpider with MIT License 5 votes
def get_task_group_by_id(ids, task, **kwargs):
    id_size = len(ids)
    if 'group_size' in kwargs:
        group_size = kwargs.pop('group_size')
    else:
        group_size = 20
    kwargs['pool_number'] = group_size
    subtasks = [
        task.s(
            ids[x: x+group_size],
            **kwargs
        ) for x in range(0, id_size, group_size)
    ]

    return group(subtasks) 
Example #27
Source File: prfetcher.py    From gm_pr with Apache License 2.0 5 votes
def dmap(it, callback):
    # http://stackoverflow.com/questions/13271056/how-to-chain-a-celery-task-that-returns-a-list-into-a-group
    # Map a callback over an iterator and return as a group
    callback = subtask(callback)
    return group(callback.clone((arg,)) for arg in it)() 
Example #28
Source File: signature.py    From osspolice with GNU General Public License v3.0 5 votes
def extract_strings_regex(file_path):
    """
    Function to extract strings from a C source file.
    """
    # get all strings in this repo
    p = re.compile(
        r'(?P<prefix>(?:\bu8|\b[LuU])?)(?:"(?P<dbl>[^"\\]*(?:\\.[^"\\]*)*)"|\'(?P<sngl>[^\'\\]*(?:\\.[^\'\\]*)*)\')|R"([^"(]*)\((?P<raw>.*?)\)\4"')

    with open(file_path) as ins:
        for line in ins:
            line = line.rstrip('\n')

            # filter out "include *.c|cpp|cc|h"
            if re.search(r'\s*#\s*include\s*(?:<([^>]*)>|"([^"]*)")', line):
                continue

            # filter comments
            if re.search(r'//.*?\n|/\*.*?\*/|^\s*\*\s.*$', line):
                continue

            # iterate over them
            for x in p.finditer(line):
                if x.group("dbl"):
                    line = x.group("dbl")
                elif x.group("sngl"):
                    continue
                else:
                    line = x.group("raw")
                yield line 
Example #29
Source File: __init__.py    From data_integration_celery with GNU General Public License v3.0 5 votes
def grouped_task_daily():
    """only for test use"""
    group([
        # wind_daily_task,
        # ifind_daily_task,
        tushare_daily_task,
        # cmc_daily_task,
        jq_daily_task,
        jq_finance_task,
    ]).delay()