Python queue.Queue() Examples

The following are 27 code examples of queue.Queue(), taken from open-source projects. The source file and originating project are noted above each example. You may also want to check out the other available functions and classes of the queue module.
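Before diving into the project code, here is a minimal, self-contained producer/consumer sketch (written for this page, not taken from any of the projects below) showing the basic queue.Queue() usage that most of the examples build on:

import queue
import threading

def worker(q):
    # Consume items until the producer sends the None sentinel.
    while True:
        item = q.get()          # blocks until an item is available
        if item is None:
            break
        print('processing', item)

q = queue.Queue()
t = threading.Thread(target=worker, args=(q,))
t.start()
for i in range(5):
    q.put(i)                    # producer side: hand work to the consumer thread
q.put(None)                     # sentinel tells the worker to exit
t.join()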
Example #1
Source File: osdriver.py    From multibootusb with GNU General Public License v2.0
def dd_iso_image(self, input_, output, gui_update, status_update):
        '''Implementation for operating systems that use dd to write the ISO image.
        '''
        in_file_size = os.path.getsize(input_)
        cmd = [self.dd_exe, 'if=' + input_,
               'of=' + self.physical_disk(output), 'bs=1M']
        self.dd_iso_image_add_args(cmd, input_, output)
        kw_args = {
            'stdout' : subprocess.PIPE,
            'stderr' : subprocess.PIPE,
            'shell'  : False,
            }
        self.add_dd_iso_image_popen_args(kw_args)
        self.dd_iso_image_prepare(input_, output, status_update)
        log('Executing => ' + str(cmd))
        dd_process = subprocess.Popen(cmd, **kw_args)
        output_q = queue.Queue()
        while dd_process.poll() is None:
            self.dd_iso_image_readoutput(dd_process, gui_update, in_file_size,
                                         output_q)
        output_lines = [output_q.get() for i in range(output_q.qsize())]
        for l in output_lines:
            log('dd: ' + l)
        return self.dd_iso_image_interpret_result(
            dd_process.returncode, output_lines) 
Example #2
Source File: itn_helpers.py    From django-payfast with MIT License
@contextmanager  # from contextlib; required so itn_handler can be used in a with-statement as shown below
def itn_handler(host, port):  # type: (str, int) -> Iterator[Queue]
    """
    Usage::

        with itn_handler(ITN_HOST, ITN_PORT) as itn_queue:
            # ...complete PayFast payment...
            itn_data = itn_queue.get(timeout=2)
    """
    server_address = (host, port)
    http_server = HTTPServer(server_address, PayFastITNHandler)
    http_server.itn_queue = Queue()  # type: ignore

    executor = ThreadPoolExecutor(max_workers=1)
    executor.submit(http_server.serve_forever)
    try:
        yield http_server.itn_queue  # type: ignore
    finally:
        http_server.shutdown() 
Example #3
Source File: timer_queue.py    From misp42splunk with GNU Lesser General Public License v3.0
def _check_and_execute(self):
        wakeup_queue = self._wakeup_queue
        while 1:
            (next_expired_time, expired_timers) = self._get_expired_timers()
            for timer in expired_timers:
                try:
                    # Note: keep timer callbacks lightweight and short
                    timer()
                except Exception:
                    logging.error(traceback.format_exc())

            self._reset_timers(expired_timers)

            sleep_time = _calc_sleep_time(next_expired_time)
            try:
                wakeup = wakeup_queue.get(timeout=sleep_time)
                if wakeup is TEARDOWN_SENTINEL:
                    break
            except Queue.Empty:
                pass
        logging.info('TimerQueue stopped.') 
Example #4
Source File: ta_data_loader.py    From misp42splunk with GNU Lesser General Public License v3.0
def __init__(self, job_scheduler, event_writer):
        """
        @configs: a list like object containing a list of dict
        like object. Each element shall implement dict.get/[] like interfaces
        to get the value for a key.
        @job_scheduler: schedulering the jobs. shall implement get_ready_jobs
        @event_writer: write_events
        """

        self._settings = self._read_default_settings()
        self._settings["daemonize_thread"] = False
        self._event_writer = event_writer
        self._wakeup_queue = queue.Queue()
        self._scheduler = job_scheduler
        self._timer_queue = tq.TimerQueue()
        self._executor = ce.ConcurrentExecutor(self._settings)
        self._started = False 
Example #5
Source File: camera_node.py    From RacingRobot with MIT License
def extractInfo(self):
        try:
            while not self.exit:
                try:
                    frame = self.frame_queue.get(block=True, timeout=1)
                except queue.Empty:
                    print("Queue empty")
                    continue
                try:
                    # Publish new image
                    msg = self.bridge.cv2_to_imgmsg(frame, 'rgb8')
                    if not self.exit:
                        self.image_publisher.publish(msg)
                except CvBridgeError as e:
                    print("Error Converting cv image: {}".format(e.message))
                self.frame_num += 1
        except Exception as e:
            print("Exception after loop: {}".format(e))
            raise 
Example #6
Source File: rl_data.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def __init__(self, batch_size, input_length, nthreads=6, web_viz=False):
        super(RLDataIter, self).__init__()
        self.batch_size = batch_size
        self.input_length = input_length
        self.env = [self.make_env() for _ in range(batch_size)]
        self.act_dim = self.env[0].action_space.n

        self.state_ = None

        self.reset()

        self.provide_data = [mx.io.DataDesc('data', self.state_.shape, np.uint8)]

        self.web_viz = web_viz
        if web_viz:
            self.queue = queue.Queue()
            self.thread = Thread(target=make_web, args=(self.queue,))
            self.thread.daemon = True
            self.thread.start()

        self.nthreads = nthreads
        if nthreads > 1:
            self.pool = multiprocessing.pool.ThreadPool(6) 
Example #7
Source File: 8_prodcons.py    From deep-learning-note with MIT License
def main():
    nloops = randint(2, 20)
    q = Queue(32)

    threads = []
    for i in nfuncs:
        t = MyThread(funcs[i], (q, nloops), funcs[i].__name__)
        threads.append(t)

    for i in nfuncs:
        threads[i].start()

    for i in nfuncs:
        threads[i].join()

    print("all DONE") 
Example #8
Source File: webCrawler.py    From Learning-Concurrency-in-Python with MIT License
def main():
  print("Starting our Web Crawler")
  baseUrl = input("Website > ")
  numberOfThreads = input("No Threads > ")

  linksToCrawl = queue.Queue()
  urlLock = threading.Lock()
  linksToCrawl.put(baseUrl)
  haveVisited = []
  crawlers = []
  errorLinks = []

  for i in range(int(numberOfThreads)):
    crawler = Crawler(baseUrl, linksToCrawl, haveVisited, errorLinks, urlLock)
    crawler.start()
    crawlers.append(crawler)

  for crawler in crawlers:
    crawler.join()

  print("Total Number of Pages Visited {}".format(len(haveVisited)))
  print("Total Number of Pages with Errors {}".format(len(errorLinks))) 
Example #9
Source File: process.py    From plugin.video.kmediatorrent with GNU General Public License v3.0
def _process_worker(call_queue, result_queue):
    """Evaluates calls from call_queue and places the results in result_queue.

    This worker is run in a separate process.

    Args:
        call_queue: A multiprocessing.Queue of _CallItems that will be read and
            evaluated by the worker.
        result_queue: A multiprocessing.Queue of _ResultItems that will written
            to by the worker.
    """
    while True:
        call_item = call_queue.get(block=True)
        if call_item is None:
            # Wake up queue management thread
            result_queue.put(None)
            return
        try:
            r = call_item.fn(*call_item.args, **call_item.kwargs)
        except BaseException:
            e = sys.exc_info()[1]
            result_queue.put(_ResultItem(call_item.work_id,
                                         exception=e))
        else:
            result_queue.put(_ResultItem(call_item.work_id,
                                         result=r)) 
Example #10
Source File: timer_queue.py    From misp42splunk with GNU Lesser General Public License v3.0
def __init__(self):
        self._timers = TimerQueue.sc.SortedSet()
        self._cancelling_timers = {}
        self._lock = threading.Lock()
        self._wakeup_queue = queue.Queue()
        self._thr = threading.Thread(target=self._check_and_execute)
        self._started = False 
Example #11
Source File: process.py    From plugin.video.kmediatorrent with GNU General Public License v3.0
def _add_call_item_to_queue(pending_work_items,
                            work_ids,
                            call_queue):
    """Fills call_queue with _WorkItems from pending_work_items.

    This function never blocks.

    Args:
        pending_work_items: A dict mapping work ids to _WorkItems e.g.
            {5: <_WorkItem...>, 6: <_WorkItem...>, ...}
        work_ids: A queue.Queue of work ids e.g. Queue([5, 6, ...]). Work ids
            are consumed and the corresponding _WorkItems from
            pending_work_items are transformed into _CallItems and put in
            call_queue.
        call_queue: A multiprocessing.Queue that will be filled with _CallItems
            derived from _WorkItems.
    """
    while True:
        if call_queue.full():
            return
        try:
            work_id = work_ids.get(block=False)
        except queue.Empty:
            return
        else:
            work_item = pending_work_items[work_id]

            if work_item.future.set_running_or_notify_cancel():
                call_queue.put(_CallItem(work_id,
                                         work_item.fn,
                                         work_item.args,
                                         work_item.kwargs),
                               block=True)
            else:
                del pending_work_items[work_id]
                continue 
Example #12
Source File: event_writer.py    From misp42splunk with GNU Lesser General Public License v3.0
def __init__(self, process_safe=False):
        if process_safe:
            self._mgr = multiprocessing.Manager()
            self._event_queue = self._mgr.Queue(1000)
        else:
            self._event_queue = queue.Queue(1000)
        self._event_writer = threading.Thread(target=self._do_write_events)
        self._event_writer.daemon = True
        self._started = False
        self._exception = False 
Example #13
Source File: discovery.py    From olympe with BSD 3-Clause "New" or "Revised" License
def __init__(self, backend):
        self._backend = backend
        self._thread_loop = self._backend._thread_loop
        self.logger = self._backend.logger
        self._devices = OrderedDict()
        self._device_queue = queue.Queue()

        self.userdata = ctypes.c_void_p()
        self.discovery = None
        self._thread_loop.register_cleanup(self._destroy) 
Example #14
Source File: process.py    From plugin.video.kmediatorrent with GNU General Public License v3.0
def __init__(self, max_workers=None):
        """Initializes a new ProcessPoolExecutor instance.

        Args:
            max_workers: The maximum number of processes that can be used to
                execute the given calls. If None or not given then as many
                worker processes will be created as the machine has processors.
        """
        _check_system_limits()

        if max_workers is None:
            self._max_workers = multiprocessing.cpu_count()
        else:
            self._max_workers = max_workers

        # Make the call queue slightly larger than the number of processes to
        # prevent the worker processes from idling. But don't make it too big
        # because futures in the call queue cannot be cancelled.
        self._call_queue = multiprocessing.Queue(self._max_workers +
                                                 EXTRA_QUEUED_CALLS)
        self._result_queue = multiprocessing.Queue()
        self._work_ids = queue.Queue()
        self._queue_management_thread = None
        self._processes = set()

        # Shutdown is a two-step process.
        self._shutdown_thread = False
        self._shutdown_lock = threading.Lock()
        self._queue_count = 0
        self._pending_work_items = {} 
Example #15
Source File: discovery.py    From olympe with BSD 3-Clause "New" or "Revised" License
def start(self):
        self._devices = OrderedDict()
        self._device_queue = queue.Queue()
        f = self._thread_loop.run_async(self._do_start)
        try:
            return f.result_or_cancel(timeout=_DEFAULT_TIMEOUT)
        except concurrent.futures.TimeoutError:
            self.logger.warning("Discovery start timedout")
            return False 
Example #16
Source File: timer_queue.py    From misp42splunk with GNU Lesser General Public License v3.0
def __init__(self):
        self._timers = TimerQueueStruct()
        self._lock = threading.Lock()
        self._wakeup_queue = Queue.Queue()
        self._thr = threading.Thread(target=self._check_and_execute)
        self._thr.daemon = True
        self._started = False 
Example #17
Source File: patch_conn.py    From rlite-py with BSD 2-Clause "Simplified" License
def __init__(self, *args, **kwargs):
        super(RliteConnection, self).__init__(*args, **kwargs)
        self.rlite = hirlite.Rlite(path=self.filename)
        self.q = Queue()

        if self.db:
            self.send_command('SELECT', self.db)
            self.read_response() 
Example #18
Source File: auto.py    From dcc with Apache License 2.0
def fetcher(self, q):
        """
        This method is called to fetch a new app in order to analyse it. The queue
        must be fill with the following format: (filename, raw)

        must return False if the queue is filled, thus all files are read.

        :param q: the Queue to put new app
        """
        return False 
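For illustration only, here is a hedged sketch of how a subclass might override this hook; the DirectoryFetcher class, its apk_dir attribute, and the one-shot fill behaviour are assumptions made for this example and are not part of the dcc sources:

import os

class DirectoryFetcher(object):
    """Hypothetical fetcher that feeds every file from a directory once."""

    def __init__(self, apk_dir):
        self.apk_dir = apk_dir
        self._done = False

    def fetcher(self, q):
        if self._done:
            return False                     # nothing left: all files have been read
        for name in os.listdir(self.apk_dir):
            path = os.path.join(self.apk_dir, name)
            if not os.path.isfile(path):
                continue
            with open(path, 'rb') as f:
                q.put((name, f.read()))      # queue entries are (filename, raw)
        self._done = True
        return True                          # the queue was filled on this call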
Example #19
Source File: camera_node.py    From RacingRobot with MIT License
def __init__(self, camera, image_publisher):
        super(RGBAnalyser, self).__init__(camera)
        self.frame_num = 0
        self.referenceFrame = None
        self.frame_queue = queue.Queue(maxsize=1)
        self.exit = False
        self.bridge = CvBridge()
        self.image_publisher = image_publisher
        self.thread = None
        self.start() 
Example #20
Source File: picamera_threads.py    From RacingRobot with MIT License
def __init__(self, camera, out_queue, debug=False):
        super(RGBAnalyser, self).__init__(camera)
        self.frame_num = 0
        self.frame_queue = queue.Queue(maxsize=1)
        self.exit = False
        self.out_queue = out_queue
        self.debug = debug
        self.thread = None
        self.start() 
Example #21
Source File: main.py    From RacingRobot with MIT License
def forceStop(command_queue, n_received_semaphore):
    """
    Stop The car
    :param command_queue: (CustomQueue) Queue for sending orders to the Arduino
    :param n_received_semaphore: (threading.Semaphore) Semaphore to regulate orders sent to the Arduino
    """
    command_queue.clear()
    n_received_semaphore.release()
    n_received_semaphore.release()
    command_queue.put((Order.MOTOR, 0))
    command_queue.put((Order.SERVO, int((THETA_MIN + THETA_MAX) / 2))) 
Example #22
Source File: mavlink.py    From dronekit-python with Apache License 2.0
def reset(self):
        self.out_queue = Queue()
        if hasattr(self.master, 'reset'):
            self.master.reset()
        else:
            try:
                self.master.close()
            except:
                pass
            self.master = mavutil.mavlink_connection(self.master.address) 
Example #23
Source File: test.py    From lnd_grpc with MIT License
def get_updates(_queue):
    """
    Get all available updates from a queue.Queue() instance and return them as a list
    """
    _list = []
    while not _queue.empty():
        _list.append(_queue.get())
    return _list 
Example #24
Source File: main.py    From kivy-smoothie-host with GNU General Public License v3.0
def async_get_display_data(self, *largs):
        ''' fetches data from the Queue and displays it, triggered by incoming data '''
        while not self._q.empty():
            # we need this loop until q is empty as trigger only triggers once per frame
            data = self._q.get(False)
            if data.endswith('\r'):
                self.add_line_to_log(data[0:-1], True)
            else:
                self.add_line_to_log(data) 
Example #25
Source File: main.py    From kivy-smoothie-host with GNU General Public License v3.0
def __init__(self, **kwargs):
        super(MainWindow, self).__init__(**kwargs)
        self.app = App.get_running_app()
        self._trigger = Clock.create_trigger(self.async_get_display_data)
        self._q = queue.Queue()
        self.config = self.app.config
        self.last_path = self.config.get('General', 'last_gcode_path')
        self.paused = False
        self.last_line = 0

        # print('font size: {}'.format(self.ids.log_window.font_size))
        # Clock.schedule_once(self.my_callback, 2) # hack to overcome the page layout not laying out initially 
Example #26
Source File: threaded.py    From neat-python with BSD 3-Clause "New" or "Revised" License
def __init__(self, num_workers, eval_function):
        """
        eval_function should take two arguments (a genome object and the
        configuration) and return a single float (the genome's fitness).
        """
        self.num_workers = num_workers
        self.eval_function = eval_function
        self.workers = []
        self.working = False
        self.inqueue = queue.Queue()
        self.outqueue = queue.Queue()

        if not HAVE_THREADS: # pragma: no cover
            warnings.warn("No threads available; use ParallelEvaluator, not ThreadedEvaluator") 
Example #27
Source File: collocations_app.py    From razzy-spinner with GNU General Public License v3.0
def __init__(self):
        self.queue = q.Queue()
        self.model = CollocationsModel(self.queue)
        self.top = Tk()
        self._init_top(self.top)
        self._init_menubar()
        self._init_widgets(self.top)
        self.load_corpus(self.model.DEFAULT_CORPUS)
        self.after = self.top.after(POLL_INTERVAL, self._poll)