Python six.moves.queue.get() Examples

The following are 21 code examples of six.moves.queue.get(), drawn from open source projects. The source file, project, and license are listed above each example. You may also want to check out all available functions and classes of the six.moves.queue module.
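Before the project examples, a minimal self-contained sketch of the API itself may help; the worker function and queue names here are illustrative and do not come from any of the projects below. six.moves.queue resolves to the Queue module on Python 2 and the queue module on Python 3; get() blocks until an item is available, while get(False) and get(True, timeout) raise queue.Empty when nothing is available.

import threading

from six.moves import queue  # Queue on py2, queue on py3

q = queue.Queue()

def worker():
    q.put("hello")

t = threading.Thread(target=worker)
t.start()
print(q.get())        # blocking get(): waits for the worker's item
t.join()

try:
    q.get(False)      # non-blocking get() on an empty queue
except queue.Empty:
    print("queue is empty")

try:
    q.get(True, 0.1)  # blocking get() with a timeout
except queue.Empty:
    print("timed out")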
Example #1
Source File: parallel_map.py    From tensorpack with Apache License 2.0
def __iter__(self):
        # Iterate over the underlying dataflow forever.
        ds_itr = _repeat_iter(self.ds.get_data)
        with self._guard:
            while True:
                # Collect one datapoint per worker process.
                dps = []
                for k in range(self.nr_proc):
                    dps.append(copy.copy(next(ds_itr)))
                # Map the chosen component of each datapoint in the pool; each
                # worker writes its result into its shared-memory slot and
                # returns its worker id (or None if the map function returned None).
                to_map = [x[self.index] for x in dps]
                res = self._pool.map_async(_pool_map, to_map)

                for index in res.get():
                    if index is None:
                        continue
                    # Read the result back out of the worker's shared slot.
                    arr = np.reshape(self._shared_mem[index], self.output_shape)
                    dp = dps[index]
                    dp[self.index] = arr.copy()
                    yield dp


Example #2
Source File: parallel_map.py    From dataflow with Apache License 2.0
def __iter__(self):
        ds_itr = _repeat_iter(self.ds.get_data)
        with self._guard:
            while True:
                dps = []
                for k in range(self.nr_proc):
                    dps.append(copy.copy(next(ds_itr)))
                to_map = [x[self.index] for x in dps]
                res = self._pool.map_async(_pool_map, to_map)

                for index in res.get():
                    if index is None:
                        continue
                    arr = np.reshape(self._shared_mem[index], self.output_shape)
                    dp = dps[index]
                    dp[self.index] = arr.copy()
                    yield dp


Example #3
Source File: data_process.py    From 3D-R2N2 with MIT License
def test_process():
    from multiprocessing import Queue
    from lib.config import cfg
    from lib.data_io import category_model_id_pair

    cfg.TRAIN.PAD_X = 10
    cfg.TRAIN.PAD_Y = 10

    data_queue = Queue(2)
    category_model_pair = category_model_id_pair(dataset_portion=[0, 0.1])

    data_process = ReconstructionDataProcess(data_queue, category_model_pair)
    data_process.start()
    batch_img, batch_voxel = data_queue.get()  # blocks until the worker puts a batch

    kill_processes(data_queue, [data_process]) 
Example #4
Source File: piapi.py    From piapi with Apache License 2.0
def _request_wrapper(self, queue, url, params, timeout):
        """
        Wrapper to requests used by each thread.

        Parameters
        ----------
        queue : Queue.Queue
            The Queue to write the response from the request in.
        url : str
            The URL to be queried.
        params : dict
            A dictionary of parameters to pass to the request.
        timeout : int
            Timeout to wait for a response to the request.
        """
        response = self.session.get(url, params=params, verify=self.verify, timeout=timeout)
        queue.put(response) 
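A hedged usage sketch for the wrapper above: several threads issue requests concurrently and the caller drains their responses from a shared queue with get(). Only the _request_wrapper signature comes from the example; the client object, URL, and parameter values are illustrative assumptions.

import threading

from six.moves import queue

# Illustrative: `client` is assumed to be an instance of the class that
# defines _request_wrapper above.
responses = queue.Queue()
urls = ["https://pi.example.com/webacs/api/v1/data/Devices.json"]

threads = [
    threading.Thread(target=client._request_wrapper,
                     args=(responses, url, {}, 30))
    for url in urls
]
for t in threads:
    t.start()
for t in threads:
    t.join()

for _ in urls:
    response = responses.get()  # one requests.Response per thread
    print(response.status_code)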
Example #5
Source File: data_process.py    From 3D-R2N2-PyTorch with MIT License
def test_process():
    from multiprocessing import Queue
    from lib.config import cfg
    from lib.data_io import category_model_id_pair

    cfg.TRAIN.PAD_X = 10
    cfg.TRAIN.PAD_Y = 10

    data_queue = Queue(2)
    category_model_pair = category_model_id_pair(dataset_portion=[0, 0.1])

    data_process = ReconstructionDataProcess(data_queue, category_model_pair)
    data_process.start()
    batch_img, batch_voxel = data_queue.get()  # blocks until the worker puts a batch

    kill_processes(data_queue, [data_process]) 
Example #6
Source File: server.py    From testplan with Apache License 2.0
def receive(self, conn_name=(None, None), timeout=30):
        """
        Receive a FIX message from the given connection.

        The connection name defaults to ``(None, None)``. In this case,
        the server will try to find the one and only available connection.
        This will fail if more than one connection is available or if the
        only connection is no longer active.

        :param conn_name: Connection name to receive message from
        :type conn_name: ``tuple`` of ``str`` and ``str``
        :param timeout: timeout in seconds
        :type timeout: ``int``

        :return: FIX message received
        :rtype: ``FixMessage``
        """
        conn_name = self._validate_connection_name(conn_name)
        return self._conndetails_by_name[conn_name].queue.get(True, timeout) 
Example #7
Source File: parallel_map.py    From ADL with MIT License
def __iter__(self):
        ds_itr = _repeat_iter(self.ds.get_data)
        with self._guard:
            while True:
                dps = []
                for k in range(self.nr_proc):
                    dps.append(copy.copy(next(ds_itr)))
                to_map = [x[self.index] for x in dps]
                res = self._pool.map_async(_pool_map, to_map)

                for index in res.get():
                    if index is None:
                        continue
                    arr = np.reshape(self._shared_mem[index], self.output_shape)
                    dp = dps[index]
                    dp[self.index] = arr.copy()
                    yield dp


Example #8
Source File: parallel_map.py    From petridishnn with MIT License
def __iter__(self):
        ds_itr = _repeat_iter(self.ds.get_data)
        with self._guard:
            while True:
                dps = []
                for k in range(self.nr_proc):
                    dps.append(copy.copy(next(ds_itr)))
                to_map = [x[self.index] for x in dps]
                res = self._pool.map_async(_pool_map, to_map)

                for index in res.get():
                    if index is None:
                        continue
                    arr = np.reshape(self._shared_mem[index], self.output_shape)
                    dp = dps[index]
                    dp[self.index] = arr.copy()
                    yield dp 
Example #9
Source File: parallel_map.py    From petridishnn with MIT License
def _recv(self):
        return self._out_queue.get() 
Example #10
Source File: parallel_map.py    From tensorpack with Apache License 2.0
def __init__(self, ds, nr_proc, map_func, output_shape, output_dtype, index=0):
        """
        Args:
            ds (DataFlow): the dataflow to map on
            nr_proc(int): number of processes
            map_func (data component -> ndarray | None): the mapping function
            output_shape (tuple): the shape of the output of map_func
            output_dtype (np.dtype): the type of the output of map_func
            index (int): the index of the datapoint component to map on.
        """
        self.ds = ds
        self.nr_proc = nr_proc
        self.map_func = map_func
        self.output_shape = output_shape
        self.output_dtype = np.dtype(output_dtype).type
        self.index = index

        self._shared_mem = [self._create_shared_arr() for k in range(nr_proc)]
        id_queue = mp.Queue()
        for k in range(nr_proc):
            id_queue.put(k)

        def _init_pool(arrs, queue, map_func):
            # Each worker claims a unique id from the queue exactly once at
            # start-up, binding it to its own shared-memory slot.
            id = queue.get()
            global SHARED_ARR, WORKER_ID, MAP_FUNC
            SHARED_ARR = arrs[id]
            WORKER_ID = id
            MAP_FUNC = map_func

        self._pool = mp.pool.Pool(
            processes=nr_proc,
            initializer=_init_pool,
            initargs=(self._shared_mem, id_queue, map_func)) 
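The initializer above hands each pool worker a unique id via queue.get(): every worker calls it exactly once during start-up, so each shared-memory slot is claimed by exactly one process. The _pool_map function the pool runs is not part of this excerpt; the following is a hedged sketch of what such a worker function could look like, writing its result into the worker's shared slot and returning its id so the parent knows which slot to read.

import numpy as np

def _pool_map(dp):
    # Sketch only: SHARED_ARR, WORKER_ID and MAP_FUNC are the globals set
    # by _init_pool in this worker process.
    global SHARED_ARR, WORKER_ID, MAP_FUNC
    res = MAP_FUNC(dp)
    if res is None:
        return None
    shared = np.reshape(SHARED_ARR, res.shape)
    shared[:] = res    # copy the result into this worker's shared slot
    return WORKER_ID   # the parent reads self._shared_mem[WORKER_ID]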
Example #11
Source File: parallel_map.py    From tensorpack with Apache License 2.0
def _recv(self):
        return self._out_queue.get() 
Example #12
Source File: data_process.py    From 3D-R2N2-PyTorch with MIT License
def kill_processes(queue, processes):
    print('Signal processes')
    for p in processes:
        p.shutdown()

    print('Empty queue')
    # Drain the queue so the producers' feeder threads can flush; a
    # non-empty multiprocessing.Queue can otherwise block clean teardown.
    while not queue.empty():
        time.sleep(0.5)
        queue.get(False)

    print('kill processes')
    for p in processes:
        p.terminate()
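Note that queue.empty() followed by a non-blocking get() can race with a producer that is still shutting down; a slightly more defensive drain, sketched here with an illustrative helper name, simply swallows the Empty that get(False) raises once the queue runs dry.

from six.moves import queue as queue_module

def drain(q):
    # Illustrative helper: keep calling the non-blocking get() until the
    # queue raises Empty, tolerating the empty()/get() race above.
    while True:
        try:
            q.get(False)
        except queue_module.Empty:
            return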
Example #13
Source File: parallel_map.py    From ADL with MIT License
def __init__(self, ds, nr_proc, map_func, output_shape, output_dtype, index=0):
        """
        Args:
            ds (DataFlow): the dataflow to map on
            nr_proc(int): number of processes
            map_func (data component -> ndarray | None): the mapping function
            output_shape (tuple): the shape of the output of map_func
            output_dtype (np.dtype): the type of the output of map_func
            index (int): the index of the datapoint component to map on.
        """
        self.ds = ds
        self.nr_proc = nr_proc
        self.map_func = map_func
        self.output_shape = output_shape
        self.output_dtype = np.dtype(output_dtype).type
        self.index = index

        self._shared_mem = [self._create_shared_arr() for k in range(nr_proc)]
        id_queue = mp.Queue()
        for k in range(nr_proc):
            id_queue.put(k)

        def _init_pool(arrs, queue, map_func):
            id = queue.get()
            global SHARED_ARR, WORKER_ID, MAP_FUNC
            SHARED_ARR = arrs[id]
            WORKER_ID = id
            MAP_FUNC = map_func

        self._pool = mp.pool.Pool(
            processes=nr_proc,
            initializer=_init_pool,
            initargs=(self._shared_mem, id_queue, map_func)) 
Example #14
Source File: parallel_map.py    From ADL with MIT License
def _recv(self):
        return self._out_queue.get() 
Example #15
Source File: parallel_map.py    From petridishnn with MIT License
def __init__(self, ds, nr_proc, map_func, output_shape, output_dtype, index=0):
        """
        Args:
            ds (DataFlow): the dataflow to map on
            nr_proc(int): number of processes
            map_func (data component -> ndarray | None): the mapping function
            output_shape (tuple): the shape of the output of map_func
            output_dtype (np.dtype): the type of the output of map_func
            index (int): the index of the datapoint component to map on.
        """
        self.ds = ds
        self.nr_proc = nr_proc
        self.map_func = map_func
        self.output_shape = output_shape
        self.output_dtype = np.dtype(output_dtype).type
        self.index = index

        self._shared_mem = [self._create_shared_arr() for k in range(nr_proc)]
        id_queue = mp.Queue()
        for k in range(nr_proc):
            id_queue.put(k)

        def _init_pool(arrs, queue, map_func):
            id = queue.get()
            global SHARED_ARR, WORKER_ID, MAP_FUNC
            SHARED_ARR = arrs[id]
            WORKER_ID = id
            MAP_FUNC = map_func

        self._pool = mp.pool.Pool(
            processes=nr_proc,
            initializer=_init_pool,
            initargs=(self._shared_mem, id_queue, map_func))
        self._guard = DataFlowReentrantGuard() 
Example #16
Source File: parallel_map.py    From dataflow with Apache License 2.0
def _recv(self):
        return self._out_queue.get() 
Example #17
Source File: server.py    From testplan with Apache License 2.0
def _flush_queue(self, queue):
        """
        Flush the given receive queue.

        :param queue: Queue to flush.
        :type queue: ``Queue.Queue``
        """
        # The ``queue`` parameter shadows the stdlib module, so the Empty
        # exception must be imported under another name.
        from six.moves.queue import Empty
        try:
            while True:
                queue.get(False)  # non-blocking get until drained
        except Empty:
            return
Example #18
Source File: piapi.py    From piapi with Apache License 2.0
def service_resources(self):
        """
        List of all available service resources, meaning resources that modify the NMS.
        """
        if self._service_resources:
            return list(self._service_resources.keys())

        service_resources_url = six.moves.urllib.parse.urljoin(self.base_url, "op.json")
        response = self.session.get(service_resources_url, verify=self.verify)
        response_json = self._parse(response)
        for entry in response_json["queryResponse"]["operation"]:
            self._service_resources[entry["$"]] = {"method": entry["@httpMethod"], "url": six.moves.urllib.parse.urljoin(self.base_url, "op/%s.json" % entry["@path"])}

        return list(self._service_resources.keys()) 
Example #19
Source File: piapi.py    From piapi with Apache License 2.0
def data_resources(self):
        """
        List of all available data resources, meaning resources that return data.
        """
        if self._data_resources:
            return list(self._data_resources.keys())

        data_resources_url = six.moves.urllib.parse.urljoin(self.base_url, "data.json")
        response = self.session.get(data_resources_url, verify=self.verify)
        response_json = self._parse(response)
        for entry in response_json["queryResponse"]["entityType"]:
            self._data_resources[entry["$"]] = "%s.json" % entry["@url"]

        return list(self._data_resources.keys()) 
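A hedged usage sketch covering this listing and the previous one; the PIAPI class name and constructor arguments are assumptions, since only the two properties appear in these excerpts.

# Illustrative only: the constructor signature is an assumption.
api = PIAPI("https://pi.example.com", verify=False)
print(api.data_resources)     # resources that return data
print(api.service_resources)  # resources that modify the NMS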
Example #20
Source File: data_process.py    From 3D-R2N2 with MIT License
def kill_processes(queue, processes):
    print('Signal processes')
    for p in processes:
        p.shutdown()

    print('Empty queue')
    while not queue.empty():
        time.sleep(0.5)
        queue.get(False)

    print('kill processes')
    for p in processes:
        p.terminate() 
Example #21
Source File: parallel_map.py    From dataflow with Apache License 2.0
def __init__(self, ds, nr_proc, map_func, output_shape, output_dtype, index=0):
        """
        Args:
            ds (DataFlow): the dataflow to map on
            nr_proc(int): number of processes
            map_func (data component -> ndarray | None): the mapping function
            output_shape (tuple): the shape of the output of map_func
            output_dtype (np.dtype): the type of the output of map_func
            index (int): the index of the datapoint component to map on.
        """
        self.ds = ds
        self.nr_proc = nr_proc
        self.map_func = map_func
        self.output_shape = output_shape
        self.output_dtype = np.dtype(output_dtype).type
        self.index = index

        self._shared_mem = [self._create_shared_arr() for k in range(nr_proc)]
        id_queue = mp.Queue()
        for k in range(nr_proc):
            id_queue.put(k)

        def _init_pool(arrs, queue, map_func):
            id = queue.get()
            global SHARED_ARR, WORKER_ID, MAP_FUNC
            SHARED_ARR = arrs[id]
            WORKER_ID = id
            MAP_FUNC = map_func

        self._pool = mp.pool.Pool(
            processes=nr_proc,
            initializer=_init_pool,
            initargs=(self._shared_mem, id_queue, map_func))