Python threading.RLock() Examples

The following code examples show how to use threading.RLock(). They are taken from open source Python projects.
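
Most of the examples below follow the same pattern: create an RLock once (often in __init__) and wrap any access to shared state in a with block. Here is a minimal, standalone sketch of that pattern; the Counter class and its attribute names are made up for illustration and are not taken from any project below.

import threading

class Counter:
    def __init__(self):
        self.value = 0
        self._lock = threading.RLock()   # reentrant: the owning thread may acquire it again

    def increment(self):
        with self._lock:                 # serialize access to shared state
            self.value += 1

counter = Counter()
threads = [threading.Thread(target=counter.increment) for _ in range(4)]
for t in threads:
    t.start()
for t in threads:
    t.join()
print(counter.value)  # 4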

Example 1
Project: pyblish-win   Author: pyblish   File: _threading_local.py    GNU Lesser General Public License v3.0
def __new__(cls, *args, **kw):
        self = object.__new__(cls)
        key = '_local__key', 'thread.local.' + str(id(self))
        object.__setattr__(self, '_local__key', key)
        object.__setattr__(self, '_local__args', (args, kw))
        object.__setattr__(self, '_local__lock', RLock())

        if (args or kw) and (cls.__init__ is object.__init__):
            raise TypeError("Initialization arguments are not supported")

        # We need to create the thread dict in anticipation of
        # __init__ being called, to make sure we don't call it
        # again ourselves.
        dict = object.__getattribute__(self, '__dict__')
        current_thread().__dict__[key] = dict

        return self 
Example 2
Project: pyblish-win   Author: pyblish   File: __init__.py    GNU Lesser General Public License v3.0
def _checkLevel(level):
    if isinstance(level, (int, long)):
        rv = level
    elif str(level) == level:
        if level not in _levelNames:
            raise ValueError("Unknown level: %r" % level)
        rv = _levelNames[level]
    else:
        raise TypeError("Level not an integer or a valid string: %r" % level)
    return rv

#---------------------------------------------------------------------------
#   Thread-related stuff
#---------------------------------------------------------------------------

#
#_lock is used to serialize access to shared data structures in this module.
#This needs to be an RLock because fileConfig() creates and configures
#Handlers, and so might arbitrary user threads. Since Handler code updates the
#shared dictionary _handlers, it needs to acquire the lock. But if configuring,
#the lock would already have been acquired - so we need an RLock.
#The same argument applies to Loggers and Manager.loggerDict.
# 
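
The comment above describes a reentrancy requirement: fileConfig() holds the module lock while it creates and configures Handlers, and Handler code acquires the same lock again from the same thread. A minimal, standalone sketch of that situation (configure() and register_handler() are made-up names, not the logging module's API) shows why a plain Lock would deadlock while an RLock does not:

import threading

_lock = threading.RLock()   # with threading.Lock() the call below would deadlock

def register_handler(name):
    # Called directly by user threads, and also from within configure().
    with _lock:
        print("registered handler:", name)

def configure():
    # Still holding _lock when it registers handlers, so the same thread
    # acquires the lock a second time; RLock allows this.
    with _lock:
        register_handler("console")

configure()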
Example 3
Project: Ansible-Example-AB2018   Author: umit-ozturk   File: locators.py    MIT License
def __init__(self, url, timeout=None, num_workers=10, **kwargs):
        """
        Initialise an instance.
        :param url: The root URL to use for scraping.
        :param timeout: The timeout, in seconds, to be applied to requests.
                        This defaults to ``None`` (no timeout specified).
        :param num_workers: The number of worker threads you want to do I/O,
                            This defaults to 10.
        :param kwargs: Passed to the superclass.
        """
        super(SimpleScrapingLocator, self).__init__(**kwargs)
        self.base_url = ensure_slash(url)
        self.timeout = timeout
        self._page_cache = {}
        self._seen = set()
        self._to_fetch = queue.Queue()
        self._bad_hosts = set()
        self.skip_externals = False
        self.num_workers = num_workers
        self._lock = threading.RLock()
        # See issue #45: we need to be resilient when the locator is used
        # in a thread, e.g. with concurrent.futures. We can't use self._lock
        # as it is for coordinating our internal threads - the ones created
        # in _prepare_threads.
        self._gplock = threading.RLock() 
Example 4
Project: flasky   Author: RoseOu   File: locators.py    MIT License
def __init__(self, url, timeout=None, num_workers=10, **kwargs):
        """
        Initialise an instance.
        :param url: The root URL to use for scraping.
        :param timeout: The timeout, in seconds, to be applied to requests.
                        This defaults to ``None`` (no timeout specified).
        :param num_workers: The number of worker threads you want to do I/O,
                            This defaults to 10.
        :param kwargs: Passed to the superclass.
        """
        super(SimpleScrapingLocator, self).__init__(**kwargs)
        self.base_url = ensure_slash(url)
        self.timeout = timeout
        self._page_cache = {}
        self._seen = set()
        self._to_fetch = queue.Queue()
        self._bad_hosts = set()
        self.skip_externals = False
        self.num_workers = num_workers
        self._lock = threading.RLock() 
Example 5
Project: myhoard   Author: aiven   File: binlog_scanner.py    Apache License 2.0
def __init__(self, *, binlog_prefix, server_id, state_file, stats):
        super().__init__()
        binlogs = []
        lock = threading.RLock()
        self.binlog_prefix = binlog_prefix
        binlog_state_name = state_file.replace(".json", "") + ".binlogs"
        self.binlog_state = AppendOnlyStateManager(entries=binlogs, lock=lock, state_file=binlog_state_name)
        self.binlogs = binlogs
        # Keep track of binlogs we have in the file listing local binlogs; if persisting the binlogs
        # succeeds but persisting other metadata fails we'd end up in bad state without this logic
        self.known_local_indexes = {binlog["local_index"] for binlog in self.binlogs}
        self.lock = lock
        self.log = logging.getLogger(self.__class__.__name__)
        self.state = {
            "last_add": time.time(),
            "last_remove": None,
            "next_index": 1,
            "total_binlog_count": 0,
            "total_binlog_size": 0,
        }
        self.state_manager = StateManager(state=self.state, state_file=state_file)
        self.stats = stats
        self.server_id = server_id 
Example 6
Project: sic   Author: Yanixos   File: locators.py    GNU General Public License v3.0
def __init__(self, url, timeout=None, num_workers=10, **kwargs):
        """
        Initialise an instance.
        :param url: The root URL to use for scraping.
        :param timeout: The timeout, in seconds, to be applied to requests.
                        This defaults to ``None`` (no timeout specified).
        :param num_workers: The number of worker threads you want to do I/O,
                            This defaults to 10.
        :param kwargs: Passed to the superclass.
        """
        super(SimpleScrapingLocator, self).__init__(**kwargs)
        self.base_url = ensure_slash(url)
        self.timeout = timeout
        self._page_cache = {}
        self._seen = set()
        self._to_fetch = queue.Queue()
        self._bad_hosts = set()
        self.skip_externals = False
        self.num_workers = num_workers
        self._lock = threading.RLock()
        # See issue #45: we need to be resilient when the locator is used
        # in a thread, e.g. with concurrent.futures. We can't use self._lock
        # as it is for coordinating our internal threads - the ones created
        # in _prepare_threads.
        self._gplock = threading.RLock() 
Example 7
Project: bigquerylayers   Author: smandaric   File: bidi.py    GNU General Public License v3.0
def __init__(
        self,
        start_rpc,
        should_recover,
        should_terminate=_never_terminate,
        initial_request=None,
        metadata=None,
        throttle_reopen=False,
    ):
        super(ResumableBidiRpc, self).__init__(start_rpc, initial_request, metadata)
        self._should_recover = should_recover
        self._should_terminate = should_terminate
        self._operational_lock = threading.RLock()
        self._finalized = False
        self._finalize_lock = threading.Lock()

        if throttle_reopen:
            self._reopen_throttle = _Throttle(
                access_limit=5, time_window=datetime.timedelta(seconds=10),
            )
        else:
            self._reopen_throttle = None 
Example 8
Project: ANN   Author: waynezv   File: spider_baidu.py    MIT License
def __init__(self):
        Thread.__init__(self)
        self.browser=imitate_browser.BrowserBase()
        self.chance=0
        self.chance1=0
        self.request_queue=Queue()
        self.wait_ana_queue=Queue()
        #self.key_word_queue.put((("动态图", 0, 24)))
        self.count=0
        self.mutex = threading.RLock() # reentrant lock, so a single thread can re-acquire a lock it already holds
        self.commit_count=0
        self.ID=500
        self.next_proxy_set = set()
        self.dbconn = mdb.connect(DB_HOST, DB_USER, DB_PASS, 'sosogif', charset='utf8')
        self.dbconn.autocommit(False)
        self.dbcurr = self.dbconn.cursor()
        self.dbcurr.execute('SET NAMES utf8') 
Example 9
Project: AshsSDK   Author: thehappydinoa   File: locators.py    MIT License
def __init__(self, url, timeout=None, num_workers=10, **kwargs):
        """
        Initialise an instance.
        :param url: The root URL to use for scraping.
        :param timeout: The timeout, in seconds, to be applied to requests.
                        This defaults to ``None`` (no timeout specified).
        :param num_workers: The number of worker threads you want to do I/O,
                            This defaults to 10.
        :param kwargs: Passed to the superclass.
        """
        super(SimpleScrapingLocator, self).__init__(**kwargs)
        self.base_url = ensure_slash(url)
        self.timeout = timeout
        self._page_cache = {}
        self._seen = set()
        self._to_fetch = queue.Queue()
        self._bad_hosts = set()
        self.skip_externals = False
        self.num_workers = num_workers
        self._lock = threading.RLock()
        # See issue #45: we need to be resilient when the locator is used
        # in a thread, e.g. with concurrent.futures. We can't use self._lock
        # as it is for coordinating our internal threads - the ones created
        # in _prepare_threads.
        self._gplock = threading.RLock() 
Example 10
Project: storjnode   Author: StorjOld   File: monitor.py    MIT License
def __init__(self, node, limit=20, timeout=600):

        # CRAWLER PIPELINE
        self.pipeline_mutex = RLock()

        # sent info and peer requests but not yet received a response
        self.pipeline_scanning = {}  # {node_id: data}
        # |
        # | received info and peer responses and ready for bandwidth test
        # V use ordered dict to have a FIFO for bandwidth test
        self.pipeline_scanned = OrderedDict()  # {node_id: data}
        # |
        # V only test bandwidth of one node at a time to ensure best results
        self.pipeline_bandwith_test = None  # (node_id, data)
        # |
        # V peers processed and ready to save
        self.pipeline_processed = {}  # {node_id: data}

        self.stop_thread = False
        self.node = node
        self.server = self.node.server
        self.timeout = time.time() + timeout
        self.limit = limit 
Example 11
Project: storjnode   Author: StorjOld   File: map.py    MIT License
def __init__(self, storjnode, worker_num=32):
        """Network crawler used to map the network.

        Args:
            storjnode: Node used to crawl the network.
            worker_num: Number of workers used to crawl the network.
        """
        # pipeline: toscan -> scanning -> scanned
        self.toscan = {}  # {id: (ip, port)}
        self.scanning = {}  # {id: (ip, port)}
        self.scanned = {}  # {id: {"addr":(ip, port),"peers":[(id, ip, port)]}}

        self.mutex = RLock()
        self.server = storjnode.server
        self.worker_num = worker_num

        # start crawl at self
        self.toscan[storjnode.get_id()] = ("127.0.0.1", storjnode.port) 
Example 12
Project: king-phisher-plugins   Author: securestate   File: directory.py    BSD 3-Clause "New" or "Revised" License
def ftp_acquire(self):
		"""
		Get a thread-specific ftp handle. This handle must not be transferred to
		another thread and it must be closed with a follow up call to
		:py:meth:`.ftp_release` when it is no longer needed.

		:return: A handle to an FTP session.
		"""
		current_tid = threading.current_thread().ident
		if current_tid not in self._thread_local_ftp:
			logger.info("opening a new sftp session for tid 0x{0:x}".format(current_tid))
			ftp = self.ssh.open_sftp()
			ftp.chdir(self.cwd)
			self._thread_local_ftp[current_tid] = ObjectLock(ftp, threading.RLock())
		else:
			logger.debug("leasing an existing sftp session to tid 0x{0:x}".format(current_tid))
		obj_lock = self._thread_local_ftp[current_tid]
		obj_lock.lock.acquire()
		return obj_lock.object 
Example 13
Project: deb-python-cassandra-driver   Author: openstack   File: cluster.py    Apache License 2.0
def __init__(self, cluster, timeout,
                 schema_event_refresh_window,
                 topology_event_refresh_window,
                 status_event_refresh_window,
                 schema_meta_enabled=True,
                 token_meta_enabled=True):
        # use a weak reference to allow the Cluster instance to be GC'ed (and
        # shutdown) since implementing __del__ disables the cycle detector
        self._cluster = weakref.proxy(cluster)
        self._connection = None
        self._timeout = timeout

        self._schema_event_refresh_window = schema_event_refresh_window
        self._topology_event_refresh_window = topology_event_refresh_window
        self._status_event_refresh_window = status_event_refresh_window
        self._schema_meta_enabled = schema_meta_enabled
        self._token_meta_enabled = token_meta_enabled

        self._lock = RLock()
        self._schema_agreement_lock = Lock()

        self._reconnection_handler = None
        self._reconnection_lock = RLock()

        self._event_schedule_times = {} 
Example 14
Project: deb-python-cassandra-driver   Author: openstack   File: pool.py    Apache License 2.0
def __init__(self, host, host_distance, session):
        self.host = host
        self.host_distance = host_distance

        self._session = weakref.proxy(session)
        self._lock = RLock()
        self._conn_available_condition = Condition()

        log.debug("Initializing new connection pool for host %s", self.host)
        core_conns = session.cluster.get_core_connections_per_host(host_distance)
        self._connections = [session.cluster.connection_factory(host.address)
                             for i in range(core_conns)]

        self._keyspace = session.keyspace
        if self._keyspace:
            for conn in self._connections:
                conn.set_keyspace_blocking(self._keyspace)

        self._trash = set()
        self._next_trash_allowed_at = time.time()
        self.open_count = core_conns
        log.debug("Finished initializing new connection pool for host %s", self.host) 
Example 15
Project: pyblish-win   Author: pyblish   File: cookielib.py    GNU Lesser General Public License v3.0
def __init__(self, policy=None):
        if policy is None:
            policy = DefaultCookiePolicy()
        self._policy = policy

        self._cookies_lock = _threading.RLock()
        self._cookies = {} 
Example 16
Project: pyblish-win   Author: pyblish   File: __init__.py    GNU Lesser General Public License v3.0
def createLock(self):
        """
        Acquire a thread lock for serializing access to the underlying I/O.
        """
        if thread:
            self.lock = threading.RLock()
        else:
            self.lock = None 
Example 17
Project: pyblish-win   Author: pyblish   File: _common.py    GNU Lesser General Public License v3.0
def memoize(fun):
    """A simple memoize decorator for functions supporting (hashable)
    positional arguments.
    It also provides a cache_clear() function for clearing the cache:

    >>> @memoize
    ... def foo():
    ...     return 1
    ...
    >>> foo()
    1
    >>> foo.cache_clear()
    >>>
    """
    @functools.wraps(fun)
    def wrapper(*args, **kwargs):
        key = (args, frozenset(sorted(kwargs.items())))
        lock.acquire()
        try:
            try:
                return cache[key]
            except KeyError:
                ret = cache[key] = fun(*args, **kwargs)
        finally:
            lock.release()
        return ret

    def cache_clear():
        """Clear cache."""
        lock.acquire()
        try:
            cache.clear()
        finally:
            lock.release()

    lock = threading.RLock()
    cache = {}
    wrapper.cache_clear = cache_clear
    return wrapper 
Example 18
Project: pyblish-win   Author: pyblish   File: test_contextlib.py    GNU Lesser General Public License v3.0
def testWithRLock(self):
        lock = threading.RLock()
        self.boilerPlate(lock, lock._is_owned) 
Example 19
Project: pyblish-win   Author: pyblish   File: managers.py    GNU Lesser General Public License v3.0
def __init__(self, registry, address, authkey, serializer):
        assert isinstance(authkey, bytes)
        self.registry = registry
        self.authkey = AuthenticationString(authkey)
        Listener, Client = listener_client[serializer]

        # do authentication later
        self.listener = Listener(address=address, backlog=16)
        self.address = self.listener.address

        self.id_to_obj = {'0': (None, ())}
        self.id_to_refcount = {}
        self.mutex = threading.RLock()
        self.stop = 0 
Example 20
Project: Flask-Python-GAE-Login-Registration   Author: orymeyer   File: helpers.py    Apache License 2.0
def __init__(self, func, name=None, doc=None):
        self.__name__ = name or func.__name__
        self.__module__ = func.__module__
        self.__doc__ = doc or func.__doc__
        self.func = func
        self.lock = RLock() 
Example 21
Project: Flask-Python-GAE-Login-Registration   Author: orymeyer   File: helpers.py    Apache License 2.0
def __init__(self, func, name=None, doc=None):
        self.__name__ = name or func.__name__
        self.__module__ = func.__module__
        self.__doc__ = doc or func.__doc__
        self.func = func
        self.lock = RLock() 
Example 22
Project: recognize-text   Author: occrp-attic   File: service.py    MIT License
def __init__(self):
        self.lock = RLock()
        self.ocr = OCR() 
Example 23
Project: Learning-Concurrency-in-Python   Author: PacktPublishing   File: rlocks.py    MIT License
def __init__(self):
    self.a = 1
    self.b = 2
    self.rlock = threading.RLock() 
Example 24
Project: Learning-Concurrency-in-Python   Author: PacktPublishing   File: rlocks.py    MIT License
def modifyA(self):
    with self.rlock:
      print("Modifying A : RLock Acquired: {}".format(self.rlock._is_owned()))
      print("{}".format(self.rlock))
      self.a = self.a + 1
      time.sleep(5) 
Example 25
Project: Learning-Concurrency-in-Python   Author: PacktPublishing   File: rlocks.py    MIT License
def modifyB(self):
    with self.rlock:
      print("Modifying B : RLock Acquired: {}".format(self.rlock._is_owned()))
      print("{}".format(self.rlock))
      self.b = self.b - 1
      time.sleep(5) 
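
Examples 23-25 are fragments of the same small rlocks.py demo. A hedged reconstruction that ties them together might look like the sketch below; the class name Resource, the modifyBoth() method, the shortened sleep, and the thread driver at the bottom are assumptions added for illustration rather than parts of the original file.

import threading
import time

class Resource:   # hypothetical name; the original class name is not shown above
    def __init__(self):
        self.a = 1
        self.b = 2
        self.rlock = threading.RLock()

    def modifyA(self):
        with self.rlock:
            print("Modifying A : RLock Acquired: {}".format(self.rlock._is_owned()))
            self.a = self.a + 1
            time.sleep(1)   # shortened from the original sleep(5)

    def modifyB(self):
        with self.rlock:
            print("Modifying B : RLock Acquired: {}".format(self.rlock._is_owned()))
            self.b = self.b - 1
            time.sleep(1)   # shortened from the original sleep(5)

    def modifyBoth(self):
        # Because rlock is reentrant, this thread can hold it here and acquire it
        # again inside modifyA() and modifyB() without deadlocking.
        with self.rlock:
            self.modifyA()
            self.modifyB()

resource = Resource()
worker = threading.Thread(target=resource.modifyBoth)
worker.start()
worker.join()
print(resource.a, resource.b)  # 2 1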
Example 26
Project: iSDX   Author: sdn-ixp   File: peer.py    Apache License 2.0
def getlock(self, prefix):
        if prefix not in self.prefix_lock:
            self.prefix_lock[prefix] = RLock()
        return self.prefix_lock[prefix] 
Example 27
Project: iSDX   Author: sdn-ixp   File: participant_controller.py    Apache License 2.0
def getlock(self, prefixes):
        prefixes.sort()
        hsh = "-".join(prefixes)
        if hsh not in self.prefix_lock:
            #self.logger.debug("First Lock:: "+str(hsh))
            self.prefix_lock[hsh] = RLock()
        #else:
            #self.logger.debug("Repeat :: "+str(hsh))
        return self.prefix_lock[hsh] 
Example 28
Project: Ansible-Example-AB2018   Author: umit-ozturk   File: _collections.py    MIT License
def __init__(self, maxsize=10, dispose_func=None):
        self._maxsize = maxsize
        self.dispose_func = dispose_func

        self._container = self.ContainerCls()
        self.lock = RLock() 
Example 29
Project: Ansible-Example-AB2018   Author: umit-ozturk   File: cookies.py    MIT License
def __getstate__(self):
        """Unlike a normal CookieJar, this class is pickleable."""
        state = self.__dict__.copy()
        # remove the unpickleable RLock object
        state.pop('_cookies_lock')
        return state 
Example 30
Project: Ansible-Example-AB2018   Author: umit-ozturk   File: cookies.py    MIT License
def __setstate__(self, state):
        """Unlike a normal CookieJar, this class is pickleable."""
        self.__dict__.update(state)
        if '_cookies_lock' not in self.__dict__:
            self._cookies_lock = threading.RLock() 
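
The __getstate__/__setstate__ pair above is a general recipe for pickling objects that hold an RLock: drop the lock from the pickled state and recreate it when the object is restored. A minimal, standalone sketch of the same idea follows; the Jar class and its attributes are made up for illustration and are not the requests CookieJar.

import pickle
import threading

class Jar:
    def __init__(self):
        self.items = []
        self._lock = threading.RLock()

    def __getstate__(self):
        state = self.__dict__.copy()
        state.pop('_lock')               # RLock objects cannot be pickled
        return state

    def __setstate__(self, state):
        self.__dict__.update(state)
        self._lock = threading.RLock()   # recreate the lock on unpickling

restored = pickle.loads(pickle.dumps(Jar()))
with restored._lock:
    restored.items.append("usable after a pickle round trip")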
Example 31
Project: flasky   Author: RoseOu   File: helpers.py    MIT License
def __init__(self, func, name=None, doc=None):
        self.__name__ = name or func.__name__
        self.__module__ = func.__module__
        self.__doc__ = doc or func.__doc__
        self.func = func
        self.lock = RLock() 
Example 32
Project: flasky   Author: RoseOu   File: _collections.py    MIT License
def __init__(self, maxsize=10, dispose_func=None):
        self._maxsize = maxsize
        self.dispose_func = dispose_func

        self._container = self.ContainerCls()
        self.lock = RLock() 
Example 33
Project: flasky   Author: RoseOu   File: cookies.py    MIT License
def __getstate__(self):
        """Unlike a normal CookieJar, this class is pickleable."""
        state = self.__dict__.copy()
        # remove the unpickleable RLock object
        state.pop('_cookies_lock')
        return state 
Example 34
Project: flasky   Author: RoseOu   File: cookies.py    MIT License
def __setstate__(self, state):
        """Unlike a normal CookieJar, this class is pickleable."""
        self.__dict__.update(state)
        if '_cookies_lock' not in self.__dict__:
            self._cookies_lock = threading.RLock() 
Example 35
Project: flasky   Author: RoseOu   File: gthread.py    MIT License
def init_process(self):
        self.tpool = futures.ThreadPoolExecutor(max_workers=self.cfg.threads)
        self.poller = selectors.DefaultSelector()
        self._lock = RLock()
        super(ThreadWorker, self).init_process() 
Example 36
Project: flasky   Author: RoseOu   File: _collections.py    MIT License
def __init__(self, maxsize=10, dispose_func=None):
        self._maxsize = maxsize
        self.dispose_func = dispose_func

        self._container = self.ContainerCls()
        self.lock = RLock() 
Example 37
Project: flasky   Author: RoseOu   File: cookies.py    MIT License
def __getstate__(self):
        """Unlike a normal CookieJar, this class is pickleable."""
        state = self.__dict__.copy()
        # remove the unpickleable RLock object
        state.pop('_cookies_lock')
        return state 
Example 38
Project: flasky   Author: RoseOu   File: cookies.py    MIT License
def __setstate__(self, state):
        """Unlike a normal CookieJar, this class is pickleable."""
        self.__dict__.update(state)
        if '_cookies_lock' not in self.__dict__:
            self._cookies_lock = threading.RLock() 
Example 39
Project: myhoard   Author: aiven   File: append_only_state_manager.py    Apache License 2.0
def __init__(self, *, entries, lock=None, state_file):
        self.dead_entry_count = 0
        self.entries = entries
        self.lock = lock or threading.RLock()
        self.log = logging.getLogger(self.__class__.__name__)
        self.max_dead_entry_count = self.MAX_DEAD_ENTRY_COUNT
        self.state_file = state_file
        self.read_state() 
Example 40
Project: myhoard   Author: aiven   File: state_manager.py    Apache License 2.0
def __init__(self, *, allow_unknown_keys=False, lock=None, state, state_file):
        self.allow_unknown_keys = allow_unknown_keys
        self.lock = lock or threading.RLock()
        self.state = state
        self.state_file = state_file
        self.read_state() 
Example 41
Project: sic   Author: Yanixos   File: _collections.py    GNU General Public License v3.0
def __init__(self, maxsize=10, dispose_func=None):
        self._maxsize = maxsize
        self.dispose_func = dispose_func

        self._container = self.ContainerCls()
        self.lock = RLock() 
Example 42
Project: sic   Author: Yanixos   File: cookies.py    GNU General Public License v3.0
def __getstate__(self):
        """Unlike a normal CookieJar, this class is pickleable."""
        state = self.__dict__.copy()
        # remove the unpickleable RLock object
        state.pop('_cookies_lock')
        return state 
Example 43
Project: sic   Author: Yanixos   File: cookies.py    GNU General Public License v3.0
def __setstate__(self, state):
        """Unlike a normal CookieJar, this class is pickleable."""
        self.__dict__.update(state)
        if '_cookies_lock' not in self.__dict__:
            self._cookies_lock = threading.RLock() 
Example 44
Project: bigquerylayers   Author: smandaric   File: _collections.py    GNU General Public License v3.0
def __init__(self, maxsize=10, dispose_func=None):
        self._maxsize = maxsize
        self.dispose_func = dispose_func

        self._container = self.ContainerCls()
        self.lock = RLock() 
Example 45
Project: bigquerylayers   Author: smandaric   File: cookies.py    GNU General Public License v3.0
def __getstate__(self):
        """Unlike a normal CookieJar, this class is pickleable."""
        state = self.__dict__.copy()
        # remove the unpickleable RLock object
        state.pop('_cookies_lock')
        return state 
Example 46
Project: bigquerylayers   Author: smandaric   File: cookies.py    GNU General Public License v3.0
def __setstate__(self, state):
        """Unlike a normal CookieJar, this class is pickleable."""
        self.__dict__.update(state)
        if '_cookies_lock' not in self.__dict__:
            self._cookies_lock = threading.RLock() 
Example 47
Project: cloud-profiler-python   Author: GoogleCloudPlatform   File: pythonprofiler.py    Apache License 2.0
def __init__(self, period_ms):
    """Constructs the Wall time profiler.

    Args:
      period_ms: An integer specifying the sampling interval in milliseconds.
    """
    self._profile_type = 'wall'
    self._period_sec = float(period_ms) / 1000
    self._traces = collections.defaultdict(int)
    self._in_handler = False
    self._started = False
    self._last_sample_time = None
    self._trace_count = 0
    self._sample_time_lock = threading.RLock() 
Example 48
Project: AshsSDK   Author: thehappydinoa   File: _collections.py    MIT License
def __init__(self, maxsize=10, dispose_func=None):
        self._maxsize = maxsize
        self.dispose_func = dispose_func

        self._container = self.ContainerCls()
        self.lock = RLock() 
Example 49
Project: AshsSDK   Author: thehappydinoa   File: cookies.py    MIT License
def __getstate__(self):
        """Unlike a normal CookieJar, this class is pickleable."""
        state = self.__dict__.copy()
        # remove the unpickleable RLock object
        state.pop('_cookies_lock')
        return state 
Example 50
Project: AshsSDK   Author: thehappydinoa   File: cookies.py    MIT License
def __setstate__(self, state):
        """Unlike a normal CookieJar, this class is pickleable."""
        self.__dict__.update(state)
        if '_cookies_lock' not in self.__dict__:
            self._cookies_lock = threading.RLock()