Python functools.lru_cache() Examples

The following are 30 code examples for showing how to use functools.lru_cache(). These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example.

You may check out the related API usage on the sidebar.

You may also want to check out all available functions and classes of the functools module, or try the search function.

Example 1
Project: EEND   Author: hitachi-speech   File: kaldi_data.py    License: MIT License 8 votes vote down vote up
def load_wav(wav_rxfilename, start=0, end=None):
    """ Read an audio file and return its samples as a numpy.float32 array.
        "lru_cache" holds recently loaded audio so that can be called
        many times on the same audio file.
        OPTIMIZE: controls lru_cache size for random access,
        considering memory size
    """
    if wav_rxfilename.endswith('|'):
        # Kaldi-style piped command: run it and decode its stdout.
        proc = subprocess.Popen(wav_rxfilename[:-1], shell=True,
                                stdout=subprocess.PIPE)
        data, samplerate = sf.read(io.BytesIO(proc.stdout.read()),
                                   dtype='float32')
        # The pipe cannot seek, so slice after decoding.
        return data[start:end], samplerate
    if wav_rxfilename == '-':
        # Standard input: also unseekable, slice after decoding.
        data, samplerate = sf.read(sys.stdin, dtype='float32')
        return data[start:end], samplerate
    # Regular wav file: let the reader seek to the requested range.
    data, samplerate = sf.read(wav_rxfilename, start=start, stop=end)
    return data, samplerate
Example 2
Project: mars   Author: mars-project   File: distributor.py    License: Apache License 2.0 7 votes vote down vote up
def gen_distributor(scheduler_n_process, worker_n_process):
    """Build a distributor for a local cluster that routes scheduler uids to
    the first ``scheduler_n_process`` slots and worker uids to the rest."""
    class LocalClusterDistributor(Distributor):
        def __init__(self, n_process):
            super().__init__(n_process)
            self._scheduler_distributor = MarsDistributor(scheduler_n_process, 's:h1:')
            self._worker_distributor = MarsDistributor(worker_n_process, 'w:0:')

        @staticmethod
        def _is_worker_uid(uid):
            # Worker uids are strings carrying the 'w:' prefix.
            return isinstance(uid, str) and uid.startswith('w:')

        @functools.lru_cache(100)
        def distribute(self, uid):
            if not self._is_worker_uid(uid):
                return self._scheduler_distributor.distribute(uid)
            # Worker slots come after all scheduler slots.
            return self._worker_distributor.distribute(uid) + scheduler_n_process

        def make_same_process(self, uid, uid_rel, delta=0):
            target = (self._worker_distributor
                      if self._is_worker_uid(uid_rel)
                      else self._scheduler_distributor)
            return target.make_same_process(uid, uid_rel, delta=delta)

    return LocalClusterDistributor(scheduler_n_process + worker_n_process)
Example 3
Project: sawtooth-core   Author: hyperledger   File: state_view.py    License: Apache License 2.0 6 votes vote down vote up
def lru_cached_method(*lru_args, **lru_kwargs):
    """Decorator factory: LRU-cache a method without keeping *self* alive.

    The cache closes over a weak reference to the instance instead of the
    instance itself, so caching does not create a self-reference cycle that
    would keep the object alive once all external references are dropped.
    """
    def decorator(wrapped_fn):
        @wraps(wrapped_fn)
        def wrapped(self, *args, **kwargs):
            self_ref = weakref.ref(self)

            @wraps(wrapped_fn)
            @lru_cache(*lru_args, **lru_kwargs)
            def cached(*c_args, **c_kwargs):
                return wrapped_fn(self_ref(), *c_args, **c_kwargs)

            # Shadow the method on the instance so subsequent calls go
            # straight to the per-instance cache.
            setattr(self, wrapped_fn.__name__, cached)
            return cached(*args, **kwargs)
        return wrapped
    return decorator
Example 4
Project: sawtooth-core   Author: hyperledger   File: settings_view.py    License: Apache License 2.0 6 votes vote down vote up
def lru_cached_method(*lru_args, **lru_kwargs):
    """Cache a method's results per instance via ``lru_cache``.

    A weak reference to the instance is captured instead of a strong one,
    preventing the reference cycle that would otherwise stop the garbage
    collector from reclaiming the object.
    """
    def decorator(wrapped_fn):
        @wraps(wrapped_fn)
        def wrapped(self, *args, **kwargs):
            weak_self = weakref.ref(self)

            @wraps(wrapped_fn)
            @lru_cache(*lru_args, **lru_kwargs)
            def memoized(*inner_args, **inner_kwargs):
                return wrapped_fn(weak_self(), *inner_args, **inner_kwargs)

            # Replace the bound method on this instance with the memoized
            # closure; future lookups bypass this wrapper entirely.
            setattr(self, wrapped_fn.__name__, memoized)
            return memoized(*args, **kwargs)
        return wrapped
    return decorator
Example 5
Project: pdbpp   Author: pdbpp   File: pdbpp.py    License: BSD 3-Clause "New" or "Revised" License 6 votes vote down vote up
def lru_cache(maxsize):
        """Simple unbounded cache (``maxsize`` is accepted but ignored) for
        py27 compatibility.

        Given that pdb there uses linecache.getline for each line with
        do_list a cache makes a big difference.
        """

        def dec(fn):
            # Per-function memo table keyed on the positional arguments.
            cache = {}

            @wraps(fn)
            def wrapper(*args):
                # EAFP: try the cache first, compute and store on miss.
                try:
                    ret = cache[args]
                except KeyError:
                    ret = cache[args] = fn(*args)
                return ret

            return wrapper

        return dec

# If it contains only _, digits, letters, [] or dots, it's probably side
# effects free. 
Example 6
Project: designate   Author: openstack   File: utils.py    License: Apache License 2.0 6 votes vote down vote up
def cache_result(function):
    """A function decorator to cache the result of the first call, every
    additional call will simply return the cached value.

    If we were python3 only, we would have used functools.lru_cache() in place
    of this. If there's a python2 backport in a lightweight library, then we
    should switch to that.
    """
    # Single-slot cache held in a closed-over dict so the inner wrapper can
    # mutate it (a py2-friendly alternative to ``nonlocal``).
    memo = {}

    def wrapper(cls_instance):
        if "value" not in memo:
            memo["value"] = function(cls_instance)
        return memo["value"]
    return wrapper
Example 7
Project: bioforum   Author: reBiocoder   File: trans_real.py    License: MIT License 6 votes vote down vote up
def check_for_language(lang_code):
    """
    Check whether there is a global language file for the given language
    code. This is used to decide whether a user-provided language is
    available.

    lru_cache should have a maxsize to prevent from memory exhaustion attacks,
    as the provided language codes are taken from the HTTP request. See also
    <https://www.djangoproject.com/weblog/2007/oct/26/security-fix/>.
    """
    # Reject malformed codes up front (#21458).
    if lang_code is None or not language_code_re.search(lang_code):
        return False
    # Short-circuits on the first locale path that has a 'django' catalog.
    return any(
        gettext_module.find('django', path, [to_locale(lang_code)]) is not None
        for path in all_locale_paths()
    )
Example 8
Project: loopchain   Author: icon-project   File: test_lru_cache.py    License: Apache License 2.0 6 votes vote down vote up
def test_cache_clear(self):
        from unittest.mock import MagicMock
        call_check_mock = MagicMock()

        @lru_cache(maxsize=4)
        def target_func():
            call_check_mock()

        target_func()
        self.assertEqual(call_check_mock.call_count, 1)
        target_func()
        self.assertEqual(call_check_mock.call_count, 1)


        # WHEN
        target_func.cache_clear()
        target_func()
        self.assertEqual(call_check_mock.call_count, 2) 
Example 9
Project: QUANTAXIS   Author: QUANTAXIS   File: QADataStruct.py    License: MIT License 6 votes vote down vote up
def tradetime(self):
        """Return the date under the exchange's trading calendar.

        Returns:
            The structure's ``date`` attribute, or ``None`` when reading it
            fails (e.g. the attribute is missing).
        """

        try:
            return self.date
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # are no longer silently swallowed.
        except Exception:
            return None

    # @property
    # @lru_cache()
    # def semiannual(self):
    #     return self.resample('SA')
Example 10
Project: easse   Author: feralvam   File: lcs.py    License: GNU General Public License v3.0 6 votes vote down vote up
def get_lcs(seq1, seq2):
    '''Return the longest common subsequence of the two sequences, computed
    recursively with memoization that lives only in this call's scope.'''
    @lru_cache(maxsize=None)
    def _lcs(a, b):
        # Empty sequence: nothing in common.
        if not a or not b:
            return []
        # Matching tails extend the LCS of the prefixes.
        if a[-1] == b[-1]:
            return _lcs(a[:-1], b[:-1]) + [a[-1]]
        left = _lcs(a[:-1], b)
        right = _lcs(a, b[:-1])
        return max(left, right, key=len)

    try:
        # Tuples are hashable, so they can serve as lru_cache keys.
        return _lcs(tuple(seq1), tuple(seq2))
    except RecursionError as e:
        print(e)
        # TODO: Handle this case
        return []
Example 11
Project: python-stdlib-list   Author: jackmaney   File: base.py    License: MIT License 6 votes vote down vote up
def in_stdlib(module_name, version=None):
    """
    Return a ``bool`` indicating if module ``module_name`` is in the list of stdlib
    symbols for python version ``version``. If ``version`` is ``None`` (default), the
    version of current python interpreter is used.

    Note that ``True`` will be returned for built-in modules too, since this project
    considers they are part of stdlib. See :issue:21.

    It relies on ``@lru_cache`` to cache the stdlib list and query results for similar
    calls. Therefore it is much more efficient than ``module_name in stdlib_list()``
    especially if you wish to perform multiple checks.

    :param str|None module_name: The module name (as a string) to query for.
    :param str|None version: The version (as a string) whose list of libraries you want
    (one of ``"2.6"``, ``"2.7"``, ``"3.2"``, ``"3.3"``, ``"3.4"``, or ``"3.5"``).
    If not specified, the current version of Python will be used.

    :return: A bool indicating if the given module name is part of standard libraries
    for the specified version of Python.
    :rtype: bool
    """
    # Fixed ``:rtype:`` above: the function returns a bool, not a list.
    ref_list = _stdlib_list_with_cache(version=version)
    return module_name in ref_list
Example 12
Project: fonduer   Author: HazyResearch   File: utils_table.py    License: MIT License 6 votes vote down vote up
def _min_range_diff(coordinates: Tuple[Tuple[int, int]], absolute: bool = True) -> int:
    """Get the minimum range difference.

    # Using Tuple instead of list because list is unhashable with `lru_cache`
    # if absolute=True, return the absolute value of minimum magnitude difference
    # if absolute=False, return the raw value of minimum magnitude difference
    # TODO: move back to efficient implementation once it sees that
    # min_range_diff(3,3,2,3) = 0 return max(0, max(a_end - b_start, b_end -
    # a_start))

    :param coordinates: A tuple of a couple (start, end) indexes of the objects.
    :param absolute: Whether use absolute value, defaults to True.
    :return: The minimum range difference.
    """
    # Named inner function instead of a lambda bound to a name (PEP 8 / E731).
    def _signed(value: int) -> int:
        return abs(value) if absolute else value

    # Brute force: for every combination of points (one per range), take the
    # smallest-magnitude pairwise difference, then the smallest overall.
    return min(
        [
            _signed(min([x - y for x, y in zip(ii[:-1], ii[1:])], key=abs))
            for ii in itertools.product(
                *[range(start, end + 1) for start, end in coordinates]
            )
        ],
        key=abs,
    )
Example 13
Project: xalpha   Author: refraction-ray   File: universal.py    License: MIT License 5 votes vote down vote up
def lru_cache_time(ttl=None, maxsize=None):
    """
    TTL support on lru_cache

    :param ttl: float or int, seconds
    :param maxsize: int, maxsize for lru_cache
    :return:
    """
    # NOTE(review): ttl is used as a divisor below, so leaving ttl=None (the
    # default) raises TypeError on the first wrapped call -- callers must
    # supply a non-zero number. Confirm whether this is intentional.

    def wrapper(func):
        # Lazy function that makes sure the lru_cache() invalidate after X secs
        @lru_cache(maxsize)
        def time_aware(_ttl, *args, **kwargs):
            # _ttl only participates in the cache key; it is not forwarded.
            return func(*args, **kwargs)

        # Publish the cached variant on this module as "<name>_ttl" so
        # newfunc can look it up by name at call time.
        setattr(thismodule, func.__name__ + "_ttl", time_aware)

        @wraps(func)
        def newfunc(*args, **kwargs):
            # All calls inside the same ttl-sized time bucket share a cache
            # key, so entries effectively expire after ~ttl seconds.
            ttl_hash = round(time.time() / ttl)
            f_ttl = getattr(thismodule, func.__name__ + "_ttl")
            return f_ttl(ttl_hash, *args, **kwargs)

        return newfunc

    return wrapper


# TODO: 缓存 token 的合适时间尺度 
Example 14
Project: python-podman   Author: containers   File: __init__.py    License: Apache License 2.0 5 votes vote down vote up
def cached_property(fn):
    """Decorate property to cache return value."""
    # The LRU cache keys on the instance passed as the sole argument, so up
    # to 8 instances' values are retained at once.
    cached = functools.lru_cache(maxsize=8)(fn)
    return property(cached)
Example 15
Project: pyGSTi   Author: pyGSTio   File: opttools.py    License: Apache License 2.0 5 votes vote down vote up
def cache_by_hashed_args(obj):
    """ Decorator for caching a function values

    .. deprecated:: v0.9.8.3
       :func:`cache_by_hashed_args` will be removed in pyGSTi
       v0.9.9. Use :func:`functools.lru_cache` instead.
    """
    # Thin shim over the stdlib cache with a fixed bound of 128 entries.
    decorate = lru_cache(maxsize=128)
    return decorate(obj)
Example 16
Project: pyGSTi   Author: pyGSTio   File: references.py    License: Apache License 2.0 5 votes vote down vote up
def _memo(fn):
    # Property whose value is computed once per instance slot and then
    # served from a one-entry LRU cache.
    cached_fn = functools.lru_cache(maxsize=1)(fn)
    return property(cached_fn)
Example 17
Project: linter-pylama   Author: AtomLinter   File: style_guide.py    License: MIT License 5 votes vote down vote up
def lru_cache(maxsize=128, typed=False):
        """Fallback stub for a missing functools.lru_cache.

        Mirrors the real signature but performs no caching: the decorated
        function is handed back unchanged.
        """
        return lambda func: func


# TODO(sigmavirus24): Determine if we need to use enum/enum34 
Example 18
Project: linter-pylama   Author: AtomLinter   File: pycodestyle.py    License: MIT License 5 votes vote down vote up
def lru_cache(maxsize=128):  # noqa as it's a fake implementation.
        """No-op stand-in for functools.lru_cache: caching here is purely an
        optimization, so on pythons without it we simply decorate by
        returning the function unchanged. Python 3.2+ will just get better
        performances, time to upgrade?
        """
        def _identity(function):
            return function

        return _identity
Example 19
Project: tartiflette   Author: tartiflette   File: engine.py    License: MIT License 5 votes vote down vote up
def __init__(
        self,
        sdl=None,
        schema_name=None,
        error_coercer=None,
        custom_default_resolver=None,
        custom_default_type_resolver=None,
        modules=None,
        query_cache_decorator=UNDEFINED_VALUE,
        json_loader=None,
        custom_default_arguments_coercer=None,
    ) -> None:
        """
        Creates an uncooked Engine instance.

        All arguments are stored as-is for later use; no validation or
        schema building happens here (``self._cooked`` stays False).

        :param sdl: schema definition input, stored on ``self._sdl``
        :param schema_name: name recorded for the schema
        :param error_coercer: error coercion callable, stored untouched
        :param custom_default_resolver: default resolver override
        :param custom_default_type_resolver: default type resolver override
        :param modules: modules recorded for later loading
        :param query_cache_decorator: decorator used to cache queries;
            when left as ``UNDEFINED_VALUE`` it defaults to
            ``lru_cache(maxsize=512)`` (pass ``None`` explicitly to opt out)
        :param json_loader: JSON parsing callable; falls back to
            ``default_json_module.loads`` when falsy
        :param custom_default_arguments_coercer: default arguments coercer
            override
        """
        # pylint: disable=too-many-arguments
        self._schema = None
        self._schema_name = schema_name
        self._error_coercer = error_coercer
        self._custom_default_resolver = custom_default_resolver
        self._custom_default_type_resolver = custom_default_type_resolver
        self._custom_default_arguments_coercer = (
            custom_default_arguments_coercer
        )
        self._modules = modules
        # UNDEFINED_VALUE is the sentinel distinguishing "not given" from an
        # explicit None (which disables query caching).
        self._query_cache_decorator = (
            query_cache_decorator
            if query_cache_decorator is not UNDEFINED_VALUE
            else lru_cache(maxsize=512)
        )
        self._sdl = sdl
        self._cooked = False
        self._build_response = None
        self._query_executor = None
        self._subscription_executor = None
        self._cached_parse_and_validate_query = None
        self._json_loader = json_loader or default_json_module.loads
Example 20
Project: filesystem_spec   Author: intake   File: dircache.py    License: BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def __init__(
        self,
        use_listings_cache=True,
        listings_expiry_time=None,
        max_paths=None,
        **kwargs
    ):
        """

        Parameters
        ----------
        use_listings_cache: bool
            If False, this cache never returns items, but always reports KeyError,
            and setting items has no effect
        listings_expiry_time: int (optional)
            Time in seconds that a listing is considered valid. If None,
            listings do not expire.
        max_paths: int (optional)
            The number of most recent listings that are considered valid; 'recent'
            refers to when the entry was set.
        """
        self._cache = {}
        self._times = {}
        self.use_listings_cache = use_listings_cache
        self.listings_expiry_time = listings_expiry_time
        self.max_paths = max_paths
        if max_paths:
            # LRU of size max_paths+1: touching a key beyond capacity evicts
            # the least-recently-used listing from _cache.
            def _evict(key):
                return self._cache.pop(key, None)

            self._q = lru_cache(max_paths + 1)(_evict)
Example 21
Project: filesystem_spec   Author: intake   File: caching.py    License: BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def __init__(self, blocksize, fetcher, size, maxblocks=32):
        """Initialise the block cache.

        Delegates common state to the parent, then wraps ``_fetch_block``
        in an LRU cache that retains at most *maxblocks* fetched blocks.
        """
        super().__init__(blocksize, fetcher, size)
        self.maxblocks = maxblocks
        # Number of blocks needed to cover `size` bytes.
        self.nblocks = math.ceil(size / blocksize)
        self._fetch_block_cached = functools.lru_cache(maxblocks)(self._fetch_block)
Example 22
Project: filesystem_spec   Author: intake   File: caching.py    License: BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def __setstate__(self, state):
        """Restore pickled state, rebuilding the unpicklable block cache."""
        self.__dict__.update(state)
        # lru_cache wrappers cannot be pickled, so recreate the cached
        # fetcher from the restored maxblocks setting.
        fetcher = self._fetch_block
        self._fetch_block_cached = functools.lru_cache(state["maxblocks"])(fetcher)
Example 23
Project: python-netsurv   Author: sofia-netsurv   File: pycodestyle.py    License: MIT License 5 votes vote down vote up
def lru_cache(maxsize=128):  # noqa as it's a fake implementation.
        """Fake lru_cache: caching is only an optimization here, so do
        nothing and return the function as-is. Python 3.2+ will just get
        better performances, time to upgrade?
        """
        def _passthrough(function):
            return function

        return _passthrough
Example 24
Project: python-netsurv   Author: sofia-netsurv   File: pycodestyle.py    License: MIT License 5 votes vote down vote up
def lru_cache(maxsize=128):  # noqa as it's a fake implementation.
        """Stand-in lru_cache that performs no caching: decorated functions
        come back unchanged. Python 3.2+ will just get better performances,
        time to upgrade?
        """
        def _noop_decorator(function):
            return function

        return _noop_decorator
Example 25
Project: python-pytest-cases   Author: smarie   File: case_funcs.py    License: BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def cases_generator(names=None,       # type: Union[str, Callable[[Any], str], Iterable[str]]
                    lru_cache=False,  # type: bool,
                    case_func=DECORATED,
                    **param_ranges    # type: Iterable[Any]
                    ):
    """
    Decorator marking a case function as a cases generator. `param_ranges`
    is a named list of parameter ranges; their cartesian product (via
    `itertools.product`) yields one case per combination, and the matching
    parameter values are passed to the decorated function when that case runs.

    >>> @cases_generator("test with i={i}", i=range(10))
    >>> def case_10_times(i):
    >>>     ''' Generates 10 cases '''
    >>>     ins = dict(a=i, b=i+1)
    >>>     outs = i+1, i+2
    >>>     return ins, outs, None

    :param names: a name template transformed into each case name via
        `names.format(**params)`, where `params` holds that case's parameter
        values. A callable may be given instead (`names(**params)` is used),
        or an explicit list of names of the correct length (an error is
        raised otherwise).
    :param lru_cache: a boolean (default False); when True the generated
        cases are cached, equivalent to adding `@lru_cache(maxsize=n)` with
        n the total number of generated cases.
    :param param_ranges: named parameters, each mapped to the list of values
        used to generate cases; names must match the underlying function's
        parameters.
    :return:
    """
    combos = list(product(*param_ranges.values()))
    setattr(case_func, _GENERATOR_FIELD, (names, param_ranges.keys(), combos))
    if lru_cache:
        # One cache slot per generated case.
        case_func = lru(maxsize=len(combos))(case_func)

    return case_func
Example 26
Project: python-pytest-cases   Author: smarie   File: test_so2.py    License: BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def setup_dataset(db):
    """One-time per-db setup; callers rely on an lru_cache decorator applied
    elsewhere to ensure this runs only once per db."""
    message = "setup for %s" % db
    print(message)
Example 27
Project: python-pytest-cases   Author: smarie   File: test_so2.py    License: BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def finalize_dataset(db):
    """Per-db teardown; an lru_cache decorator applied at the call site keeps
    this to a single run per db."""
    message = "teardown for %s" % db
    print(message)
Example 28
Project: Bert-Chinese-Text-Classification-Pytorch   Author: 649453932   File: tokenization_gpt2.py    License: MIT License 5 votes vote down vote up
def lru_cache():
        """No-op replacement for functools.lru_cache: decorating with it
        leaves the function untouched."""
        def passthrough(func):
            return func

        return passthrough
Example 29
Project: vnpy_crypto   Author: birforce   File: _compat.py    License: MIT License 5 votes vote down vote up
def b(s):
        # Identity passthrough -- presumably the py3 branch of a py2/py3
        # bytes-compat shim (see the lru_cache backport note below);
        # confirm against the py2 counterpart.
        return s


# --- stdlib additions


# py 3.2 functools.lru_cache
# Taken from: http://code.activestate.com/recipes/578078
# Credit: Raymond Hettinger 
Example 30
Project: indy-plenum   Author: hyperledger   File: node_handler.py    License: Apache License 2.0 5 votes vote down vote up
def _steward_has_node(self, steward_nym) -> bool:
        """Return True when any node recorded in state belongs to *steward_nym*.

        Scans every entry because lru_cache cannot be used here: a steward
        might have a node in future and unfortunately lru_cache does not
        allow single entries to be cleared.
        TODO: Modify lru_cache to clear certain entities
        """
        # Iterate values only -- the node nym key was previously fetched via
        # .items() and never used; also avoid rebinding the loop variable.
        for raw_data in self.state.as_dict.values():
            node_data = self.state_serializer.deserialize(raw_data)
            if node_data.get(f.IDENTIFIER.nm) == steward_nym:
                return True
        return False