Python functools.lru_cache() Examples

The following are 30 code examples of functools.lru_cache(), drawn from open-source projects. The originating project and source file are noted above each example.
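Before the project examples, here is a minimal self-contained sketch of the decorator in action; the function and values are illustrative and not taken from any project below.

import functools

@functools.lru_cache(maxsize=128)
def fibonacci(n):
    # Results of earlier calls are reused, so the recursion runs in linear time.
    if n < 2:
        return n
    return fibonacci(n - 1) + fibonacci(n - 2)

print(fibonacci(30))           # computed once; repeated calls are cache hits
print(fibonacci.cache_info())  # CacheInfo(hits=..., misses=..., maxsize=128, currsize=...)
fibonacci.cache_clear()        # empties the cache and resets the statistics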
Example #1
Source File: kaldi_data.py    From EEND with MIT License
def load_wav(wav_rxfilename, start=0, end=None):
    """ This function reads audio file and return data in numpy.float32 array.
        "lru_cache" holds recently loaded audio so that can be called
        many times on the same audio file.
        OPTIMIZE: controls lru_cache size for random access,
        considering memory size
    """
    if wav_rxfilename.endswith('|'):
        # input piped command
        p = subprocess.Popen(wav_rxfilename[:-1], shell=True,
                             stdout=subprocess.PIPE)
        data, samplerate = sf.read(io.BytesIO(p.stdout.read()),
                                   dtype='float32')
        # cannot seek
        data = data[start:end]
    elif wav_rxfilename == '-':
        # stdin
        data, samplerate = sf.read(sys.stdin, dtype='float32')
        # cannot seek
        data = data[start:end]
    else:
        # normal wav file
        data, samplerate = sf.read(wav_rxfilename, start=start, stop=end)
    return data, samplerate 
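The docstring mentions lru_cache, but the decorator itself is outside this excerpt. One plausible way to attach it, sketched here with an illustrative cache-size constant (CACHE_SIZE is not a name from the EEND source):

from functools import lru_cache

CACHE_SIZE = 10  # illustrative; tune to available memory, as the OPTIMIZE note suggests

@lru_cache(maxsize=CACHE_SIZE)
def load_wav_cached(wav_rxfilename, start=0, end=None):
    # Repeated reads of the same file and range are served from memory.
    return load_wav(wav_rxfilename, start, end)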
Example #2
Source File: distributor.py    From mars with Apache License 2.0
def gen_distributor(scheduler_n_process, worker_n_process):
    class LocalClusterDistributor(Distributor):
        def __init__(self, n_process):
            super().__init__(n_process)
            self._scheduler_distributor = MarsDistributor(scheduler_n_process, 's:h1:')
            self._worker_distributor = MarsDistributor(worker_n_process, 'w:0:')

        @staticmethod
        def _is_worker_uid(uid):
            return isinstance(uid, str) and uid.startswith('w:')

        @functools.lru_cache(100)
        def distribute(self, uid):
            if self._is_worker_uid(uid):
                return self._worker_distributor.distribute(uid) + scheduler_n_process

            return self._scheduler_distributor.distribute(uid)

        def make_same_process(self, uid, uid_rel, delta=0):
            if self._is_worker_uid(uid_rel):
                return self._worker_distributor.make_same_process(uid, uid_rel, delta=delta)
            return self._scheduler_distributor.make_same_process(uid, uid_rel, delta=delta)

    return LocalClusterDistributor(scheduler_n_process + worker_n_process) 
Example #3
Source File: pdbpp.py    From pdbpp with BSD 3-Clause "New" or "Revised" License
def lru_cache(maxsize):
        """Simple cache (with no maxsize basically) for py27 compatibility.

        Given that pdb there uses linecache.getline for each line with
        do_list a cache makes a big differene."""

        def dec(fn, *args):
            cache = {}

            @wraps(fn)
            def wrapper(*args):
                key = args
                try:
                    ret = cache[key]
                except KeyError:
                    ret = cache[key] = fn(*args)
                return ret

            return wrapper

        return dec

# If it contains only _, digits, letters, [] or dots, it's probably
# side-effect free. 
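This shim only matters where functools.lru_cache is unavailable (Python 2.7). A sketch of how such a fallback is commonly wired up at import time; the structure is illustrative rather than the exact pdbpp code:

from functools import wraps

try:
    from functools import lru_cache  # Python 3.2+
except ImportError:
    def lru_cache(maxsize):
        # Unbounded dict-based cache, API-compatible enough for positional-only calls.
        def dec(fn):
            cache = {}

            @wraps(fn)
            def wrapper(*args):
                try:
                    return cache[args]
                except KeyError:
                    ret = cache[args] = fn(*args)
                    return ret
            return wrapper
        return dec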
Example #4
Source File: QADataStruct.py    From QUANTAXIS with MIT License
def tradetime(self):
        """返回交易所日历下的日期

        Returns:
            [type] -- [description]
        """

        try:
            return self.date
        except:
            return None

    # @property
    # @lru_cache()
    # def semiannual(self):
    #     return self.resample('SA') 
Example #5
Source File: test_lru_cache.py    From loopchain with Apache License 2.0
def test_cache_clear(self):
        from unittest.mock import MagicMock
        call_check_mock = MagicMock()

        @lru_cache(maxsize=4)
        def target_func():
            call_check_mock()

        target_func()
        self.assertEqual(call_check_mock.call_count, 1)
        target_func()
        self.assertEqual(call_check_mock.call_count, 1)


        # WHEN
        target_func.cache_clear()
        target_func()
        self.assertEqual(call_check_mock.call_count, 2) 
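The same behaviour can also be checked without a mock by reading the statistics lru_cache exposes; a small sketch that is not part of the loopchain test suite:

from functools import lru_cache

@lru_cache(maxsize=4)
def target_func():
    return 42

target_func()
target_func()
assert target_func.cache_info().misses == 1  # first call computed the value
assert target_func.cache_info().hits == 1    # second call came from the cache
target_func.cache_clear()
target_func()
assert target_func.cache_info().misses == 1  # counters reset together with the cache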
Example #6
Source File: settings_view.py    From sawtooth-core with Apache License 2.0
def lru_cached_method(*lru_args, **lru_kwargs):
    def decorator(wrapped_fn):
        @wraps(wrapped_fn)
        def wrapped(self, *args, **kwargs):
            # Use a weak reference to self; this prevents a self-reference
            # cycle that fools the garbage collector into thinking the instance
            # shouldn't be dropped when all external references are dropped.
            weak_ref_to_self = weakref.ref(self)

            @wraps(wrapped_fn)
            @lru_cache(*lru_args, **lru_kwargs)
            def cached(*args, **kwargs):
                return wrapped_fn(weak_ref_to_self(), *args, **kwargs)
            setattr(self, wrapped_fn.__name__, cached)
            return cached(*args, **kwargs)
        return wrapped
    return decorator 
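A sketch of how a decorator like this might be applied; the class below is purely illustrative and not part of sawtooth-core:

class SettingsLike:
    def __init__(self, store):
        self._store = store

    @lru_cached_method(maxsize=128)
    def get_setting(self, key):
        # Expensive lookup, cached per instance and keyed only on the arguments.
        return self._store[key]

view = SettingsLike({'sawtooth.consensus.algorithm': 'poet'})
view.get_setting('sawtooth.consensus.algorithm')  # computed
view.get_setting('sawtooth.consensus.algorithm')  # cache hit; self is held only weakly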
Example #7
Source File: state_view.py    From sawtooth-core with Apache License 2.0
def lru_cached_method(*lru_args, **lru_kwargs):
    def decorator(wrapped_fn):
        @wraps(wrapped_fn)
        def wrapped(self, *args, **kwargs):
            # Use a weak reference to self; this prevents a self-reference
            # cycle that fools the garbage collector into thinking the instance
            # shouldn't be dropped when all external references are dropped.
            weak_ref_to_self = weakref.ref(self)

            @wraps(wrapped_fn)
            @lru_cache(*lru_args, **lru_kwargs)
            def cached(*args, **kwargs):
                return wrapped_fn(weak_ref_to_self(), *args, **kwargs)
            setattr(self, wrapped_fn.__name__, cached)
            return cached(*args, **kwargs)
        return wrapped
    return decorator 
Example #8
Source File: utils_table.py    From fonduer with MIT License
def _min_range_diff(coordinates: Tuple[Tuple[int, int]], absolute: bool = True) -> int:
    """Get the minimum range difference.

    # Using Tuple instead of list because list is unhashable with `lru_cache`
    # if absolute=True, return the absolute value of minimum magnitude difference
    # if absolute=False, return the raw value of minimum magnitude difference
    # TODO: move back to efficient implementation once it sees that
    # min_range_diff(3,3,2,3) = 0 return max(0, max(a_end - b_start, b_end -
    # a_start))

    :param coordinates: A tuple of a couple (start, end) indexes of the objects.
    :param absolute: Whether to use the absolute value, defaults to True.
    :return: The minimum range difference.
    """
    f = lambda x: (abs(x) if absolute else x)
    return min(
        [
            f(min([x - y for x, y in zip(ii[:-1], ii[1:])], key=abs))
            for ii in itertools.product(
                *[range(start, end + 1) for start, end in coordinates]
            )
        ],
        key=abs,
    ) 
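The note about using Tuple rather than list points at the key constraint: lru_cache hashes its arguments, so callers must pass hashable containers. A minimal illustration of that requirement, independent of fonduer:

from functools import lru_cache

@lru_cache(maxsize=None)
def span_width(coordinates):
    return max(end for _, end in coordinates) - min(start for start, _ in coordinates)

span_width(((1, 4), (7, 9)))    # fine: tuples of tuples are hashable
# span_width([[1, 4], [7, 9]])  # TypeError: unhashable type: 'list'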
Example #9
Source File: base.py    From python-stdlib-list with MIT License
def in_stdlib(module_name, version=None):
    """
    Return a ``bool`` indicating if module ``module_name`` is in the list of stdlib
    symbols for python version ``version``. If ``version`` is ``None`` (default), the
    version of the current Python interpreter is used.

    Note that ``True`` will be returned for built-in modules too, since this project
    considers them part of the stdlib. See :issue:`21`.

    It relies on ``@lru_cache`` to cache the stdlib list and query results for similar
    calls. Therefore it is much more efficient than ``module_name in stdlib_list()``
    especially if you wish to perform multiple checks.

    :param str|None module_name: The module name (as a string) to query for.
    :param str|None version: The version (as a string) whose list of libraries you want
    (one of ``"2.6"``, ``"2.7"``, ``"3.2"``, ``"3.3"``, ``"3.4"``, or ``"3.5"``).
    If not specified, the current version of Python will be used.

    :return: A bool indicating if the given module name is part of standard libraries
    for the specified version of Python.
    :rtype: bool
    """
    ref_list = _stdlib_list_with_cache(version=version)
    return module_name in ref_list 
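The helper _stdlib_list_with_cache is not shown in this excerpt; one plausible shape for it, sketched as a simplification rather than the library's exact code:

from functools import lru_cache

from stdlib_list import stdlib_list

@lru_cache(maxsize=16)
def _stdlib_list_with_cache(version=None):
    # Delegate to the uncached stdlib_list() and keep one result per version,
    # so repeated in_stdlib() checks do not re-read the bundled data files.
    return set(stdlib_list(version=version))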
Example #10
Source File: trans_real.py    From bioforum with MIT License
def check_for_language(lang_code):
    """
    Check whether there is a global language file for the given language
    code. This is used to decide whether a user-provided language is
    available.

    lru_cache should have a maxsize to prevent memory exhaustion attacks,
    as the provided language codes are taken from the HTTP request. See also
    <https://www.djangoproject.com/weblog/2007/oct/26/security-fix/>.
    """
    # First, a quick check to make sure lang_code is well-formed (#21458)
    if lang_code is None or not language_code_re.search(lang_code):
        return False
    for path in all_locale_paths():
        if gettext_module.find('django', path, [to_locale(lang_code)]) is not None:
            return True
    return False 
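The decorator is not included in this excerpt; following the docstring's advice, the function would be wrapped with a bounded cache, roughly as below (the maxsize value is illustrative):

@functools.lru_cache(maxsize=1000)
def check_for_language(lang_code):
    ...  # body as shown above; the bounded decoration is the point of this sketch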
Example #11
Source File: lcs.py    From easse with GNU General Public License v3.0
def get_lcs(seq1, seq2):
    '''Returns the longest common subsequence using memoization (only in local scope)'''
    @lru_cache(maxsize=None)
    def recursive_lcs(seq1, seq2):
        if len(seq1) == 0 or len(seq2) == 0:
            return []
        if seq1[-1] == seq2[-1]:
            return recursive_lcs(seq1[:-1], seq2[:-1]) + [seq1[-1]]
        else:
            return max(recursive_lcs(seq1[:-1], seq2), recursive_lcs(seq1, seq2[:-1]), key=lambda seq: len(seq))

    try:
        return recursive_lcs(tuple(seq1), tuple(seq2))
    except RecursionError as e:
        print(e)
        # TODO: Handle this case
        return [] 
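A quick usage sketch of the helper above, with illustrative inputs:

print(get_lcs("simplification", "simulation"))
# ['s', 'i', 'm', 'l', 'a', 't', 'i', 'o', 'n'], the longest common subsequence as a list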
Example #12
Source File: utils.py    From designate with Apache License 2.0
def cache_result(function):
    """A function decorator to cache the result of the first call, every
    additional call will simply return the cached value.

    If we were python3 only, we would have used functools.lru_cache() in place
    of this. If there's a python2 backport in a lightweight library, then we
    should switch to that.
    """
    # NOTE: We're cheating a little here, by using a mutable type (a list),
    #       we're able to read and update the value from within the inline
    #       wrapper method. If we used an immutable type, the assignment
    #       would not work as we want.
    cache = []

    def wrapper(cls_instance):
        if not cache:
            cache.append(function(cls_instance))
        return cache[0]
    return wrapper 
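On Python 3, the docstring's suggestion applies directly; a rough equivalent of the same pattern using the standard library:

import functools

@functools.lru_cache(maxsize=None)
def expensive_setup():
    print('computing once')
    return {'configured': True}

expensive_setup()  # computes and caches
expensive_setup()  # returns the cached dict without recomputing

Note that lru_cache keys on the call arguments, so when it is applied to a method the instance becomes part of the key, whereas the decorator above shares a single cached value across all callers.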
Example #13
Source File: QADataStruct.py    From QUANTAXIS with MIT License
def price(self):
        """return the deal price of tick transaction

        Decorators:
            lru_cache

        Returns:
            [type] -- [description]
        """

        return self.data.price 
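The "Decorators: lru_cache" note refers to decoration that is not reproduced in this excerpt. The usual property-plus-cache pattern looks roughly like this; the class is a hypothetical stand-in, not QUANTAXIS code:

from functools import lru_cache
from types import SimpleNamespace

class TickData:
    def __init__(self, data):
        self.data = data

    @property
    @lru_cache()
    def price(self):
        # Computed on first access, then served from a cache keyed on self.
        return self.data.price

tick = TickData(SimpleNamespace(price=10.5))
tick.price  # first access computes; later accesses hit the cache

One trade-off of this pattern is that the cache holds a strong reference to self, so instances stay alive as long as their entries remain cached.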
Example #14
Source File: QADataStruct.py    From QUANTAXIS with MIT License
def buyorsell(self):
        """return the buy or sell towards 0--buy 1--sell 2--none

        Decorators:
            lru_cache

        Returns:
            [pd.Series] -- [description]
        """

        return self.data.buyorsell 
Example #15
Source File: tflite_model.py    From tflite-tools with MIT License
def compute_best_peak_memory_usage(self):
        if self.peak_usage is not None:
            return self.peak_usage

        g = self.model_graph

        # Can turn into an iterative function if this ever causes performance / stack overflow issues
        @functools.lru_cache(maxsize=None)
        def mem(tensors):
            # Computes the peak memory usage of a runtime system that computes all tensors in a set `tensors`.
            constants = [t for t in tensors if t.producer is None]
            if constants:
                upstream_mem_use, op_order = mem(frozenset(t for t in tensors if t.producer is not None))
                return TFLiteModel._cum_tensor_sizes(constants) + upstream_mem_use, op_order
            if not tensors:
                return 0, []

            min_use = sys.maxsize  # A reasonably large integer
            op_order = []
            # For each of tensors in our working set, we try to unapply the operator that produced it
            for t in tensors:
                rest = tensors - {t}
                # We constrain the search to never consider evaluating an operator (`t.producer`) more than once ---
                # so we prevent cases where we consider unapplying `t.producer` but it's actually necessary for other
                # tensors in the working set.
                if any(t in r.predecessors for r in rest):
                    continue
                inputs = frozenset(t.producer.non_empty_inputs)
                new_set = rest | inputs
                upstream_mem_use, operators = mem(new_set)

                tensors_in_memory = new_set | {t}
                mem_use = max(upstream_mem_use, TFLiteModel._cum_tensor_sizes(tensors_in_memory))
                if mem_use < min_use:
                    min_use = mem_use
                    op_order = operators + [t.producer]
            return min_use, op_order

        self.peak_usage = mem(frozenset(g.outputs))
        return self.peak_usage 
Example #16
Source File: QADataStruct.py    From QUANTAXIS with MIT License
def vol(self):
        """return the deal volume of tick

        Decorators:
            lru_cache

        Returns:
            pd.Series -- volume of transaction
        """

        try:
            return self.data.volume
        except:
            return self.data.vol 
Example #17
Source File: __init__.py    From retdec-regression-tests-framework with MIT License
def memoize(func):
    """A decorator to memoize the given function.

    It stores the results of function calls and returns the stored result when
    the same inputs occur again. The decorator also works on methods or
    properties.
    """
    return functools.lru_cache(maxsize=None)(func) 
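Usage is then an ordinary decoration; a small sketch:

@memoize
def parse_config(path):
    print('parsing', path)  # printed only on the first call per path
    return {'path': path}

parse_config('/tmp/a.cfg')
parse_config('/tmp/a.cfg')  # served from the cache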
Example #18
Source File: wrappers.py    From textpipe with MIT License
def __init__(self, uri, key='', max_lru_cache_size=1024, idf_weighting='naive'):
        self.word_vec = lru_cache(maxsize=max_lru_cache_size)(self.word_vec)
        self.key = f'w2v_{key}'
        self.idf_weighting = idf_weighting

        try:
            host, port, database = self._parse_uri(uri)
            self._redis = Redis(host, port, database)
        except RedisError as exception:
            raise RedisKeyedVectorException(f'The connection to Redis failed while trying to '
                                            f'initiate the client. Redis error message: '
                                            f'{exception}') 
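The line self.word_vec = lru_cache(...)(self.word_vec) caches a bound method per instance, so the cache is discarded together with the object. The same idea in isolation, on a hypothetical class:

from functools import lru_cache

class Lookup:
    def __init__(self, max_lru_cache_size=1024):
        # Rebind the bound method to a cached wrapper; each instance gets its own cache.
        self.get = lru_cache(maxsize=max_lru_cache_size)(self.get)

    def get(self, key):
        print('fetching', key)  # executed only on cache misses
        return key.upper()

lookup = Lookup()
lookup.get('word')
lookup.get('word')  # second call is a cache hit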
Example #19
Source File: base.py    From mimesis with MIT License
def pull(self, datafile: str = '') -> None:
        """Pull the content from the JSON and memorize one.

        Opens JSON file ``file`` in the folder ``data/locale``
        and get content from the file and memorize ones using lru_cache.

        :param datafile: The name of the file.
        :return: The content of the file.
        :raises UnsupportedLocale: Raises if locale is unsupported.
        """
        locale = self.locale
        data_dir = self._data_dir

        if not datafile:
            datafile = self._datafile

        def get_data(locale_name: str) -> JSON:
            """Pull JSON data from file.

            :param locale_name: Locale name.
            :return: Content of JSON file as dict.
            """
            file_path = Path(data_dir).joinpath(locale_name, datafile)
            with open(file_path, 'r', encoding='utf8') as f:
                return json.load(f)

        separator = locales.LOCALE_SEPARATOR

        master_locale = locale.split(separator).pop(0)
        data = get_data(master_locale)

        if separator in locale:
            data = self._update_dict(data, get_data(locale))

        self._data = data 
Example #20
Source File: QADataStruct.py    From QUANTAXIS with MIT License
def index(self):
        """return the transaction index

        Decorators:
            lru_cache

        Returns:
            [type] -- [description]
        """

        return self.data.index 
Example #21
Source File: QADataStruct.py    From QUANTAXIS with MIT License
def index(self):
        """return the transaction index

        Decorators:
            lru_cache

        Returns:
            [type] -- [description]
        """

        return self.data.index 
Example #22
Source File: QADataStruct.py    From QUANTAXIS with MIT License
def order(self):
        """return the order num of transaction/ for everyday change

        Decorators:
            lru_cache

        Returns:
            pd.series -- [description]
        """

        return self.data.order 
Example #23
Source File: QADataStruct.py    From QUANTAXIS with MIT License
def datetime(self):
        """return the datetime of transaction

        Decorators:
            lru_cache

        Returns:
            pd.Series -- [description]
        """

        return self.data.datetime 
Example #24
Source File: QADataStruct.py    From QUANTAXIS with MIT License
def time(self):
        """return the exact time of transaction(to minute level)

        Decorators:
            lru_cache

        Returns:
            pd.Series -- till minute level
        """

        return self.data.time 
Example #25
Source File: QADataStruct.py    From QUANTAXIS with MIT License
def date(self):
        """return the date of transaction

        Decorators:
            lru_cache

        Returns:
            pd.Series -- date of transaction
        """

        return self.data.date 
Example #26
Source File: QADataStruct.py    From QUANTAXIS with MIT License
def price(self):
        """return the deal price of tick transaction

        Decorators:
            lru_cache

        Returns:
            [type] -- [description]
        """

        return self.data.price 
Example #27
Source File: QADataStruct.py    From QUANTAXIS with MIT License
def buyorsell(self):
        """return the buy or sell towards 0--buy 1--sell 2--none

        Decorators:
            lru_cache

        Returns:
            [pd.Series] -- [description]
        """

        return self.data.buyorsell 
Example #28
Source File: QADataStruct.py    From QUANTAXIS with MIT License
def quarter(self):
        return self.resample('Q')

    # @property
    # @lru_cache()
    # def semiannual(self):
    #     return self.resample('SA') 
Example #29
Source File: QADataStruct.py    From QUANTAXIS with MIT License
def quarter(self):
        return self.resample('Q')

    # @property
    # @lru_cache()
    # def semiannual(self):
    #     return self.resample('SA') 
Example #30
Source File: wrappers.py    From deep_pipe with MIT License
def cache_methods(instance, methods: Iterable[str] = None, maxsize: int = None):
    """Cache the ``instance``'s ``methods``. If ``methods`` is None, all public methods will be cached."""
    if methods is None:
        methods = _get_public_methods(instance)

    cache = functools.lru_cache(maxsize)
    new_methods = {method: staticmethod(cache(getattr(instance, method))) for method in methods}
    proxy = type('Cached', (Proxy,), new_methods)
    return proxy(instance)
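A brief usage sketch of the wrapper above; the class is illustrative, and the Proxy base is assumed to come from the same deep_pipe module:

class Loader:
    def load(self, path):
        print('loading', path)  # executed only on cache misses
        return path.upper()

cached_loader = cache_methods(Loader(), methods=['load'], maxsize=None)
cached_loader.load('weights.npz')  # computed
cached_loader.load('weights.npz')  # cache hit, served through the proxy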