Python hashlib.algorithms_available Examples

The following are 25 code examples of hashlib.algorithms_available. Note that algorithms_available is a set attribute, not a callable: it names every hash algorithm that hashlib.new() will accept in the running interpreter, including any extras provided by the linked OpenSSL. Each example is taken from an open-source project; the source file and license are noted above each one. You may also want to check out the other functions and classes of the hashlib module.
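
Before the project examples, a minimal sketch of the attribute itself (the exact output varies with the Python build and the linked OpenSSL):

import hashlib

# Names accepted by hashlib.new() in this interpreter; contents vary by build.
print(sorted(hashlib.algorithms_available))

# The guaranteed algorithms are always a subset of the available ones.
assert hashlib.algorithms_guaranteed <= hashlib.algorithms_available

# Any listed name can be passed to hashlib.new().
print(hashlib.new("sha256", b"example").hexdigest())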
Example #1
Source File: fileutils.py    From oslo.utils with Apache License 2.0
def compute_file_checksum(path, read_chunksize=65536, algorithm='sha256'):
    """Compute checksum of a file's contents.

    :param path: Path to the file
    :param read_chunksize: Maximum number of bytes to be read from the file
     at once. Default is 65536 bytes or 64KB
    :param algorithm: The hash algorithm name to use. For example, 'md5',
     'sha256', 'sha512' and so on. Default is 'sha256'. Refer to
     hashlib.algorithms_available for available algorithms
    :return: Hex digest string of the checksum

    .. versionadded:: 3.31.0
    """
    checksum = hashlib.new(algorithm)  # Raises appropriate exceptions.
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(read_chunksize), b''):
            checksum.update(chunk)
            # Release the greenthread; when greenthreads are not used this is a no-op.
            time.sleep(0)
    return checksum.hexdigest() 
Example #2
Source File: app.py    From github-webhook-lambda with MIT License
def validate_signature(request):
    """Validate that the signature in the header matches the payload."""
    if CONFIG["SECRET"] is None:
        return
    try:
        signature = request.headers["X-Hub-Signature"]
        hashname, hashval = signature.split("=")
    except (KeyError, ValueError):
        raise BadRequestError()

    if (hashname in CONFIG["HASHLIB_BLACKLIST"]) or (
        hashname not in hashlib.algorithms_available
    ):
        raise BadRequestError("X-Hub-Signature hash algorithm unavailable")

    digest = hmac.new(
        CONFIG["SECRET"].encode(), request.raw_body.encode(), hashname
    ).hexdigest()
    if not hmac.compare_digest(digest.encode(), hashval.encode("utf-8")):
        raise UnauthorizedError("X-Hub-Signature mismatch") 
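
For context, GitHub builds the X-Hub-Signature header as "<algorithm>=<hex HMAC of the raw body, keyed with the webhook secret>". A minimal sketch of the sending side that this validator would accept (the secret and body below are made-up values for illustration):

import hmac

secret = b"my-webhook-secret"        # hypothetical shared webhook secret
body = b'{"action": "opened"}'       # hypothetical raw request body

# hmac.new() accepts a hashlib algorithm name string as digestmod.
signature = "sha1=" + hmac.new(secret, body, "sha1").hexdigest()
print(signature)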
Example #3
Source File: hasher.py    From habu with BSD 3-Clause "New" or "Revised" License
def hasher(data, algos=ALGOS):
    """Create the hash(es) of a given string or file."""
    try:
        data = data.encode()
    except Exception:
        pass

    result = {}

    for algo in sorted(hashlib.algorithms_available):
        if algo in algos:
            h = hashlib.new(algo)
            h.update(data)
            result[algo] = h.hexdigest()

    return result 
Example #4
Source File: hash.py    From FACT_core with GNU General Public License v3.0
def process_object(self, file_object):
        '''
        This function must be implemented by the plugin.
        Analysis result must be a dict stored in file_object.processed_analysis[self.NAME]
        If you want to propagate results to parent objects, store a list of strings in the 'summary' entry of your result dict
        '''
        file_object.processed_analysis[self.NAME] = {}
        for h in self.hashes_to_create:
            if h in algorithms_available:
                file_object.processed_analysis[self.NAME][h] = get_hash(h, file_object.binary)
            else:
                logging.debug('algorithm {} not available'.format(h))
        file_object.processed_analysis[self.NAME]['ssdeep'] = get_ssdeep(file_object.binary)
        file_object.processed_analysis[self.NAME]['imphash'] = get_imphash(file_object)

        tlsh_hash = get_tlsh(file_object.binary)
        if tlsh_hash:
            file_object.processed_analysis[self.NAME]['tlsh'] = tlsh_hash

        return file_object 
Example #5
Source File: resource_loader.py    From mindmeld with Apache License 2.0
def _set_algorithm(self, value):
        """Setter for algorithm property.

        Args:
            value (str): The hashing algorithm to use. Defaults
                to sha1. See `hashlib.algorithms_available` for a list of
                options.
        """
        if value not in hashlib.algorithms_available:
            raise ValueError("Invalid hashing algorithm: {!r}".format(value))

        if value != self._algorithm:
            # reset cache when changing algorithm
            self._cache = {}
            self._algorithm = value 
Example #6
Source File: hash_processor.py    From SATOSA with Apache License 2.0
def process(self, internal_data, attribute, **kwargs):
        salt = kwargs.get(CONFIG_KEY_SALT, CONFIG_DEFAULT_SALT)
        hash_algo = kwargs.get(CONFIG_KEY_HASHALGO, CONFIG_DEFAULT_HASHALGO)
        if hash_algo not in hashlib.algorithms_available:
            raise AttributeProcessorError(
                "Hash algorithm not supported: {}".format(hash_algo))

        attributes = internal_data.attributes
        value = attributes.get(attribute, [None])[0]
        if value is None:
            raise AttributeProcessorError(
                "No value for attribute: {}".format(attribute))

        hasher = hashlib.new(hash_algo)
        hasher.update(value.encode('utf-8'))
        hasher.update(salt.encode('utf-8'))
        value_hashed = hasher.hexdigest()
        attributes[attribute][0] = value_hashed 
Example #7
Source File: queries.py    From RTFMbot with Mozilla Public License 2.0
def list(self, ctx, *, group=None):
        """Lists available choices for other commands"""

        choices = {
            "documentations": self.documented,
            "hashing": sorted([h for h in algorithms if h.islower()]),
            "references": self.referred,
            "wrapped argument": self.wrapping,
        }

        if group == 'languages':
            emb = discord.Embed(title=f"Available for {group}: {len(self.bot.languages)}",
                description=f'View them on [tio.run](https://tio.run/#), or in [JSON format](https://tio.run/languages.json)')
            return await ctx.send(embed=emb)

        if group not in choices:
            emb = discord.Embed(title="Available listed commands", description=f"`languages`, `{'`, `'.join(choices)}`")
            return await ctx.send(embed=emb)

        availables = choices[group]
        description = f"`{'`, `'.join([*availables])}`"
        emb = discord.Embed(title=f"Available for {group}: {len(availables)}", description=description)
        await ctx.send(embed=emb) 
Example #8
Source File: utils.py    From pooch with BSD 3-Clause "New" or "Revised" License
def file_hash(fname, alg="sha256"):
    """
    Calculate the hash of a given file.

    Useful for checking if a file has changed or been corrupted.

    Parameters
    ----------
    fname : str
        The name of the file.
    alg : str
        The type of the hashing algorithm

    Returns
    -------
    hash : str
        The hash of the file.

    Examples
    --------

    >>> fname = "test-file-for-hash.txt"
    >>> with open(fname, "w") as f:
    ...     __ = f.write("content of the file")
    >>> print(file_hash(fname))
    0fc74468e6a9a829f103d069aeb2bb4f8646bad58bf146bb0e3379b759ec4a00
    >>> import os
    >>> os.remove(fname)

    """
    if alg not in hashlib.algorithms_available:
        raise ValueError("Algorithm '{}' not available in hashlib".format(alg))
    # Calculate the hash in chunks to avoid overloading the memory
    chunksize = 65536
    hasher = hashlib.new(alg)
    with open(fname, "rb") as fin:
        buff = fin.read(chunksize)
        while buff:
            hasher.update(buff)
            buff = fin.read(chunksize)
    return hasher.hexdigest() 
Example #9
Source File: tinychain.py    From tinychain with MIT License
def pubkey_to_address(pubkey: bytes) -> str:
    if 'ripemd160' not in hashlib.algorithms_available:
        raise RuntimeError('missing ripemd160 hash algorithm')

    sha = hashlib.sha256(pubkey).digest()
    ripe = hashlib.new('ripemd160', sha).digest()
    return b58encode_check(b'\x00' + ripe) 
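
Worth noting: 'ripemd160' is not in hashlib.algorithms_guaranteed, and whether it appears in algorithms_available depends on the OpenSSL build (with OpenSSL 3.x it often sits in the legacy provider and is absent), which is exactly why the excerpt checks before calling hashlib.new. A quick sketch of the same check with a hedged fallback:

import hashlib

if 'ripemd160' in hashlib.algorithms_available:
    print(hashlib.new('ripemd160', b'pubkey-bytes').hexdigest())
else:
    # Without OpenSSL support, a third-party implementation (for example the
    # RIPEMD160 module in pycryptodome) would be needed; this is only a suggestion.
    print('ripemd160 not provided by this build')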
Example #10
Source File: file_operations.py    From Reusables with MIT License
def file_hash(path, hash_type="md5", block_size=65536, hex_digest=True):
    """
    Hash a given file with md5 (or any other available algorithm) and return the
    hex digest. You can check `hashlib.algorithms_available` to see which are
    available on your system (unless you have an archaic Python version, you poor soul).

    This function is designed to be non memory intensive.

    .. code:: python

        reusables.file_hash("test_structure.zip")
        # '61e387de305201a2c915a4f4277d6663'

    :param path: location of the file to hash
    :param hash_type: string name of the hash to use
    :param block_size: amount of bytes to add to hasher at a time
    :param hex_digest: returned as hexdigest, false will return digest
    :return: file's hash
    """
    hashed = hashlib.new(hash_type)
    with open(path, "rb") as infile:
        buf = infile.read(block_size)
        while len(buf) > 0:
            hashed.update(buf)
            buf = infile.read(block_size)
    return hashed.hexdigest() if hex_digest else hashed.digest() 
Example #11
Source File: hashanalyzer.py    From pastepwn with MIT License
def __init__(self, actions, passwords, algorithms=None):
        """"Hashes given passwords with multiple algorithms and matches the output.

        :param actions: A single action or a list of actions to be executed on every paste
        :param passwords: A single password or a list of passwords to hash, as bytes
        :param algorithms: A list of algorithm names to use for hashing. This should be a subset
                           of hashlib.algorithms_available, and defaults to it.
        """
        # Make sure passwords is a list
        if isinstance(passwords, bytes):
            passwords = [passwords]

        # Build algorithm list
        if algorithms is None:
            algorithms = hashlib.algorithms_available
        else:
            algorithms = set(algorithms).intersection(hashlib.algorithms_available)

        if not algorithms:
            raise ValueError('No valid algorithm names specified')

        # Compute hashes with all algorithms
        hashes = []
        for hash_name in algorithms:
            hash_function = hashlib.new(hash_name)
            for password in passwords:
                hash = hash_function.copy()
                hash.update(password)
                if hash_name == 'shake_128':
                    digest = hash.hexdigest(128)
                elif hash_name == 'shake_256':
                    digest = hash.hexdigest(256)
                else:
                    digest = hash.hexdigest()
                hashes.append(digest)

        # Build regex
        regex = r"\b(%s)\b" % '|'.join(hashes)
        super().__init__(actions, regex, re.IGNORECASE) 
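
The shake_128/shake_256 special-casing above exists because hashlib's SHAKE objects are extendable-output functions: hexdigest() must be given an explicit output length in bytes (the 128 and 256 used here mirror the excerpt's choice, not a fixed requirement). A minimal illustration:

import hashlib

h = hashlib.shake_128(b"password")
print(h.hexdigest(16))  # 16 bytes of output -> 32 hex characters
# h.hexdigest() with no length argument raises TypeError for shake objects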
Example #12
Source File: tools.py    From RTFMbot with Mozilla Public License 2.0
def __init__(self, bot):
        self.bot = bot
        self.algos = sorted([h for h in hashlib.algorithms_available if h.islower()]) 
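
The islower() filter (also used in Example #7) is there because on some builds algorithms_available lists the same OpenSSL algorithm under more than one spelling, including upper- and lower-case variants, as the comment in Example #14 also notes; keeping only the lower-case names yields one entry per algorithm. A small sketch of the same dedupe:

import hashlib

# Keep one canonical lower-case name per algorithm; on builds where
# algorithms_available also contains upper-case aliases this drops duplicates.
algos = sorted(h for h in hashlib.algorithms_available if h.islower())
print(algos)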
Example #13
Source File: test_hashlib.py    From Project-New-Reign---Nemesis-Main with GNU General Public License v3.0
def test_algorithms_available(self):
        self.assertTrue(set(hashlib.algorithms_guaranteed).
                            issubset(hashlib.algorithms_available)) 
Example #14
Source File: omnihash.py    From omnihash with MIT License
def make_digesters(fpath, families, include_CRCs=False):
    """
    Create and return a dictionary of all our active hash algorithms.

    Each digester is a 2-tuple ``( digester.update_func(bytes), digest_func(digester) -> int)``.
    """
    ## TODO: simplify digester-tuple API, ie: (digester, update_func(d), digest_func(d))

    families = set(f.upper() for f in families)
    digesters = OrderedDict()

    digesters['LENGTH'] = (LenDigester(), LenDigester.digest)

    # Default Algos
    for algo in sorted(hashlib.algorithms_available):
        # algorithms_available can have duplicates
        aname = algo.upper()
        if aname not in digesters and is_algo_in_families(aname, families):
            digesters[aname] = (hashlib.new(algo), lambda d: d.hexdigest())

    # CRC
    if include_CRCs:
        for name in sorted(crcmod._crc_definitions_by_name):
            crc_name = crcmod._crc_definitions_by_name[name]['name']
            aname = crc_name.upper()
            if is_algo_in_families(aname, families):
                digesters[aname] = (crcmod.PredefinedCrc(crc_name),
                                    lambda d: hex(d.crcValue))

    add_git_digesters(digesters, fpath)

    ## Append plugin digesters.
    #
    digesters.update(known_digesters)
    for digester in list(digesters.keys()):
        if not is_algo_in_families(digester.upper(), families):
            digesters.pop(digester, None)

    return digesters 
Example #15
Source File: makehash.py    From apex-sigma-core with GNU General Public License v3.0
def makehash(_cmd, pld):
    """
    :param _cmd: The command object referenced in the command.
    :type _cmd: sigma.core.mechanics.command.SigmaCommand
    :param pld: The payload with execution data and details.
    :type pld: sigma.core.mechanics.payload.CommandPayload
    """
    if pld.args:
        if len(pld.args) >= 2:
            hash_name = pld.args[0]
            hashes = hashlib.algorithms_available
            if hash_name in hashes:
                qry = ' '.join(pld.args[1:])
                crypt = hashlib.new(hash_name)
                crypt.update(qry.encode('utf-8'))
                final = crypt.hexdigest()
                response = discord.Embed(color=0x66cc66)
                response.add_field(name=f'✅ Hashing With {hash_name.upper()} Done', value=f'```\n{final}\n```')
            else:
                response = discord.Embed(color=0xBE1931)
                response.add_field(name='❗ Unknown Hashing Method', value=f'Available:\n```\n{", ".join(hashes)}\n```')
        else:
            response = error('Not enough arguments.')
    else:
        response = error('Nothing inputted.')
    await pld.msg.channel.send(embed=response) 
Example #16
Source File: downloads.py    From ungoogled-chromium with BSD 3-Clause "New" or "Revised" License
def _chromium_hashes_generator(hashes_path):
    with hashes_path.open(encoding=ENCODING) as hashes_file:
        hash_lines = hashes_file.read().splitlines()
    for hash_name, hash_hex, _ in map(lambda x: x.lower().split('  '), hash_lines):
        if hash_name in hashlib.algorithms_available:
            yield hash_name, hash_hex
        else:
            get_logger().warning('Skipping unknown hash algorithm: %s', hash_name) 
Example #17
Source File: test_hashlib.py    From ironpython3 with Apache License 2.0
def test_algorithms_available(self):
        self.assertTrue(set(hashlib.algorithms_guaranteed).
                            issubset(hashlib.algorithms_available)) 
Example #18
Source File: wheelfile.py    From stopstalk-deployment with MIT License
def __init__(self, file, mode='r'):
        basename = os.path.basename(file)
        self.parsed_filename = WHEEL_INFO_RE.match(basename)
        if not basename.endswith('.whl') or self.parsed_filename is None:
            raise WheelError("Bad wheel filename {!r}".format(basename))

        super(WheelFile, self).__init__(file, mode, compression=ZIP_DEFLATED, allowZip64=True)

        self.dist_info_path = '{}.dist-info'.format(self.parsed_filename.group('namever'))
        self.record_path = self.dist_info_path + '/RECORD'
        self._file_hashes = OrderedDict()
        self._file_sizes = {}
        if mode == 'r':
            # Ignore RECORD and any embedded wheel signatures
            self._file_hashes[self.record_path] = None, None
            self._file_hashes[self.record_path + '.jws'] = None, None
            self._file_hashes[self.record_path + '.p7s'] = None, None

            # Fill in the expected hashes by reading them from RECORD
            try:
                record = self.open(self.record_path)
            except KeyError:
                raise WheelError('Missing {} file'.format(self.record_path))

            with record:
                for line in record:
                    line = line.decode('utf-8')
                    path, hash_sum, size = line.rsplit(u',', 2)
                    if hash_sum:
                        algorithm, hash_sum = hash_sum.split(u'=')
                        if algorithm not in hashlib.algorithms_available:
                            raise WheelError('Unsupported hash algorithm: {}'.format(algorithm))
                        elif algorithm.lower() in {'md5', 'sha1'}:
                            raise WheelError(
                                'Weak hash algorithm ({}) is not permitted by PEP 427'
                                .format(algorithm))

                        self._file_hashes[path] = (
                            algorithm, urlsafe_b64decode(hash_sum.encode('ascii'))) 
Example #19
Source File: test_hashlib.py    From Fluid-Designer with GNU General Public License v3.0
def test_algorithms_available(self):
        self.assertTrue(set(hashlib.algorithms_guaranteed).
                            issubset(hashlib.algorithms_available)) 
Example #20
Source File: multi_backend.py    From glance_store with Apache License 2.0
def store_add_to_backend_with_multihash(
        image_id, data, size, hashing_algo, store,
        context=None, verifier=None):
    """
    A wrapper around a call to each store's add() method that requires
    a hashing_algo identifier and returns a 5-tuple including the
    "multihash" computed using the specified hashing_algo.  (This
    is an enhanced version of store_add_to_backend(), which is left
    as-is for backward compatibility.)

    :param image_id: The image to which the data is added
    :param data: The data to be stored
    :param size: The length of the data in bytes
    :param store: The store to which the data is being added
    :param hashing_algo: A hashlib algorithm identifier (string)
    :param context: The request context
    :param verifier: An object used to verify signatures for images
    :return: The url location of the file,
             the size amount of data,
             the checksum of the data,
             the multihash of the data,
             the storage system's metadata dictionary for the location
    :raises: ``glance_store.exceptions.BackendException``
             ``glance_store.exceptions.UnknownHashingAlgo``
    """

    if hashing_algo not in hashlib.algorithms_available:
        raise exceptions.UnknownHashingAlgo(algo=hashing_algo)

    (location, size, checksum, multihash, metadata) = store.add(
        image_id, data, size, hashing_algo, context=context, verifier=verifier)

    if metadata is not None:
        _check_metadata(store, metadata)

    return (location, size, checksum, multihash, metadata) 
Example #21
Source File: backend.py    From glance_store with Apache License 2.0
def store_add_to_backend_with_multihash(
        image_id, data, size, hashing_algo, store,
        context=None, verifier=None):
    """
    A wrapper around a call to each store's add() method that requires
    a hashing_algo identifier and returns a 5-tuple including the
    "multihash" computed using the specified hashing_algo.  (This
    is an enhanced version of store_add_to_backend(), which is left
    as-is for backward compatibility.)

    :param image_id: The image to which the data is added
    :param data: The data to be stored
    :param size: The length of the data in bytes
    :param store: The store to which the data is being added
    :param hashing_algo: A hashlib algorithm identifier (string)
    :param context: The request context
    :param verifier: An object used to verify signatures for images
    :return: The url location of the file,
             the size amount of data,
             the checksum of the data,
             the multihash of the data,
             the storage system's metadata dictionary for the location
    :raises: ``glance_store.exceptions.BackendException``
             ``glance_store.exceptions.UnknownHashingAlgo``
    """

    if hashing_algo not in hashlib.algorithms_available:
        raise exceptions.UnknownHashingAlgo(algo=hashing_algo)

    (location, size, checksum, multihash, metadata) = store.add(
        image_id, data, size, hashing_algo, context=context, verifier=verifier)

    if metadata is not None:
        _check_metadata(store, metadata)

    return (location, size, checksum, multihash, metadata) 
Example #22
Source File: cracker.py    From findmyhash with GNU General Public License v3.0
def validate_hash(hash: str, cracked: str, algo: Algo) -> bool:
        # NOTE: I'm all for trusting the third parties added here and not double checking
        # Hence removing the cross-check with multiple services

        res = False
        if algo.name in hashlib.algorithms_available:
            h = hashlib.new(algo.name)
            h.update(cracked.encode("utf-8"))

            if h.hexdigest().lower() == hash:
                res = True
        elif algo in [Algo.LDAP_MD5, Algo.LDAP_SHA1]:
            # NOTE: this is fucking ugly, almost tempted to leave the user do the pre-work
            # leaving for now for compatibility purposes
            alg = algo.name.split('_')[1]
            ahash = base64.b64decode(hash.split('}')[1])  # base64.decodestring() was removed in Python 3.9

            h = hashlib.new(alg)
            h.update(cracked.encode("utf-8"))

            if h.digest() == ahash:
                res = True
        elif algo == Algo.NTLM or (algo == Algo.LM and ':' in hash):
            candidate = hashlib.new('md4', cracked.split()[-1].encode('utf-16le')).hexdigest()

            # It's a LM:NTLM combination or a single NTLM hash
            if (':' in hash and candidate == hash.split(':')[1]) or (':' not in hash and candidate == hash):
                res = True
        else:
            # Can't and won't validate the hash, assuming it's correct
            res = True

        return res 
Example #23
Source File: test_hashlib.py    From oss-ftp with MIT License
def test_algorithms_available(self):
        self.assertTrue(set(hashlib.algorithms_guaranteed).
                            issubset(hashlib.algorithms_available)) 
Example #24
Source File: test_hashlib.py    From ironpython2 with Apache License 2.0
def test_algorithms_available(self):
        self.assertTrue(set(hashlib.algorithms_guaranteed).
                            issubset(hashlib.algorithms_available)) 
Example #25
Source File: util_hash.py    From ubelt with Apache License 2.0
def _rectify_hasher(hasher):
    """
    Convert a string-based key into a hasher class

    Notes:
        In terms of speed on 64bit systems, sha1 is the fastest followed by md5
        and sha512. The slowest algorithm is sha256. If xxhash is installed
        the fastest algorithm is xxh64.

    Example:
        >>> assert _rectify_hasher(NoParam) is DEFAULT_HASHER
        >>> assert _rectify_hasher('sha1') is hashlib.sha1
        >>> assert _rectify_hasher('sha256') is hashlib.sha256
        >>> assert _rectify_hasher('sha512') is hashlib.sha512
        >>> assert _rectify_hasher('md5') is hashlib.md5
        >>> assert _rectify_hasher(hashlib.sha1) is hashlib.sha1
        >>> if HASH is not None:
        >>>     assert _rectify_hasher(hashlib.sha1())().name == 'sha1'
        >>> import pytest
        >>> assert pytest.raises(KeyError, _rectify_hasher, '42')
        >>> #assert pytest.raises(TypeError, _rectify_hasher, object)
        >>> if xxhash:
        >>>     assert _rectify_hasher('xxh64') is xxhash.xxh64
        >>>     assert _rectify_hasher('xxh32') is xxhash.xxh32
    """
    if xxhash is not None:  # pragma: no cover
        if hasher in {'xxh32', 'xx32', 'xxhash'}:
            return xxhash.xxh32
        if hasher in {'xxh64', 'xx64'}:
            return xxhash.xxh64

    if hasher is NoParam or hasher == 'default':
        hasher = DEFAULT_HASHER
    elif isinstance(hasher, six.string_types):
        if hasher not in hashlib.algorithms_available:
            raise KeyError('unknown hasher: {}'.format(hasher))
        else:
            hasher = getattr(hashlib, hasher)
    elif HASH is not None and isinstance(hasher, HASH):
        # by default the result of this function is a class we will make an
        # instance of, if we already have an instance, wrap it in a callable
        # so the external syntax does not need to change.
        return lambda: hasher
    return hasher