import itertools
from typing import Tuple, Any, Union, List
import torch.jit
import functools
import operator
import geoopt

# Public API of this utility module; every name listed below is defined in this file.
__all__ = [
    "copy_or_set_",
    "strip_tuple",
    "size2shape",
    "make_tuple",
    "broadcast_shapes",
    "ismanifold",
    "canonical_manifold",
    "list_range",
    "idx2sign",
    "drop_dims",
    "canonical_dims",
    "sign",
    "prod",
    "clamp_abs",
    "sabs",
]

def copy_or_set_(dest: torch.Tensor, source: torch.Tensor) -> torch.Tensor:
    """
    Copy or inplace set from :code:`source` to :code:`dest`.

    A workaround to respect strides of :code:`dest` when copying :code:`source`.
    The original issue was raised in the geoopt issue tracker
    when working with matrix manifolds. Inplace set operation is more efficient,
    but the resulting storage might be incompatible after. To avoid the issue we refer to
    the safe option and use :code:`copy_` if strides do not match.

    Parameters
    ----------
    dest : torch.Tensor
        Destination tensor where to store new data
    source : torch.Tensor
        Source data to put in the new tensor

    Returns
    -------
    dest
        torch.Tensor, modified inplace
    """
    if dest.stride() != source.stride():
        # strides differ: set_ would silently change dest's layout, so do a safe copy
        return dest.copy_(source)
    else:
        # strides match: share source's storage directly (cheaper than copying)
        return dest.set_(source)

def strip_tuple(tup: Tuple) -> Union[Tuple, Any]:
    """Unpack a one-element tuple into its single item; return longer tuples unchanged."""
    if len(tup) == 1:
        return tup[0]
    else:
        return tup

def make_tuple(obj: Union[Tuple, List, Any]) -> Tuple:
    """Coerce ``obj`` into a tuple: lists are converted, scalars are wrapped, tuples pass through."""
    if isinstance(obj, list):
        obj = tuple(obj)
    if not isinstance(obj, tuple):
        return (obj,)
    else:
        return obj

def prod(items):
    """Multiply all ``items`` together, returning 1 for an empty iterable."""
    total = 1
    for item in items:
        total = total * item
    return total

def sign(x):
    """Sign of ``x`` that never returns 0: zeros map to +1, like a strict sign function."""
    # shifting the ordinary sign by 0.5 moves 0 -> 0.5, so the outer sign yields +1 there
    shifted = x.sign() + 0.5
    return torch.sign(shifted)

def sabs(x, eps: float = 1e-15):
    """Stable absolute value: ``|x| + eps``, keeping the result away from exact zero."""
    # abs() allocates a fresh tensor, so the in-place add never mutates the input
    result = torch.abs(x)
    result.add_(eps)
    return result

def clamp_abs(x, eps: float = 1e-15):
    """Push ``x`` at least ``eps`` away from zero while preserving its (zero-free) sign."""
    magnitude = sabs(x, eps=eps)
    return sign(x) * magnitude

def idx2sign(idx: int, dim: int, neg: bool = True):
    """
    Unify idx to be negative or positive, that helps in cases of broadcasting.

    Parameters
    ----------
    idx : int
        current index
    dim : int
        maximum dimension
    neg : bool
        indicate we need negative index

    Returns
    -------
    int
        canonical (negative or positive) form of the index
    """
    if neg:
        if idx < 0:
            # already negative, nothing to do
            return idx
        else:
            # e.g. idx=1, dim=3 -> (1 + 1) % -4 == -2
            return (idx + 1) % -(dim + 1)
    else:
        # positive canonical form, e.g. idx=-1, dim=3 -> 2
        return idx % dim

def drop_dims(tensor: torch.Tensor, dims: List[int]):
    """Squeeze several dimensions at once — a multi-dim version of :func:`torch.squeeze`."""
    # each squeeze shifts the remaining dims left by one, hence the offset correction;
    # assumes ``dims`` is sorted ascending — TODO confirm with callers
    for offset, d in enumerate(dims):
        tensor = tensor.squeeze(d - offset)
    return tensor

def list_range(end: int):
    """
    Build the list ``[0, 1, ..., end - 1]``.

    Written as an explicit loop (instead of ``list(range(end))``) to stay
    TorchScript-friendly, matching the annotation style of this module.

    Parameters
    ----------
    end : int
        exclusive upper bound

    Returns
    -------
    List[int]
    """
    res: List[int] = []
    for d in range(end):
        res.append(d)
    return res

def canonical_dims(dims: List[int], maxdim: int):
    """Map every index in ``dims`` to its positive canonical form (see :func:`idx2sign`)."""
    canonical: List[int] = []
    for d in dims:
        canonical.append(idx2sign(d, maxdim, neg=False))
    return canonical

def size2shape(*size: Union[Tuple[int], int]) -> Tuple[int]:
    """Normalize a size spec (``f(2, 3)`` or ``f((2, 3))``) into a flat shape tuple."""
    stripped = strip_tuple(size)
    return make_tuple(stripped)

def broadcast_shapes(*shapes: Tuple[int]) -> Tuple[int]:
    """
    Apply numpy broadcasting rules to shapes.

    Parameters
    ----------
    shapes : Tuple[int]
        shapes to broadcast together

    Returns
    -------
    Tuple[int]
        the broadcasted shape

    Raises
    ------
    ValueError
        if the shapes are not broadcast-compatible
    """
    result = []
    # walk dimensions right-to-left, padding shorter shapes with 1s
    for dims in itertools.zip_longest(*map(reversed, shapes), fillvalue=1):
        dim: int = 1
        for d in dims:
            if dim != 1 and d != 1 and d != dim:
                raise ValueError("Shapes can't be broadcasted")
            elif d > dim:
                dim = d
        # bug fix: the computed dim was never collected, so the result was always ()
        result.append(dim)
    return tuple(reversed(result))

def ismanifold(instance, cls):
    """
    Check if interface of an instance is compatible with given class.

    Parameters
    ----------
    instance : geoopt.Manifold
        check if a given manifold is compatible with cls API
    cls : type
        manifold type

    Returns
    -------
    bool
        comparison result

    Raises
    ------
    TypeError
        if ``cls`` is not a Manifold subclass
    """
    if not issubclass(cls, geoopt.manifolds.Manifold):
        raise TypeError("`cls` should be a subclass of geoopt.manifolds.Manifold")
    if not isinstance(instance, geoopt.manifolds.Manifold):
        return False
    else:
        # this is the case to care about, Scaled class is a proxy, but fails instance checks
        while isinstance(instance, geoopt.Scaled):
            instance = instance.base
        return isinstance(instance, cls)

def canonical_manifold(manifold: "geoopt.Manifold"):
    """
    Get a canonical manifold.

    If a manifold is wrapped with Scaled, some attributes may not be available.
    This should help if you really need them.

    Parameters
    ----------
    manifold : geoopt.Manifold

    Returns
    -------
    geoopt.Manifold
        an unwrapped manifold
    """
    # Scaled wrappers can be nested, so unwrap until we reach the base manifold
    while isinstance(manifold, geoopt.Scaled):
        manifold = manifold.base
    return manifold