Python torch.nn.CELU Examples

The following is one code example of torch.nn.CELU(). You can vote up the examples you like or vote down the ones you don't like, and go to the original project or source file by following the link above the example. You may also want to check out all available functions and classes of the module torch.nn, or try the search function.
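As a quick, minimal sketch (not taken from the project below), nn.CELU applies CELU(x)=max(0,x)+min(0,α∗(exp(x/α)−1)) element-wise; the alpha value shown is the PyTorch default:

import torch
import torch.nn as nn

celu = nn.CELU(alpha=1.0)           # CELU(x)=max(0,x)+min(0,alpha*(exp(x/alpha)-1))
x = torch.tensor([-2.0, 0.0, 2.0])
print(celu(x))                      # negative inputs saturate smoothly toward -alpha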
Example #1
Source File: base_utils.py    From pt-ranking.github.io with MIT License
import torch.nn as nn


def get_AF(af_str):
    """
    Given a string identifier, return the corresponding PyTorch activation function.
    """
    if af_str == 'R':
        return nn.ReLU()         # ReLU(x)=max(0,x)

    elif af_str == 'LR':
        return nn.LeakyReLU()    # LeakyReLU(x)=max(0,x)+negative_slope∗min(0,x)

    elif af_str == 'RR':
        return nn.RReLU()        # the randomized leaky rectified linear unit function

    elif af_str == 'E':          # ELU(x)=max(0,x)+min(0,α∗(exp(x)−1))
        return nn.ELU()

    elif af_str == 'SE':         # SELU(x)=scale∗(max(0,x)+min(0,α∗(exp(x)−1)))
        return nn.SELU()

    elif af_str == 'CE':         # CELU(x)=max(0,x)+min(0,α∗(exp(x/α)−1))
        return nn.CELU()

    elif af_str == 'S':
        return nn.Sigmoid()

    elif af_str == 'SW':
        #return SWISH()
        raise NotImplementedError

    elif af_str == 'T':
        return nn.Tanh()

    elif af_str == 'ST':         # a kind of normalization
        return nn.Softmax(dim=1) # rescales the input so outputs lie in (0, 1) and sum to 1; dim=1 assumed for (batch, features) inputs

    elif af_str == 'EP':
        #return Exp()
        raise NotImplementedError

    else:
        raise NotImplementedError
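A hypothetical usage sketch follows; the Sequential wrapper, layer sizes, and input shape are illustrative assumptions, not part of the original project:

import torch

layer = nn.Sequential(nn.Linear(8, 4), get_AF('CE'))  # 'CE' selects nn.CELU()
out = layer(torch.randn(2, 8))                        # random batch; shapes are illustrative
print(out.shape)                                      # torch.Size([2, 4])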