Python torch.nn.RReLU() Examples

The following are 9 code examples of torch.nn.RReLU(). Each example links to its original project and source file in the line above the code. You may also want to check out all of the other available functions and classes of the torch.nn module.
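
All of the snippets below assume the usual import torch.nn as nn. For context, nn.RReLU(lower, upper) is a leaky ReLU whose negative slope is drawn uniformly from [lower, upper] during training and fixed to (lower + upper) / 2 during evaluation; the defaults are lower=1/8 and upper=1/3. A minimal standalone sketch (the input values are only illustrative):

import torch
import torch.nn as nn

rrelu = nn.RReLU(lower=0.125, upper=0.3333)  # defaults: lower=1/8, upper=1/3
x = torch.tensor([-1.0, 0.0, 1.0])

rrelu.train()
y_train = rrelu(x)  # negative entries are scaled by a slope sampled from [lower, upper]

rrelu.eval()
y_eval = rrelu(x)   # in eval mode the slope is deterministic: (lower + upper) / 2
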
Example #1
Source File: supervised_topic_model.py    From causal-text-embeddings with MIT License
def get_activation(self, act):
    if act == 'tanh':
        act = nn.Tanh()
    elif act == 'relu':
        act = nn.ReLU()
    elif act == 'softplus':
        act = nn.Softplus()
    elif act == 'rrelu':
        act = nn.RReLU()
    elif act == 'leakyrelu':
        act = nn.LeakyReLU()
    elif act == 'elu':
        act = nn.ELU()
    elif act == 'selu':
        act = nn.SELU()
    elif act == 'glu':
        act = nn.GLU()
    else:
        print('Defaulting to tanh activations...')
        act = nn.Tanh()
    return act
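
In context this is an instance method called during model construction; a hypothetical usage sketch (the call site and attribute name are assumptions, not part of the original file):

self.act = self.get_activation('rrelu')   # nn.RReLU(); unrecognized names fall back to nn.Tanh()
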
Example #2
Source File: utils.py    From pnn.pytorch.update with MIT License
def act_fn(act):
    if act == 'relu':
        act_ = nn.ReLU(inplace=False)
    elif act == 'lrelu':
        act_ = nn.LeakyReLU(inplace=True)
    elif act == 'prelu':
        act_ = nn.PReLU()
    elif act == 'rrelu':
        act_ = nn.RReLU(inplace=True)
    elif act == 'elu':
        act_ = nn.ELU(inplace=True)
    elif act == 'selu':
        act_ = nn.SELU(inplace=True)
    elif act == 'tanh':
        act_ = nn.Tanh()
    elif act == 'sigmoid':
        act_ = nn.Sigmoid()
    else:
        print('\n\nActivation function {} is not supported/understood\n\n'.format(act))
        act_ = None
    return act_ 
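
Unlike the previous helper, act_fn returns None for names it does not recognize instead of falling back to a default, so callers should check the result; a hedged usage sketch:

act = act_fn('rrelu')   # nn.RReLU(inplace=True)
assert act is not None, 'unsupported activation name'
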
Example #3
Source File: etm.py    From ETM with MIT License
def get_activation(self, act):
    if act == 'tanh':
        act = nn.Tanh()
    elif act == 'relu':
        act = nn.ReLU()
    elif act == 'softplus':
        act = nn.Softplus()
    elif act == 'rrelu':
        act = nn.RReLU()
    elif act == 'leakyrelu':
        act = nn.LeakyReLU()
    elif act == 'elu':
        act = nn.ELU()
    elif act == 'selu':
        act = nn.SELU()
    elif act == 'glu':
        act = nn.GLU()
    else:
        print('Defaulting to tanh activations...')
        act = nn.Tanh()
    return act
Example #4
Source File: test_cli.py    From skorch with BSD 3-Clause "New" or "Revised" License
def test_parse_net_kwargs(self, parse_net_kwargs):
    kwargs = {
        'lr': 0.05,
        'max_epochs': 5,
        'module__num_units': 10,
        'module__nonlin': 'torch.nn.RReLU(0.123, upper=0.456)',
    }
    parsed_kwargs = parse_net_kwargs(kwargs)

    assert len(parsed_kwargs) == 4
    assert np.isclose(parsed_kwargs['lr'], 0.05)
    assert parsed_kwargs['max_epochs'] == 5
    assert parsed_kwargs['module__num_units'] == 10
    assert isinstance(parsed_kwargs['module__nonlin'], RReLU)
    assert np.isclose(parsed_kwargs['module__nonlin'].lower, 0.123)
    assert np.isclose(parsed_kwargs['module__nonlin'].upper, 0.456)
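
The kwarg string 'torch.nn.RReLU(0.123, upper=0.456)' is expected to resolve to an instantiated module. Constructing it directly shows the attributes the assertions check (a sketch for illustration only, independent of skorch's parsing machinery):

from torch.nn import RReLU

nonlin = RReLU(0.123, upper=0.456)   # the first positional argument of RReLU is `lower`
assert nonlin.lower == 0.123 and nonlin.upper == 0.456
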
Example #5
Source File: fc.py    From Attention-on-Attention-for-VQA with MIT License
def get_act(act):
    if act == 'ReLU':
        act_layer = nn.ReLU
    elif act == 'LeakyReLU':
        act_layer = nn.LeakyReLU
    elif act == 'PReLU':
        act_layer = nn.PReLU
    elif act == 'RReLU':
        act_layer = nn.RReLU
    elif act == 'ELU':
        act_layer = nn.ELU
    elif act == 'SELU':
        act_layer = nn.SELU
    elif act == 'Tanh':
        act_layer = nn.Tanh
    elif act == 'Hardtanh':
        act_layer = nn.Hardtanh
    elif act == 'Sigmoid':
        act_layer = nn.Sigmoid
    else:
        print("Invalid activation function")
        raise Exception("Invalid activation function")
    return act_layer 
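
Note that get_act returns the activation class itself rather than an instance, so the caller is responsible for instantiating it; a hedged usage sketch:

act_layer = get_act('RReLU')   # the nn.RReLU class, not an instance
layer = act_layer()            # instantiate, optionally with lower/upper arguments
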
Example #6
Source File: Base_Network.py    From nn_builder with MIT License
def create_str_to_activations_converter(self):
    """Creates a dictionary which converts strings to activations"""
    str_to_activations_converter = {
        "elu": nn.ELU(), "hardshrink": nn.Hardshrink(), "hardtanh": nn.Hardtanh(),
        "leakyrelu": nn.LeakyReLU(), "logsigmoid": nn.LogSigmoid(), "prelu": nn.PReLU(),
        "relu": nn.ReLU(), "relu6": nn.ReLU6(), "rrelu": nn.RReLU(), "selu": nn.SELU(),
        "sigmoid": nn.Sigmoid(), "softplus": nn.Softplus(), "logsoftmax": nn.LogSoftmax(),
        "softshrink": nn.Softshrink(), "softsign": nn.Softsign(), "tanh": nn.Tanh(),
        "tanhshrink": nn.Tanhshrink(), "softmin": nn.Softmin(), "softmax": nn.Softmax(dim=1),
        "none": None,
    }
    return str_to_activations_converter
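
Because the dictionary maps each string to an already-constructed module, repeated lookups of the same key return the same instance; a hypothetical usage sketch (the surrounding class is assumed):

converter = self.create_str_to_activations_converter()
act = converter["rrelu"]   # the shared nn.RReLU() instance
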
Example #7
Source File: test_cli.py    From skorch with BSD 3-Clause "New" or "Revised" License
def test_resolve_dotted_name_instantiated(self, resolve_dotted_name):
    result = resolve_dotted_name('torch.nn.RReLU(0.123, upper=0.456)')
    assert isinstance(result, RReLU)
    assert np.isclose(result.lower, 0.123)
    assert np.isclose(result.upper, 0.456)
Example #8
Source File: unet.py    From elektronn3 with MIT License
def get_activation(activation):
    if isinstance(activation, str):
        if activation == 'relu':
            return nn.ReLU()
        elif activation == 'leaky':
            return nn.LeakyReLU(negative_slope=0.1)
        elif activation == 'prelu':
            return nn.PReLU(num_parameters=1)
        elif activation == 'rrelu':
            return nn.RReLU()
        elif activation == 'lin':
            return nn.Identity()
    else:
        # Deep copy is necessary in case of parametrized activations
        return copy.deepcopy(activation) 
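
When an already-constructed module is passed instead of a string, it is deep-copied so the returned activation does not share state with the argument; a hedged usage sketch:

act_from_str = get_activation('rrelu')             # a fresh nn.RReLU()
act_from_mod = get_activation(nn.RReLU(0.1, 0.3))  # an independent copy of the given module
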
Example #9
Source File: base_utils.py    From pt-ranking.github.io with MIT License
def get_AF(af_str):
    """
    Given the string identifier, get PyTorch-supported activation function.

    """
    if af_str == 'R':
        return nn.ReLU()         # ReLU(x)=max(0,x)

    elif af_str == 'LR':
        return nn.LeakyReLU()    # LeakyReLU(x)=max(0,x)+negative_slope∗min(0,x)

    elif af_str == 'RR':
        return nn.RReLU()        # the randomized leaky rectified linear unit function

    elif af_str == 'E':          # ELU(x)=max(0,x)+min(0,α∗(exp(x)−1))
        return nn.ELU()

    elif af_str == 'SE':         # SELU(x)=scale∗(max(0,x)+min(0,α∗(exp(x)−1)))
        return nn.SELU()

    elif af_str == 'CE':         # CELU(x)=max(0,x)+min(0,α∗(exp(x/α)−1))
        return nn.CELU()

    elif af_str == 'S':
        return nn.Sigmoid()

    elif af_str == 'SW':
        #return SWISH()
        raise NotImplementedError

    elif af_str == 'T':
        return nn.Tanh()

    elif af_str == 'ST':         # a kind of normalization
        return nn.Softmax(dim=1)  # rescales the elements along dim 1 to lie in (0, 1) and sum to 1

    elif af_str == 'EP':
        #return Exp()
        raise NotImplementedError

    else:
        raise NotImplementedError
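
A hedged usage sketch of the helper above (the variable names are illustrative):

relu = get_AF('R')     # nn.ReLU()
rrelu = get_AF('RR')   # nn.RReLU()
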