Python torch.nn.functional.relu6() Examples

The following are 26 code examples of torch.nn.functional.relu6(), drawn from open-source projects. You can go to the original project or source file by following the link above each example. You may also want to check out the other available functions and classes of the module torch.nn.functional.
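F.relu6 clamps its input elementwise to the range [0, 6]: relu6(x) = min(max(0, x), 6). The cap at 6 keeps activations in a range that behaves well under low-precision arithmetic, which is why it appears throughout the MobileNet family. As a quick orientation before the examples, here is a minimal sketch (the tensor values are illustrative only):

import torch
import torch.nn.functional as F

x = torch.tensor([-2.0, 0.5, 3.0, 7.0])
print(F.relu6(x))             # tensor([0.0000, 0.5000, 3.0000, 6.0000])
print(x.clamp(min=0, max=6))  # same result; relu6 is a clamp to [0, 6]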
Example #1
Source File: mobilenet_v2.py    From MetaPruning with MIT License
def forward(self, x, inp_scale_id):

        inp_scale = overall_channel_scale[inp_scale_id]

        inp = int(self.base_inp * inp_scale)

        scale_tensor = torch.FloatTensor([inp_scale/self.max_overall_scale]).to(x.device)

        fc11_out = F.relu(self.fc11(scale_tensor))
        conv1_weight = self.fc12(fc11_out).view(self.base_oup, self.max_inp_channel, 1, 1)

        out = F.conv2d(x, conv1_weight[:, :inp, :, :], bias=None, stride=self.stride, padding=0)
        out = self.first_bn[inp_scale_id](out)
        out = F.relu6(out)

        return out 
Example #2
Source File: model.py    From DeepRecommender with MIT License
def activation(input, kind):
  #print("Activation: {}".format(kind))
  if kind == 'selu':
    return F.selu(input)
  elif kind == 'relu':
    return F.relu(input)
  elif kind == 'relu6':
    return F.relu6(input)
  elif kind == 'sigmoid':
    return F.sigmoid(input)
  elif kind == 'tanh':
    return F.tanh(input)
  elif kind == 'elu':
    return F.elu(input)
  elif kind == 'lrelu':
    return F.leaky_relu(input)
  elif kind == 'swish':
    return input*F.sigmoid(input)
  elif kind == 'none':
    return input
  else:
    raise ValueError('Unknown non-linearity type') 
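One caveat about this dispatcher: F.sigmoid and F.tanh are deprecated in favor of torch.sigmoid and torch.tanh, and recent PyTorch versions warn on the F.* spellings, so a version-safe variant would call the torch.* functions instead. A hypothetical call, assuming the function above is in scope:

import torch

x = torch.randn(4, 8) * 10
y = activation(x, kind='relu6')  # every element of y now lies in [0, 6]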
Example #3
Source File: mobilenet_v2.py    From MetaPruning with MIT License
def forward(self, x):

        out = self.conv1(x)
        out = self.bn1(out)
        out = F.relu6(out)

        out = self.conv2(out)
        out = self.bn2(out)
        out = F.relu6(out)

        out = self.conv3(out)
        out = self.bn3(out)

        if self.inp == self.oup and self.stride == 1:
            return (out + x)

        else:
            return out 
Example #4
Source File: modules.py    From Pytorch_Lightweight_Network with MIT License
def get_activation(name):
    if isinstance(name, nn.Module):
        return name
    if name == 'default':
        return get_activation(get_default_activation())
    elif name == 'relu':
        return nn.ReLU(inplace=True)
    elif name == 'relu6':
        return nn.ReLU6(inplace=True)
    elif name == 'leaky_relu':
        return nn.LeakyReLU(negative_slope=0.1, inplace=True)
    elif name == 'sigmoid':
        return nn.Sigmoid()
    elif name == 'hswish':
        return HardSwish(inplace=True)
    elif name == 'swish':
        return Swish()
    else:
        raise NotImplementedError("No activation named %s" % name) 
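HardSwish, Swish, and get_default_activation are helpers defined elsewhere in that project, so only the built-in names are exercised in this hedged usage sketch:

import torch

act = get_activation('relu6')    # returns nn.ReLU6(inplace=True)
y = act(torch.randn(2, 3) * 10)  # outputs clamped to [0, 6]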
Example #5
Source File: mobilenet_v2.py    From MetaPruning with MIT License
def forward(self, x):

        out = self.conv1(x)
        out = self.bn1(out)
        out = F.relu6(out)

        return out 
Example #6
Source File: mobilenet_utils.py    From Auto-PyTorch with Apache License 2.0
def hard_sigmoid(x, inplace=False):
    if inplace:
        return x.add_(3.).clamp_(0., 6.).div_(6.)
    else:
        return F.relu6(x + 3.) / 6. 
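Both branches compute the hard sigmoid, hsigmoid(x) = relu6(x + 3) / 6; the in-place path only avoids allocating intermediates. A quick sanity check, assuming the function above is in scope (note the clone, since inplace=True mutates its argument):

import torch

x = torch.linspace(-5., 5., steps=11)
out = hard_sigmoid(x.clone(), inplace=True)
ref = hard_sigmoid(x, inplace=False)
print(torch.allclose(out, ref))  # True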
Example #7
Source File: model_for_FLOPs.py    From MetaPruning with MIT License
def forward(self, x):

        out = self.conv1(x)
        out = self.bn1(out)
        out = F.relu6(out)

        return out 
Example #8
Source File: mobilenet_v2.py    From MetaPruning with MIT License
def forward(self, x, mid_scale_id, inp_scale_id, oup_scale_id):

        mid_scale = mid_channel_scale[mid_scale_id]
        inp_scale = overall_channel_scale[inp_scale_id]
        oup_scale = overall_channel_scale[oup_scale_id]

        mid = int(self.max_mid * mid_scale)
        inp = int(self.max_inp * inp_scale)
        oup = int(self.max_oup * oup_scale)

        scale_ratio_tensor = torch.FloatTensor([mid_scale, inp_scale, oup_scale]).to(x.device)

        fc11_out = F.relu(self.fc11(scale_ratio_tensor))
        conv1_weight = self.fc12(fc11_out).view(self.max_mid, self.max_inp, 1, 1)

        fc21_out = F.relu(self.fc21(scale_ratio_tensor))
        conv2_weight = self.fc22(fc21_out).view(self.max_mid, 1, 3, 3)

        fc31_out = F.relu(self.fc31(scale_ratio_tensor))
        conv3_weight = self.fc32(fc31_out).view(self.max_oup, self.max_mid, 1, 1)

        out = F.conv2d(x, conv1_weight[:mid, :inp, :, :], bias=None, stride=1, padding=0, groups=1)
        out = self.bn1[mid_scale_id](out)
        out = F.relu6(out)

        out = F.conv2d(out, conv2_weight[:mid, :, :, :], bias=None, stride=self.stride, padding=1, groups=mid)
        out = self.bn2[mid_scale_id](out)
        out = F.relu6(out)

        out = F.conv2d(out, conv3_weight[:oup, :mid, :, :], bias=None, stride=1, padding=0, groups=1)
        out = self.bn3[oup_scale_id](out)

        if self.max_inp == self.max_oup:
            return (out + x)

        else:
            return out 
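In the MetaPruning examples above, the convolution weights are generated by a small hypernetwork rather than stored directly: each fully connected pair (fc11/fc12, fc21/fc22, fc31/fc32) maps the sampled channel-scale ratios to a full-size weight tensor, which is then sliced (e.g. conv1_weight[:mid, :inp, :, :]) down to the pruned width before being applied with F.conv2d, with F.relu6 as the MobileNetV2-style nonlinearity in between.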
Example #9
Source File: mobilenet_v2.py    From MetaPruning with MIT License
def forward(self, x, oup_scale_id):

        oup_scale = overall_channel_scale[oup_scale_id]
        oup = int(self.base_oup * oup_scale)
        scale_tensor = torch.FloatTensor([oup_scale/self.max_overall_scale]).to(x.device)

        fc11_out = F.relu(self.fc11(scale_tensor))
        conv1_weight = self.fc12(fc11_out).view(self.max_oup_channel, self.base_inp, 3, 3)

        out = F.conv2d(x, conv1_weight[:oup, :, :, :], bias=None, stride=self.stride, padding=1)
        out = self.first_bn[oup_scale_id](out)
        out = F.relu6(out)

        return out 
Example #10
Source File: DBFace.py    From PINTO_model_zoo with MIT License
def forward(self, x):
        out = x * F.relu6(x + 3, inplace=True) / 6
        return out 
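This is the hard-swish activation popularized by MobileNetV3: hswish(x) = x * relu6(x + 3) / 6, a piecewise-linear approximation of x * sigmoid(x) that avoids computing an exponential. Wrapped as a module, a minimal sketch (the class name is an assumption, not the project's code) looks like:

import torch.nn as nn
import torch.nn.functional as F

class HSwish(nn.Module):
    # hard-swish: x * relu6(x + 3) / 6
    def forward(self, x):
        return x * F.relu6(x + 3.0) / 6.0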
Example #11
Source File: activations_jit.py    From pytorch-image-models with Apache License 2.0
def hard_sigmoid_jit(x, inplace: bool = False):
    # return F.relu6(x + 3.) / 6.
    return (x + 3).clamp(min=0, max=6).div(6.)  # clamp seems ever so slightly faster? 
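These helpers are written so they can be compiled with TorchScript; a hedged sketch of scripting such a function (the function name here is hypothetical, and the decorator usage is illustrative):

import torch

@torch.jit.script
def hard_sigmoid_scripted(x, inplace: bool = False):
    # inplace is accepted for API compatibility but unused in this path
    return (x + 3).clamp(min=0, max=6).div(6.)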
Example #12
Source File: relu6.py    From onnx2keras with MIT License
def forward(self, x):
        from torch.nn import functional as F
        return F.relu6(x) 
Example #13
Source File: model.py    From MobileNetV3-Pytorch with MIT License
def forward(self, x):
        return F.relu6(x + 3., inplace=self.inplace) / 6. 
Example #14
Source File: test_pyprof_nvtx.py    From apex with BSD 3-Clause "New" or "Revised" License
def test_relu6(self):
        inp = torch.randn(1, 3, 32, 32, device='cuda', dtype=self.dtype)
        output = F.relu6(inp, inplace=False) 
Example #15
Source File: mobilenetv3.py    From pytracking with GNU General Public License v3.0
def forward(self, x):
        out = F.relu6(x + 3., self.inplace) / 6.
        return out * x 
Example #16
Source File: mobilenetv3.py    From pytracking with GNU General Public License v3.0
def forward(self, x):
        return F.relu6(x + 3., inplace=self.inplace) / 6. 
Example #17
Source File: MobilenetV3.py    From DBNet.pytorch with Apache License 2.0
def forward(self, x):
        out = x * F.relu6(x + 3, inplace=True) / 6
        return out 
Example #18
Source File: DBFace.py    From PINTO_model_zoo with MIT License
def forward(self, x):
        out = F.relu6(x + 3, inplace=True) / 6
        return out 
Example #19
Source File: DBFace_org.py    From PINTO_model_zoo with MIT License
def forward(self, x):
        out = x * F.relu6(x + 3, inplace=True) / 6
        return out 
Example #20
Source File: DBFace_org.py    From PINTO_model_zoo with MIT License
def forward(self, x):
        out = F.relu6(x + 3, inplace=True) / 6
        return out 
Example #21
Source File: activations.py    From Efficient-Segmentation-Networks with MIT License
def forward(self, x):
        return F.relu6(x + 3., inplace=self.inplace) / 6. 
Example #22
Source File: activations.py    From Efficient-Segmentation-Networks with MIT License
def forward(self, x):
        return x * F.relu6(x + 3., inplace=self.inplace) / 6. 
Example #23
Source File: mobilenetv2.py    From deep-person-reid with MIT License
def forward(self, x):
        return F.relu6(self.bn(self.conv(x))) 
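Here self.conv and self.bn are defined in the module's __init__, which the snippet omits. A minimal sketch of such a conv-BN-ReLU6 block (layer names and parameters are assumptions, not the project's exact code):

import torch.nn as nn
import torch.nn.functional as F

class ConvBlock(nn.Module):
    # Conv -> BatchNorm -> ReLU6, the basic MobileNetV2-style unit
    def __init__(self, in_c, out_c, k=3, s=1):
        super().__init__()
        self.conv = nn.Conv2d(in_c, out_c, k, stride=s, padding=k // 2, bias=False)
        self.bn = nn.BatchNorm2d(out_c)

    def forward(self, x):
        return F.relu6(self.bn(self.conv(x)))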
Example #24
Source File: MobileNet.py    From ReXCam with MIT License
def forward(self, x):
        return F.relu6(self.bn(self.conv(x))) 
Example #25
Source File: Xception.py    From ReXCam with MIT License
def forward(self, x):
        return F.relu6(self.bn(self.conv(x))) 
Example #26
Source File: activations_jit.py    From pytorch-image-models with Apache License 2.0
def hard_swish_jit(x, inplace: bool = False):
    # return x * (F.relu6(x + 3.) / 6)
    return x * (x + 3).clamp(min=0, max=6).div(6.)  # clamp seems ever so slightly faster?
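As the comments in Examples #11 and #26 note, relu6(x) and x.clamp(0, 6) are interchangeable; a quick check of the two hard-swish formulations, assuming hard_swish_jit from the example above is in scope:

import torch
import torch.nn.functional as F

x = torch.randn(1000)
a = x * (F.relu6(x + 3.) / 6)
print(torch.allclose(a, hard_swish_jit(x)))  # True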