Python torch.nn.MarginRankingLoss() Examples

The following are 30 code examples of torch.nn.MarginRankingLoss(). You can go to the original project or source file by following the link above each example. You may also want to check out all other available functions and classes of the torch.nn module.
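As a quick refresher before the examples: nn.MarginRankingLoss(x1, x2, y) computes max(0, -y * (x1 - x2) + margin) elementwise, so a target y of +1 asks x1 to be ranked above x2 by at least the margin, and -1 asks the opposite. A minimal, self-contained sketch (the tensor values are chosen purely for illustration):

import torch
import torch.nn as nn

x1 = torch.tensor([0.9, 0.2, 0.7])   # scores that should rank higher
x2 = torch.tensor([0.1, 0.8, 0.4])   # scores that should rank lower
y = torch.ones(3)                    # +1 for every pair: "x1 should beat x2"

criterion = nn.MarginRankingLoss(margin=0.5)
loss = criterion(x1, x2, y)          # mean of max(0, -y * (x1 - x2) + 0.5)

The examples below wrap exactly this module inside larger losses: triplet losses for re-identification, pairwise losses for recommendation, and margin losses for knowledge-graph embeddings.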
Example #1
Source File: pairwise_ranking_loss.py    From torecsys with MIT License
def __init__(self, 
                 margin    : float = 1.0, 
                 reduction : str = None):
        r"""Initialize TripletLoss
        
        Args:
            margin (float, optional): size of margin. Defaults to 1.0.
            reduction (str, optional): method of reduction. Defaults to None.
        """
        # Refer to parent class
        super(TripletLoss, self).__init__()

        # Initialize module with input margin
        if margin:
            self.parser = margin_ranking_loss_parser
            self.loss = nn.MarginRankingLoss(margin=margin, reduction=reduction)
        else:
            self.parser = soft_margin_loss_parser
            self.loss = nn.SoftMarginLoss(reduction=reduction) 
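The forward pass is not shown here, but a pairwise ranking loss of this kind is typically called with the scores of observed (positive) items, the scores of sampled negative items, and a target of ones; the variable names below are illustrative and not taken from the torecsys source. Note also that current PyTorch expects reduction to be 'none', 'mean', or 'sum', so a caller would normally pass one of those strings rather than leaving the None default above.

pos_scores = torch.randn(32)             # scores of positive (observed) items
neg_scores = torch.randn(32)             # scores of sampled negative items
target = torch.ones_like(pos_scores)     # +1: positives should outrank negatives

loss_fn = nn.MarginRankingLoss(margin=1.0, reduction='mean')
loss = loss_fn(pos_scores, neg_scores, target)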
Example #2
Source File: triplet_loss.py    From PAST-ReID with MIT License
def __init__(self, args, margin=None, name=None, tri_sampler_type='CTL'):
        self.margin = margin
        self.args = args
        self.name = name
        self.tri_sampler_type = tri_sampler_type
        if margin is not None:
            if self.tri_sampler_type == 'CTL':
                self.ranking_loss = nn.MarginRankingLoss(margin=self.margin)
            elif self.tri_sampler_type == 'RTL':
                self.ranking_loss = SoftMarginTriplet(margin=self.margin)
            elif self.tri_sampler_type == 'CTL_RTL':
                if '_CTL' in name:
                    self.ranking_loss = nn.MarginRankingLoss(margin=self.margin)
                if '_RTL' in name:
                    self.ranking_loss = SoftMarginTriplet(margin=self.margin)
        else:
            self.ranking_loss = nn.SoftMarginLoss() 
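Across the re-identification triplet losses in this list (this one and the open-reid style TripletLoss variants further down), the stored ranking loss is typically applied in the forward pass to batch-hard distances: the hardest positive distance dist_ap and hardest negative distance dist_an per anchor, with a target of ones so that the negative is pushed farther away than the positive by the margin. A condensed sketch of that pattern (variable names are illustrative, not copied from PAST-ReID):

dist_ap = torch.rand(32)              # hardest positive distance per anchor
dist_an = torch.rand(32)              # hardest negative distance per anchor
y = torch.ones_like(dist_an)          # +1: dist_an should exceed dist_ap

ranking_loss = nn.MarginRankingLoss(margin=0.3)
loss = ranking_loss(dist_an, dist_ap, y)   # mean of max(0, dist_ap - dist_an + margin)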
Example #3
Source File: base.py    From incremental_learning.pytorch with MIT License
def github_ucir_ranking_mr(logits, targets, n_classes, task_size, nb_negatives=2, margin=0.2):
    gt_index = torch.zeros(logits.size()).to(logits.device)
    gt_index = gt_index.scatter(1, targets.view(-1, 1), 1).ge(0.5)
    gt_scores = logits.masked_select(gt_index)
    #get top-K scores on novel classes
    num_old_classes = logits.shape[1] - task_size
    max_novel_scores = logits[:, num_old_classes:].topk(nb_negatives, dim=1)[0]
    #the index of hard samples, i.e., samples of old classes
    hard_index = targets.lt(num_old_classes)
    hard_num = torch.nonzero(hard_index).size(0)
    #print("hard examples size: ", hard_num)
    if hard_num > 0:
        gt_scores = gt_scores[hard_index].view(-1, 1).repeat(1, nb_negatives)
        max_novel_scores = max_novel_scores[hard_index]
        assert (gt_scores.size() == max_novel_scores.size())
        assert (gt_scores.size(0) == hard_num)
        #print("hard example gt scores: ", gt_scores.size(), gt_scores)
        #print("hard example max novel scores: ", max_novel_scores.size(), max_novel_scores)
        loss = nn.MarginRankingLoss(margin=margin)(gt_scores.view(-1, 1), \
            max_novel_scores.view(-1, 1), torch.ones(hard_num*nb_negatives).to(logits.device))
        return loss
    return torch.tensor(0).float() 
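This helper implements the UCIR-style margin-ranking term for incremental learning: for samples whose label belongs to an old class, the ground-truth logit is ranked against the top-nb_negatives logits of the new task's classes. A hedged call with synthetic shapes, only to show what the function expects:

# Synthetic shapes: 8 old classes plus a new task of 4 classes.
n_classes, task_size, batch = 12, 4, 8
logits = torch.randn(batch, n_classes)
targets = torch.randint(0, n_classes, (batch,))

mr_loss = github_ucir_ranking_mr(logits, targets, n_classes, task_size,
                                 nb_negatives=2, margin=0.2)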
Example #4
Source File: losses.py    From Ithemal with MIT License
def mse_loss_plus_rank_loss(output,target):

    cost = output
    target_cost = target

    if output.size()[0] > 1:
        inter = output[:-1]
        inter_1 = output[1:]
    else: #emulate no rank loss
        inter = torch.ones(1)
        inter_1 = 2 * torch.ones(1)

    target_rank = torch.ones(inter.size())

    loss_mse = nn.MSELoss(reduce = False)
    loss1 = torch.sqrt(loss_mse(cost, target_cost)) / (target_cost + 1e-3)
    loss1 = torch.mean(loss1)

    loss_rank = nn.MarginRankingLoss()
    loss2 = loss_rank(inter_1, inter, target_rank)

    return [loss1, loss2] 
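The Ithemal loss pairs a relative-error regression term with a ranking term that penalizes consecutive predictions for decreasing, assuming the batch is ordered by increasing true cost; nn.MSELoss(reduce=False) is the legacy spelling of reduction='none'. The two terms are returned separately, so the caller decides how to weight them. A minimal, hedged call with dummy tensors (not from the Ithemal training loop):

output = torch.tensor([1.0, 2.0, 3.0, 4.0])   # predicted costs
target = torch.tensor([1.1, 1.9, 3.2, 3.8])   # measured costs

mse_term, rank_term = mse_loss_plus_rank_loss(output, target)
total = mse_term + rank_term                  # one possible way to combine them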
Example #5
Source File: loss.py    From batch-dropblock-network with MIT License
def __init__(self, margin=None):
        self.margin = margin
        if margin is not None:
            self.ranking_loss = nn.MarginRankingLoss(margin=margin)
        else:
            self.ranking_loss = nn.SoftMarginLoss() 
Example #6
Source File: triplet.py    From catalyst with Apache License 2.0
def __init__(self, margin: float = 0.3):
        """
        Args:
            margin (float): margin for triplet
        """
        super().__init__()
        self.margin = margin
        self.ranking_loss = nn.MarginRankingLoss(margin=margin) 
Example #7
Source File: deep_loss.py    From Deep-Person with MIT License
def __init__(self, margin=0):
        super(DeepLoss, self).__init__()
        self.triplet_criterion = nn.MarginRankingLoss(margin=margin)
        self.soft_criterion = nn.CrossEntropyLoss() 
Example #8
Source File: triplet.py    From Deep-Person with MIT License
def __init__(self, margin=0):
        super(TripletLoss, self).__init__()
        self.margin = margin
        self.ranking_loss = nn.MarginRankingLoss(margin=margin) 
Example #9
Source File: TransH.py    From NeuralTripleTranslation with Apache License 2.0
def loss_func(self,p_score,n_score):
		criterion= nn.MarginRankingLoss(self.config.margin,False).cuda()
		y=Variable(torch.Tensor([-1])).cuda()
		loss=criterion(p_score,n_score,y)
		return loss 
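In this and the other knowledge-graph embedding examples (TransD, TransR, RHINE), the score is a distance, so the target of -1 asks the corrupted-triple score n_score to exceed the positive-triple score p_score by the margin; RESCAL below passes +1 because there a higher score means a more plausible triple. The second positional argument False is the legacy size_average flag, equivalent to reduction='sum' in current PyTorch. A hedged, CPU-only sketch of the same computation (illustrative margin and batch size, no Variable or .cuda()):

p_score = torch.rand(128)              # distances of positive triples
n_score = torch.rand(128)              # distances of corrupted triples
y = torch.tensor([-1.0])               # -1: corrupted triples should be farther

criterion = nn.MarginRankingLoss(margin=1.0, reduction='sum')
loss = criterion(p_score, n_score, y)  # sum of max(0, p_score - n_score + margin)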
Example #10
Source File: TransD.py    From NeuralTripleTranslation with Apache License 2.0
def loss_func(self,p_score,n_score):
		criterion= nn.MarginRankingLoss(self.config.margin,False).cuda()
		y=Variable(torch.Tensor([-1])).cuda()
		loss=criterion(p_score,n_score,y)
		return loss 
Example #11
Source File: triplet.py    From PAST-ReID with MIT License
def __init__(self, margin=0):
        super(OnlineTripletLoss, self).__init__()
        self.margin = margin
        self.ranking_loss = nn.MarginRankingLoss(margin=margin) 
Example #12
Source File: RESCAL.py    From NeuralTripleTranslation with Apache License 2.0
def loss_func(self,p_score,n_score):
		criterion= nn.MarginRankingLoss(self.config.margin,False).cuda()
		y=Variable(torch.Tensor([1])).cuda()
		loss=criterion(p_score,n_score,y)
		return loss 
Example #13
Source File: TransR.py    From NeuralTripleTranslation with Apache License 2.0
def loss_func(self,p_score,n_score):
		criterion= nn.MarginRankingLoss(self.config.margin,False).cuda()
		y=Variable(torch.Tensor([-1])).cuda()
		loss=criterion(p_score,n_score,y)
		return loss 
Example #14
Source File: tri_clu_loss.py    From Exploit-Unknown-Gradually with MIT License
def __init__(self, clusters, margin=0,):
        super(TripletClusteringLoss, self).__init__()
        assert isinstance(clusters, torch.autograd.Variable)
        self.clusters = clusters
        self.margin = margin
        self.ranking_loss = nn.MarginRankingLoss(margin=margin)
        self.num_classes = clusters.size(0)
        self.num_features = clusters.size(1)
        self.dist = torch.pow(self.clusters, 2).sum(dim=1, keepdim=True) 
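The last line precomputes the squared L2 norms of the cluster centers, presumably the fixed half of the usual squared-Euclidean expansion ||f - c||^2 = ||f||^2 + ||c||^2 - 2 f.c evaluated later in the forward pass (not shown here). A hedged sketch of that expansion with illustrative shapes:

features = torch.randn(32, 2048)                            # batch of embeddings
clusters = torch.randn(100, 2048)                           # one center per cluster

dist_c = torch.pow(clusters, 2).sum(dim=1, keepdim=True)    # (100, 1), as in __init__
dist_f = torch.pow(features, 2).sum(dim=1, keepdim=True)    # (32, 1)
dist = dist_f + dist_c.t() - 2 * features.mm(clusters.t())  # (32, 100) squared distances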
Example #15
Source File: RHINE.py    From OpenHINE with MIT License
def loss_func(self, p_score, n_score):
        criterion = nn.MarginRankingLoss(self.config.margin, False).cuda()
        y = Variable(torch.Tensor([-1])).cuda()
        loss = criterion(p_score, n_score, y)
        return loss 
Example #16
Source File: triplet.py    From kpm_rw_person_reid with MIT License
def __init__(self, margin=0):
        super(TripletLoss, self).__init__()
        self.margin = margin
        self.ranking_loss = nn.MarginRankingLoss(margin=margin) 
Example #17
Source File: triplet_loss.py    From CVWC2019-Amur-Tiger-Re-ID with Apache License 2.0
def __init__(self, margin=None):
        self.margin = margin
        if margin is not None:
            self.ranking_loss = nn.MarginRankingLoss(margin=margin)
        else:
            self.ranking_loss = nn.SoftMarginLoss() 
Example #18
Source File: triplet.py    From Exploit-Unknown-Gradually with MIT License
def __init__(self, margin=0):
        super(TripletLoss, self).__init__()
        self.margin = margin
        self.ranking_loss = nn.MarginRankingLoss(margin=margin) 
Example #19
Source File: loss_set.py    From Relation-Aware-Global-Attention-Networks with MIT License
def __init__(self, margin=None, metric="euclidean"):
		self.margin = margin
		self.metric = metric
		if margin is not None:
			self.ranking_loss = nn.MarginRankingLoss(margin=margin)
		else:
			self.ranking_loss = nn.SoftMarginLoss() 
Example #20
Source File: losses.py    From vidreid_cosegmentation with Apache License 2.0
def __init__(self, margin=0.3):
        super(TripletLoss, self).__init__()
        self.margin = margin
        self.ranking_loss = nn.MarginRankingLoss(margin=margin) 
Example #21
Source File: loss.py    From ScenarioMeta with MIT License
def __init__(self, margin=0.0):
        nn.Module.__init__(self)
        self.m = nn.MarginRankingLoss(margin=margin) 
Example #22
Source File: triplet.py    From Celeb-reID with MIT License
def __init__(self, margin=0):
        super(TripletLoss, self).__init__()
        self.margin = margin
        self.ranking_loss = nn.MarginRankingLoss(margin=margin) 
Example #23
Source File: hard_mine_triplet_loss.py    From deep-person-reid with MIT License
def __init__(self, margin=0.3):
        super(TripletLoss, self).__init__()
        self.margin = margin
        self.ranking_loss = nn.MarginRankingLoss(margin=margin) 
Example #24
Source File: losses.py    From AlignedReID with MIT License
def __init__(self, margin=0.3, mutual_flag = False):
        super(TripletLossAlignedReID, self).__init__()
        self.margin = margin
        self.ranking_loss = nn.MarginRankingLoss(margin=margin)
        self.ranking_loss_local = nn.MarginRankingLoss(margin=margin)
        self.mutual = mutual_flag 
Example #25
Source File: losses.py    From AlignedReID with MIT License
def __init__(self, margin=0.3, mutual_flag = False):
        super(TripletLoss, self).__init__()
        self.margin = margin
        self.ranking_loss = nn.MarginRankingLoss(margin=margin)
        self.mutual = mutual_flag 
Example #26
Source File: losses.py    From ReXCam with MIT License
def __init__(self, margin=0.3):
        super(TripletLoss, self).__init__()
        self.margin = margin
        self.ranking_loss = nn.MarginRankingLoss(margin=margin) 
Example #27
Source File: triplet_loss.py    From reid_baseline_with_syncbn with MIT License
def __init__(self, margin=None):
        self.margin = margin
        if margin is not None:
            self.ranking_loss = nn.MarginRankingLoss(margin=margin)
        else:
            self.ranking_loss = nn.SoftMarginLoss() 
Example #28
Source File: reid_loss.py    From ARN with MIT License
def __init__(self, margin=None):
        self.margin = margin
        if margin is not None:
            self.ranking_loss = nn.MarginRankingLoss(margin=margin)
        else:
            self.ranking_loss = nn.SoftMarginLoss() 
Example #29
Source File: triplet.py    From open-reid with MIT License
def __init__(self, margin=0):
        super(TripletLoss, self).__init__()
        self.margin = margin
        self.ranking_loss = nn.MarginRankingLoss(margin=margin) 
Example #30
Source File: loss.py    From Cross-Modal-Re-ID-baseline with MIT License
def __init__(self, batch_size, margin=0.3):
        super(OriTripletLoss, self).__init__()
        self.margin = margin
        self.ranking_loss = nn.MarginRankingLoss(margin=margin)