Java Code Examples for org.apache.mahout.math.Vector#plus()

The following examples show how to use org.apache.mahout.math.Vector#plus(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: 1000021_CDbwEvaluator_t.java    From coming with MIT License 6 votes vote down vote up
/**
 * Computes the average per-dimension standard deviation of the representative
 * points of cluster {@code cI} and caches it in {@code stDevs}.
 */
private void setStDev(int cI) {
  List<VectorWritable> repPts = representativePoints.get(cI);
  int s0 = 0;       // point count
  Vector s1 = null; // element-wise sum of points
  Vector s2 = null; // element-wise sum of squared points
  for (VectorWritable vw : repPts) {
    Vector v = vw.get();
    s0++;
    if (s1 == null) {
      s1 = v.clone();
      s2 = v.times(v);
    } else {
      s1 = s1.plus(v);
      s2 = s2.plus(v.times(v));
    }
  }
  // A standard deviation needs at least two points; this also protects
  // against null s1/s2 when the representative-point list is empty.
  if (s0 > 1) {
    Vector std = s2.times(s0).minus(s1.times(s1)).assign(new SquareRootFunction()).divide(s0);
    stDevs.put(cI, std.zSum() / std.size());
  }
}
 
Example 2
Source File: 1000021_CDbwEvaluator_s.java    From coming with MIT License 6 votes vote down vote up
/**
 * Computes the average per-dimension standard deviation of the representative
 * points of cluster {@code cI} and caches it in {@code stDevs}.
 *
 * @param cI index of the cluster whose representative points are evaluated
 */
private void setStDev(int cI) {
  List<VectorWritable> repPts = representativePoints.get(cI);
  int s0 = 0;       // point count
  Vector s1 = null; // running element-wise sum of points
  Vector s2 = null; // running element-wise sum of squared points
  for (VectorWritable vw : repPts) {
    s0++;
    Vector v = vw.get();
    s1 = s1 == null ? v.clone() : s1.plus(v);
    s2 = s2 == null ? v.times(v) : s2.plus(v.times(v));
  }
  // FIX: guard against an empty/singleton representative-point list. Without
  // this, s1/s2 remain null for an empty list and the expression below throws
  // a NullPointerException; a standard deviation also needs at least 2 points.
  if (s0 > 1) {
    Vector std = s2.times(s0).minus(s1.times(s1)).assign(new SquareRootFunction()).divide(s0);
    double d = std.zSum() / std.size();
    stDevs.put(cI, d);
  }
}
 
Example 3
Source File: CMLCRFElasticNet.java    From pyramid with Apache License 2.0 6 votes vote down vote up
/**
 * Backtracking line search for the elastic-net penalized model: starting from
 * a unit step, shrinks the step length until the sufficient-decrease
 * (Armijo-style) condition on the penalized objective is satisfied, leaving
 * the model weights set to the accepted point.
 * Reference: "An improved glmnet for l1-regularized logistic regression".
 *
 * @param searchDirection direction along which the weights are moved
 * @param gradient        gradient used in the sufficient-decrease test
 */
private void lineSearch(Vector searchDirection, Vector gradient){
    final double shrinkage = 0.5; // step multiplier applied on rejection
    final double c = 1e-4;        // sufficient-decrease constant
    double stepLength = 1;        // initial step length
    Vector start = cmlcrf.getWeights().getAllWeights();
    double penalty = getPenalty();
    double value = getValue();
    double product = gradient.dot(searchDirection);

    while (true) {
        // Candidate point: start + stepLength * direction.
        Vector candidate = start.plus(searchDirection.times(stepLength));
        cmlcrf.getWeights().setWeightVector(candidate);
        double candidatePenalty = getPenalty();
        double candidateValue = getValue();
        // Accept once the penalized objective has decreased sufficiently;
        // once stepLength underflows to 0 the candidate equals the start and
        // the condition holds trivially, so the loop terminates.
        if (candidateValue <= value + c * stepLength * (product + candidatePenalty - penalty)) {
            break;
        }
        stepLength *= shrinkage;
    }
}
 
Example 4
Source File: CBMInspector.java    From pyramid with Apache License 2.0 5 votes vote down vote up
/**
 * Averages the weight vectors of the binary classifiers for {@code label}
 * across all mixture components of the CBM and wraps the mean as a
 * two-class Weights object.
 */
public static Weights getMean(CBM bmm, int label){
    int numClusters = bmm.getNumComponents();
    // Component [0][0] is only used to read sizes shared by all classifiers.
    LogisticRegression first = (LogisticRegression) bmm.getBinaryClassifiers()[0][0];
    int length = first.getWeights().getAllWeights().size();
    int numFeatures = first.getNumFeatures();
    Vector sum = new DenseVector(length);
    for (int k = 0; k < numClusters; k++) {
        LogisticRegression component = (LogisticRegression) bmm.getBinaryClassifiers()[k][label];
        sum = sum.plus(component.getWeights().getAllWeights());
    }
    return new Weights(2, numFeatures, sum.divide(numClusters));
}
 
Example 5
Source File: CMLCRFElasticNet.java    From pyramid with Apache License 2.0 5 votes vote down vote up
/**
 * Runs one optimization iteration: refreshes all cached score/probability
 * matrices and counts, solves a sub-problem for each supported label
 * combination from the same starting weights, and combines the accumulated
 * per-combination weights via a line search before recording the new
 * objective value in the terminator.
 */
public void iterate() {
//        System.out.println("weights: " + cmlcrf.getWeights().getAllWeights());
        // O(NdL)
//        System.out.println(Arrays.toString(cmlcrf.getCombinationLabelPartScores()));
        // Refresh cached state in dependency order; later updates read the
        // matrices produced by earlier ones.
        updateClassScoreMatrix();
        cmlcrf.updateCombLabelPartScores();
        updateAssignmentScoreMatrix();
        updateAssignmentProbMatrix();
        updateCombProbSums();
        updatePredictedCounts();
        updateClassProbMatrix();
        // update for each support label set
        // Accumulates the weights produced by each per-combination solve; the
        // sum later serves as the search direction for the line search.
        Vector accumulateWeights = new SequentialAccessSparseVector(numParameters);
        Vector oldWeights = cmlcrf.getWeights().deepCopy().getAllWeights();
        for (int l=0; l<numSupport; l++) {
//            System.out.println("label: " + supportedCombinations.get(l));
            DataSet newData = expandData(l);
            iterateForOneComb(newData, l);
            accumulateWeights = accumulateWeights.plus(cmlcrf.getWeights().getAllWeights());
            // Restore the starting weights so every combination is solved
            // from the same point.
            cmlcrf.getWeights().setWeightVector(oldWeights);
        }
        // lineSearch
        // NOTE(review): the guard below is always true — it looks like a
        // leftover debug toggle for disabling the line search.
        if (true) {
            Vector searchDirection = accumulateWeights;
            Vector gradient = this.predictedCounts.minus(empiricalCounts).divide(numData);
            lineSearch(searchDirection, gradient);
        }

        this.terminator.add(getValue());
    }
 
Example 6
Source File: SupervisedEmbeddingLoss.java    From pyramid with Apache License 2.0 5 votes vote down vote up
/**
 * Computes the gradient of the supervised-embedding loss with respect to
 * every embedding entry, flattened row-major into a single vector of size
 * numData * numFeatures. Each row's gradient combines a pull term toward the
 * original embedding (weighted by alpha) and a pairwise term matching
 * projected distances to target distances (weighted by beta).
 *
 * @return the flattened gradient vector
 */
public Vector getGradient() {
    int numData = this.updatedEmbeddingMatrix.getNumDataPoints();
    int numFeatures = this.updatedEmbeddingMatrix.getNumFeatures();
    int vecSize = numData * numFeatures;
    Vector finalGradient = new DenseVector(vecSize);

    // Hoisted: the projection columns do not change inside the loops.
    Vector projCol0 = this.projMatrix.getColumn(0);
    Vector projCol1 = this.projMatrix.getColumn(1);

    for (int i = 0; i < numData; i++) {
        Vector gradient = new DenseVector(numFeatures);
        Vector q_i = this.updatedEmbeddingMatrix.getRow(i);
        Vector q_i_orig = this.embeddingMatrix.getRow(i);
        // Pull term: 2*alpha*(q_i - q_i_orig) keeps the embedding close to
        // its original value.
        gradient = gradient.plus(q_i.minus(q_i_orig).times(2.0 * this.alpha));

        // Hoisted out of the j-loop: the projection of q_i is j-invariant
        // (it was previously recomputed numData times per row).
        double pi_x = projCol0.dot(q_i);
        double pi_y = projCol1.dot(q_i);

        for (int j = 0; j < numData; j++) {
            Vector q_j = this.updatedEmbeddingMatrix.getRow(j);
            double pj_x = projCol0.dot(q_j);
            double pj_y = projCol1.dot(q_j);
            // Squared distance in the 2-D projected space vs. squared target distance.
            double p_sq = (pi_x - pj_x) * (pi_x - pj_x) + (pi_y - pj_y) * (pi_y - pj_y);
            double d_sq = this.distMatrix.getRow(i).get(j) * this.distMatrix.getRow(i).get(j);
            Vector p_dist_vec = new DenseVector(2);
            p_dist_vec.set(0, pi_x - pj_x);
            p_dist_vec.set(1, pi_y - pj_y);
            // Back-project the 2-D difference through projMatrix.
            // NOTE(review): tmp has projMatrix.getNumDataPoints() entries and is
            // added to a numFeatures-sized gradient — this assumes projMatrix has
            // numFeatures rows; confirm against the class invariants.
            Vector tmp = new DenseVector(this.projMatrix.getNumDataPoints());
            for (int k = 0; k < this.projMatrix.getNumDataPoints(); k++) {
                tmp.set(k, this.projMatrix.getRow(k).dot(p_dist_vec));
            }
            gradient = gradient.plus(tmp.times(4.0 * this.beta * (p_sq - d_sq)));
        }

        // Scatter this row's gradient into the flattened result.
        for (int j = 0; j < numFeatures; j++) {
            finalGradient.set(i * numFeatures + j, gradient.get(j));
        }
    }
    return finalGradient;
}
 
Example 7
Source File: ElasticNetLogisticTrainer.java    From pyramid with Apache License 2.0 5 votes vote down vote up
/**
 * Backtracking line search for the elastic-net penalized logistic model:
 * starting from a unit step, shrinks the step length until the
 * sufficient-decrease (Armijo-style) condition on the penalized loss holds,
 * leaving the model weights set to the accepted point.
 * Reference: "An improved glmnet for l1-regularized logistic regression".
 *
 * @param searchDirection direction along which the weights are moved
 * @param gradient        gradient used in the sufficient-decrease test
 */
private void lineSearch(Vector searchDirection, Vector gradient){
    final double shrinkage = 0.5; // step multiplier applied on rejection
    final double c = 1e-4;        // sufficient-decrease constant
    double stepLength = 1;        // initial step length
    Vector start = logisticRegression.getWeights().getAllWeights();
    double penalty = penalty();
    double value = loss(penalty);
    if (logger.isDebugEnabled()){
        logger.debug("start line search");
        logger.debug("initial loss = "+loss());
    }
    double product = gradient.dot(searchDirection);

    while (true) {
        // Candidate point: start + stepLength * direction.
        Vector candidate = start.plus(searchDirection.times(stepLength));
        logisticRegression.getWeights().setWeightVector(candidate);
        double candidatePenalty = penalty();
        double candidateValue = loss(candidatePenalty);
        // Accept once the penalized loss has decreased sufficiently.
        if (candidateValue <= value + c * stepLength * (product + candidatePenalty - penalty)) {
            if (logger.isDebugEnabled()){
                logger.debug("step size = "+stepLength);
                logger.debug("final loss = "+candidateValue);
                logger.debug("line search done");
            }
            break;
        }
        stepLength *= shrinkage;
    }
}
 
Example 8
Source File: LogisticLoss.java    From pyramid with Apache License 2.0 5 votes vote down vote up
/**
 * Gradient of the Gaussian prior penalty: each weight divided by the prior
 * variance, with all bias positions zeroed out (biases are not penalized).
 */
private Vector penaltyGradient(){
    Vector weightsVector = this.logisticRegression.getWeights().getAllWeights();
    // Start from a zero vector and add the scaled weights.
    Vector result = new DenseVector(weightsVector.size()).plus(weightsVector.divide(priorGaussianVariance));
    // Bias terms carry no prior penalty.
    for (int position : logisticRegression.getWeights().getAllBiasPositions()){
        result.set(position, 0);
    }
    return result;
}
 
Example 9
Source File: Step5.java    From recsys-offline with Apache License 2.0 5 votes vote down vote up
/**
 * Sums all partial recommendation vectors received for one user key and
 * emits the combined vector.
 */
public void reduce(VarLongWritable key, Iterable<VectorWritable> values,Context context) throws IOException, InterruptedException{
    Vector sum = null;
    for (VectorWritable vw : values) {
        // First value seeds the accumulator; subsequent ones are added.
        sum = (sum == null) ? vw.get() : sum.plus(vw.get());
    }
    context.write(key, new VectorWritable(sum));
    // Leftover debug output (typo preserved from the original author).
    System.err.println("userid:"+key.toString()+",vecotr:"+sum);//   here also should be the same as my paper's result
}
 
Example 10
Source File: Step5.java    From recsys-offline with Apache License 2.0 5 votes vote down vote up
/**
 * Sums the partial recommendation vectors for one user, then selects the
 * top-N highest-scoring items the user does not already have and emits them
 * as a sorted recommendation list.
 */
public void reduce(VarLongWritable key, Iterable<VectorWritable> values,Context context) throws IOException, InterruptedException{  
  
    int userID=(int)key.get();  
    // Combine all partial vectors into one recommendation vector per user.
    Vector rev=null;  
    for(VectorWritable vec:values){  
        rev=rev==null? vec.get():rev.plus(vec.get());  
    }  
    // Min-heap of size recommendationsPerUser+1: reverseOrder on a
    // by-value comparator keeps the lowest-scoring item at the head so it
    // can be evicted cheaply.
    Queue<RecommendedItem>topItems=new PriorityQueue<RecommendedItem>( recommendationsPerUser+1,  Collections.reverseOrder(ByValueRecommendedItemComparator.getInstance())   );  
    Iterator<Vector.Element>recommendationVectorIterator=  rev.nonZeroes().iterator();  
    while(recommendationVectorIterator.hasNext()){  
        Vector.Element e=recommendationVectorIterator.next();  
        int index=e.index();  
        System.out.println("Vecotr.element.indxe:"+index);  //  test here  find the index is item id or not  ** test result : index is item  
        // Skip items the user already has.
        if(!hasItem(userID,String.valueOf(index))){  
            float value=(float) e.get();  
            if(topItems.size()<recommendationsPerUser){  
                //  here only set index  
                topItems.add(new GenericRecommendedItem(index,value));  
            }else if(value>topItems.peek().getValue()){  
                // Better than the current worst: replace the heap head.
                topItems.add(new GenericRecommendedItem(index,value));  
                topItems.poll();  
            }  
        }  
    }  
    // Drain the heap and sort best-first before emitting.
    List<RecommendedItem>recom=new ArrayList<RecommendedItem>(topItems.size());  
    recom.addAll(topItems);  
    Collections.sort(recom,ByValueRecommendedItemComparator.getInstance());  
    context.write(key, new RecommendedItemsWritable(recom));          
}