Java Code Examples for org.apache.mahout.math.Vector#get()

The following examples show how to use org.apache.mahout.math.Vector#get(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: LaserMessageConsumer.java    From laser with Apache License 2.0 6 votes vote down vote up
/**
 * Computes the known part of the prediction offset for a request:
 * alpha·userFeature + beta·itemFeature plus the bilinear term
 * userFeatureᵀ · quadratic · itemFeature.
 * Returns 0 when the model components (alpha, beta, quadratic) are not
 * yet initialized.
 */
public synchronized double knownOffset(Request value) throws IOException {
	if (alpha == null || beta == null || quadratic == null) {
		return 0;
	}
	final Vector userFeature = value.getUserFeature();
	final Vector itemFeature = value.getItemFeature();

	double offset = alpha.dot(userFeature) + beta.dot(itemFeature);
	// bilinear term: sum over rows r of userFeature[r] * (quadratic[r] · itemFeature)
	for (int r = 0; r < quadratic.numRows(); r++) {
		offset += userFeature.get(r) * quadratic.viewRow(r).dot(itemFeature);
	}
	return offset;
}
 
Example 2
Source File: RegressionTree.java    From pyramid with Apache License 2.0 6 votes vote down vote up
/**
 * Walks the regression tree from the root down to a leaf for a vector that
 * is expected to have no missing values.
 *
 * @return {@code Optional.empty()} as soon as a NaN feature value is
 *         encountered on the path, otherwise the value stored at the leaf.
 */
private Optional<Double> predictNoMissingValue(Vector vector){
    Node current = root;
    while (!current.isLeaf()){
        double value = vector.get(current.getFeatureIndex());
        if (Double.isNaN(value)){
            // missing value on the path — caller must fall back to another strategy
            return Optional.empty();
        }
        current = (value <= current.getThreshold())
                ? current.getLeftChild()
                : current.getRightChild();
    }
    return Optional.of(current.getValue());
}
 
Example 3
Source File: KMeansTest.java    From pyramid with Apache License 2.0 6 votes vote down vote up
/**
 * Renders a gray-scale image from a row-major pixel vector and writes it
 * to {@code imageFile} as PNG, creating parent directories as needed.
 *
 * <p>Bug fix: the original iterated the outer loop over {@code width} and
 * the inner loop over {@code height} while both the vector index
 * ({@code row * width + col}) and {@code setRGB(col, row, …)} treat the
 * outer index as the row — correct only for square images. The loops now
 * iterate rows over {@code height} and columns over {@code width}.
 *
 * @param vector    pixel values, assumed row-major with gray values in [0,255]
 *                  — TODO confirm range against the caller
 * @param height    image height in pixels
 * @param width     image width in pixels
 * @param imageFile output PNG path
 */
private static void plot(Vector vector, int height, int width, String imageFile) throws Exception{
    BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
    for (int row = 0; row < height; row++){
        for (int col = 0; col < width; col++){
            // replicate the gray value into the R, G and B channels
            int v = (int)(vector.get(row * width + col));
            int rgb = 65536 * v + 256 * v + v;
            image.setRGB(col, row, rgb);
        }
    }
    new File(imageFile).getParentFile().mkdirs();
    ImageIO.write(image, "png", new File(imageFile));
}
 
Example 4
Source File: VectorCardIsoSetCalibrator.java    From pyramid with Apache License 2.0 5 votes vote down vote up
/**
 * Calibrates the raw score stored at {@code scoreIndex}, using the
 * calibrator trained for the cardinality stored at {@code cardIndex}.
 * A cardinality never seen during training yields a calibrated score of 0.
 */
public double calibrate(Vector vector){
    final double rawScore = vector.get(scoreIndex);
    final int cardinality = (int) vector.get(cardIndex);
    // no calibrator was fitted for this cardinality
    if (!calibrations.containsKey(cardinality)){
        return 0;
    }
    return calibrations.get(cardinality).predict(rawScore);
}
 
Example 5
Source File: KMeans.java    From pyramid with Apache License 2.0 5 votes vote down vote up
/**
 * Euclidean (L2) distance between two vectors.
 *
 * <p>Iteration is driven by {@code vector1.size()} — assumes
 * {@code vector2} has at least as many elements (TODO confirm callers
 * always pass equal-sized vectors).
 *
 * @return sqrt of the sum of squared component differences
 */
static double distance(Vector vector1, Vector vector2){
    double sumOfSquares = 0;
    for (int i = 0; i < vector1.size(); i++){
        double diff = vector1.get(i) - vector2.get(i);
        sumOfSquares += diff * diff;
    }
    // Math.sqrt is the idiomatic (and typically faster) form of Math.pow(x, 0.5)
    return Math.sqrt(sumOfSquares);
}
 
Example 6
Source File: BRLRInspector.java    From pyramid with Apache License 2.0 5 votes vote down vote up
/**
 * Explains the score of binary classifier {@code classIndex} of a CBM model
 * on {@code vector}: the class bias plus the {@code limit} linear rules with
 * the largest absolute contribution (weight * featureValue).
 */
public static ClassScoreCalculation decisionProcess(CBM cbm, LabelTranslator labelTranslator, double prob,
                                                    Vector vector, int classIndex, int limit){
    LogisticRegression logisticRegression = (LogisticRegression)cbm.getBinaryClassifiers()[0][classIndex];
    ClassScoreCalculation calculation = new ClassScoreCalculation(classIndex,
            labelTranslator.toExtLabel(classIndex),
            logisticRegression.predictClassScore(vector, 1));
    calculation.setClassProbability(prob);
    calculation.addRule(new ConstantRule(logisticRegression.getWeights().getBiasForClass(1)));

    //todo speed up using sparsity
    List<LinearRule> candidates = new ArrayList<>();
    for (int j = 0; j < logisticRegression.getNumFeatures(); j++){
        double weight = logisticRegression.getWeights().getWeightsWithoutBiasForClass(1).get(j);
        double featureValue = vector.get(j);
        LinearRule rule = new LinearRule();
        rule.setFeature(logisticRegression.getFeatureList().get(j));
        rule.setFeatureValue(featureValue);
        rule.setScore(weight * featureValue);
        rule.setWeight(weight);
        candidates.add(rule);
    }

    // keep only the strongest contributions, largest |score| first
    candidates.stream()
            .sorted(Comparator.comparingDouble((LinearRule r) -> Math.abs(r.getScore())).reversed())
            .limit(limit)
            .forEachOrdered(calculation::addRule);

    return calculation;
}
 
Example 7
Source File: MLLogisticRegressionInspector.java    From pyramid with Apache License 2.0 5 votes vote down vote up
/**
 * Explains the score of class {@code classIndex} on {@code vector}: the
 * class bias plus the {@code limit} linear rules with the largest absolute
 * contribution (weight * featureValue).
 */
public static ClassScoreCalculation decisionProcess(MLLogisticRegression logisticRegression,
                                                    LabelTranslator labelTranslator, Vector vector, int classIndex, int limit){
    ClassScoreCalculation calculation = new ClassScoreCalculation(classIndex,
            labelTranslator.toExtLabel(classIndex),
            logisticRegression.predictClassScore(vector, classIndex));
    calculation.addRule(new ConstantRule(logisticRegression.getWeights().getBiasForClass(classIndex)));

    List<LinearRule> candidates = new ArrayList<>();
    for (int j = 0; j < logisticRegression.getNumFeatures(); j++){
        double weight = logisticRegression.getWeights().getWeightsWithoutBiasForClass(classIndex).get(j);
        double featureValue = vector.get(j);
        LinearRule rule = new LinearRule();
        rule.setFeature(logisticRegression.getFeatureList().get(j));
        rule.setFeatureValue(featureValue);
        rule.setScore(weight * featureValue);
        rule.setWeight(weight);
        candidates.add(rule);
    }

    // keep only the strongest contributions, largest |score| first
    candidates.stream()
            .sorted(Comparator.comparingDouble((LinearRule r) -> Math.abs(r.getScore())).reversed())
            .limit(limit)
            .forEachOrdered(calculation::addRule);

    return calculation;
}
 
Example 8
Source File: RidgeBinaryLogisticLoss.java    From pyramid with Apache License 2.0 5 votes vote down vote up
/**
 * Dot product of data-set row {@code rowIndex} (with an implicit leading
 * bias feature of 1) and {@code vector}: vector[0] + row · vector[1..].
 *
 * @param rowIndex index of the data-set row
 * @param vector   weight vector whose element 0 is the bias coefficient
 */
private double rowDot(int rowIndex, Vector vector){
    // skip the bias coefficient at position 0; the row features align with the rest
    Vector withoutBias = vector.viewPart(1, vector.size() - 1);
    return vector.get(0) + dataSet.getRow(rowIndex).dot(withoutBias);
}
 
Example 9
Source File: LogisticRegressionInspector.java    From pyramid with Apache License 2.0 5 votes vote down vote up
/**
 * Explains the score of class {@code classIndex} on {@code vector}: the
 * class bias plus the {@code limit} linear rules with the largest absolute
 * contribution (weight * featureValue).
 */
public static ClassScoreCalculation decisionProcess(LogisticRegression logisticRegression,
                                                    LabelTranslator labelTranslator, Vector vector, int classIndex, int limit){
    ClassScoreCalculation calculation = new ClassScoreCalculation(classIndex,
            labelTranslator.toExtLabel(classIndex),
            logisticRegression.predictClassScore(vector, classIndex));
    calculation.addRule(new ConstantRule(logisticRegression.getWeights().getBiasForClass(classIndex)));

    List<LinearRule> candidates = new ArrayList<>();
    for (int j = 0; j < logisticRegression.getNumFeatures(); j++){
        double weight = logisticRegression.getWeights().getWeightsWithoutBiasForClass(classIndex).get(j);
        double featureValue = vector.get(j);
        LinearRule rule = new LinearRule();
        rule.setFeature(logisticRegression.getFeatureList().get(j));
        rule.setFeatureValue(featureValue);
        rule.setScore(weight * featureValue);
        rule.setWeight(weight);
        candidates.add(rule);
    }

    // keep only the strongest contributions, largest |score| first
    candidates.stream()
            .sorted(Comparator.comparingDouble((LinearRule r) -> Math.abs(r.getScore())).reversed())
            .limit(limit)
            .forEachOrdered(calculation::addRule);

    return calculation;
}
 
Example 10
Source File: TreeRule.java    From pyramid with Apache License 2.0 5 votes vote down vote up
/**
 * Recursively records the decision path taken by {@code vector} through the
 * subtree rooted at {@code node}; when a leaf is reached its value becomes
 * this rule's score. NaN feature values are substituted with -9999
 * (a poor man's missing-value handling, per the original todo).
 */
public void add (RegressionTree tree, Node node, Vector vector){
    if (node.isLeaf()){
        this.score = node.getValue();
        return;
    }
    int featureIndex = node.getFeatureIndex();
    double threshold = node.getThreshold();
    double featureValue = vector.get(featureIndex);
    if (Double.isNaN(featureValue)){
        // todo this is a poor man's solution
        featureValue = -9999;
    }
    boolean goesLeft = featureValue <= threshold;
    this.checks.featureIndices.add(featureIndex);
    this.checks.features.add(tree.getFeatureList().getAll().get(featureIndex));
    this.checks.thresholds.add(threshold);
    this.checks.directions.add(goesLeft);
    this.checks.values.add(featureValue);
    add(tree, goesLeft ? node.getLeftChild() : node.getRightChild(), vector);
}
 
Example 11
Source File: ALSWRFactorizer.java    From elasticsearch-taste with Apache License 2.0 4 votes vote down vote up
/**
 * Copies the first {@code numFeatures} entries of {@code vector} into
 * row {@code idIndex} of {@code matrix}.
 */
protected void setFeatureColumn(final double[][] matrix,
        final int idIndex, final Vector vector) {
    final double[] row = matrix[idIndex];
    for (int f = 0; f < numFeatures; f++) {
        row[f] = vector.get(f);
    }
}
 
Example 12
Source File: VectorIdentityCalibrator.java    From pyramid with Apache License 2.0 4 votes vote down vote up
/**
 * Identity calibration: returns the raw score at {@code scoreIndex} unchanged.
 */
@Override
public double calibrate(Vector vector) {
    final double rawScore = vector.get(scoreIndex);
    return rawScore;
}
 
Example 13
Source File: IdentityMapping.java    From pyramid with Apache License 2.0 4 votes vote down vote up
/**
 * Identity mapping: the prediction is the first component of the vector.
 */
@Override
public double predict(Vector vector) {
    final double firstComponent = vector.get(0);
    return firstComponent;
}