Java Code Examples for org.nd4j.linalg.api.ndarray.INDArray#gt()

The following examples show how to use org.nd4j.linalg.api.ndarray.INDArray#gt() . You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: EvaluationMultiLabel.java    From scava, with Eclipse Public License 2.0
@Override
public void eval(INDArray labels, INDArray networkPredictions)
{
	// The label matrix and the prediction matrix must have identical shapes
	// before they can be compared row by row.
	if (!Arrays.equals(labels.shape(), networkPredictions.shape())) {
		throw new IllegalArgumentException("Unable to evaluate. Predictions and labels arrays are not same shape." +
				" Predictions shape: " + Arrays.toString(networkPredictions.shape()) + ", Labels shape: " + Arrays.toString(labels.shape()));
	}
	// Evaluation is invoked per mini batch, so keep a running total of examples seen.
	nExamples += networkPredictions.rows();
	for (int row = 0; row < networkPredictions.rows(); row++)
	{
		// Binarize this row of network output against the activation threshold
		// (entries become 1 where output > activationThreshold, else 0).
		INDArray binarized = networkPredictions.getRow(row).gt(activationThreshold);
		INDArray truth = labels.getRow(row);
		List<Integer> actual = new ArrayList<Integer>();
		List<Integer> predicted = new ArrayList<Integer>();
		for (int label = 0; label < nLabels; label++)
		{
			actual.add((int) truth.getDouble(label));
			predicted.add((int) binarized.getDouble(label));
		}
		actualList.add(actual);
		predictedList.add(predicted);
	}
}
 
Example 2
Source File: Vasttext.java    From scava, with Eclipse Public License 2.0
private List<Object> predictLabels(DataIteratorConstructor dataIteratorConstructor)
{
	INDArray predictions = predict(dataIteratorConstructor);
	List<Object> predictionsLabels = new ArrayList<Object>();
	if (multiLabel)
	{
		// Multi-label case: binarize activations (1.0 where value > threshold),
		// then collect the label name for every activated column of each row.
		predictions = predictions.gt(multiLabelActivation);
		for (int row = 0; row < predictions.rows(); row++)
		{
			// Presize for the worst case, in which every label is activated.
			List<String> activatedLabels = new ArrayList<String>(labelsSize);
			for (int col = 0; col < labelsSize; col++)
			{
				if (predictions.getDouble(row, col) == 1.0)
				{
					activatedLabels.add(labels.get(col));
				}
			}
			predictionsLabels.add(activatedLabels);
		}
	}
	else
	{
		// Single-label case: the prediction is the argmax along each row.
		INDArray predictionIndexes = Nd4j.argMax(predictions, 1);
		for (int row = 0; row < predictionIndexes.length(); row++)
		{
			predictionsLabels.add(labels.get(predictionIndexes.getInt(row)));
		}
	}
	return predictionsLabels;
}
 
Example 3
Source File: TransformsTest.java    From nd4j, with Apache License 2.0
@Test
public void testGT1() {
    // On this API version, gt(scalar) produces a numeric mask:
    // 1 where the element is strictly greater than the operand, 0 otherwise.
    INDArray input = Nd4j.create(new double[] {0, 1, 2, 4});

    INDArray actual = input.gt(1);

    INDArray expected = Nd4j.create(new double[] {0, 0, 1, 1});
    assertEquals(expected, actual);
}
 
Example 4
Source File: TransformsTest.java    From deeplearning4j, with Apache License 2.0
@Test
public void testGT1() {
    // On this (newer) API version, gt(scalar) yields a BOOL-typed mask
    // rather than a numeric 0/1 array.
    INDArray input = Nd4j.create(new double[] {0, 1, 2, 4});

    INDArray actual = input.gt(1);

    INDArray expected = Nd4j.create(new boolean[] {false, false, true, true});
    assertEquals(expected, actual);
}
 
Example 5
Source File: SameDiffTests.java    From nd4j, with Apache License 2.0
@Test
public void testPairwiseBooleanTransforms() {
    /*
    eq, neq, gt, lt, gte, lte, or, and, xor
     */
    //Test transforms (pairwise)
    // Verifies that each SameDiff pairwise comparison/boolean op produces the same
    // result as the equivalent direct INDArray / Transforms computation on identical inputs.
    Nd4j.getRandom().setSeed(12345);

    // 11 iterations: one per op (the 9 listed above, plus max/min at cases 7 and 8).
    for (int i = 0; i < 11; i++) {
        SameDiff sd = SameDiff.create();

        int nOut = 4;
        int minibatch = 5;

        // Random gaussian inputs; for the boolean ops (cases 6, 9, 10) these are
        // re-sampled below into 0/1 arrays via a Bernoulli distribution.
        INDArray ia = Nd4j.randn(minibatch, nOut);
        INDArray ib = Nd4j.randn(minibatch, nOut);

        SDVariable in1 = sd.var("in1", ia);
        SDVariable in2 = sd.var("in2", ib);


        SDVariable t;
        INDArray expOut;
        switch (i) {
            case 0:
                t = sd.eq(in1, in2);
                expOut = ia.eq(ib);
                break;
            case 1:
                t = sd.neq(in1, in2);
                expOut = ia.neq(ib);
                break;
            case 2:
                t = sd.gt(in1, in2);
                expOut = ia.gt(ib);
                break;
            case 3:
                t = sd.lt(in1, in2);
                expOut = ia.lt(ib);
                break;
            case 4:
                // gte has no direct INDArray convenience method here; run the op
                // explicitly, writing the result into a copy of ia.
                t = sd.gte(in1, in2);
                expOut = ia.dup();
                Nd4j.getExecutioner().exec(new GreaterThanOrEqual(new INDArray[]{ia, ib}, new INDArray[]{expOut}));
                break;
            case 5:
                // Same pattern as gte above.
                t = sd.lte(in1, in2);
                expOut = ia.dup();
                Nd4j.getExecutioner().exec(new LessThanOrEqual(new INDArray[]{ia, ib}, new INDArray[]{expOut}));
                break;
            case 6:
                // Boolean op: replace inputs with 0/1 values so or() is well-defined.
                // NOTE(review): in1/in2 were created from the pre-Bernoulli ia/ib;
                // this appears to rely on exec() filling the same underlying arrays.
                ia = Nd4j.getExecutioner().exec(new BernoulliDistribution(ia, 0.5));
                ib = Nd4j.getExecutioner().exec(new BernoulliDistribution(ib, 0.5));
                t = sd.or(in1, in2);
                expOut = Transforms.or(ia, ib);
                break;
            case 7:
                t = sd.max(in1, in2);
                expOut = Nd4j.getExecutioner().execAndReturn(new OldMax(ia, ib, ia.dup(), ia.length()));
                break;
            case 8:
                t = sd.min(in1, in2);
                expOut = Nd4j.getExecutioner().execAndReturn(new OldMin(ia, ib, ia.dup(), ia.length()));
                break;
            case 9:
                // Boolean op: 0/1 inputs, as in case 6.
                ia = Nd4j.getExecutioner().exec(new BernoulliDistribution(ia, 0.5));
                ib = Nd4j.getExecutioner().exec(new BernoulliDistribution(ib, 0.5));
                t = sd.and(in1, in2);
                expOut = Transforms.and(ia, ib);
                break;
            case 10:
                // Boolean op: 0/1 inputs, as in case 6.
                ia = Nd4j.getExecutioner().exec(new BernoulliDistribution(ia, 0.5));
                ib = Nd4j.getExecutioner().exec(new BernoulliDistribution(ib, 0.5));
                t = sd.xor(in1, in2);
                expOut = Transforms.xor(ia, ib);
                break;
            default:
                // Unreachable: loop bound matches the number of cases.
                throw new RuntimeException();
        }

        log.info("Executing: " + i);
        INDArray out = sd.execAndEndResult();

        assertEquals(expOut, out);
    }
}
 
Example 6
Source File: EvaluationBinaryTest.java    From deeplearning4j, with Apache License 2.0
@Test
public void testEvaluationBinary() {
    //Compare EvaluationBinary to Evaluation class
    // Sweeps global and per-array datatypes, computes per-output-column accuracy and
    // TP/TN counts by hand, and checks EvaluationBinary agrees both with those manual
    // counts and with the single-output Evaluation class. Also verifies that the
    // accumulated state and stats() string are identical across all dtype combinations.
    DataType dtypeBefore = Nd4j.defaultFloatingPointType();
    EvaluationBinary first = null;   // evaluation from the first dtype combination, for cross-dtype comparison
    String sFirst = null;            // its stats() string
    try {
        for (DataType globalDtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF, DataType.INT}) {
            // Non-FP global dtype still needs an FP default for the floating-point type.
            Nd4j.setDefaultDataTypes(globalDtype, globalDtype.isFPType() ? globalDtype : DataType.DOUBLE);
            for (DataType lpDtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {

                // Fixed seed so every dtype combination sees the same data.
                Nd4j.getRandom().setSeed(12345);

                int nExamples = 50;
                int nOut = 4;
                long[] shape = {nExamples, nOut};

                // Random 0/1 labels.
                INDArray labels = Nd4j.getExecutioner().exec(new BernoulliDistribution(Nd4j.createUninitialized(lpDtype, shape), 0.5));

                // Random probabilities in [0,1); binarized at 0.5 for the manual counting below.
                INDArray predicted = Nd4j.rand(lpDtype, shape);
                INDArray binaryPredicted = predicted.gt(0.5);

                EvaluationBinary eb = new EvaluationBinary();
                eb.eval(labels, predicted);

                //System.out.println(eb.stats());

                double eps = 1e-6;
                for (int i = 0; i < nOut; i++) {
                    // Per-output column views (keepDim=true preserves 2d shape).
                    INDArray lCol = labels.getColumn(i,true);
                    INDArray pCol = predicted.getColumn(i,true);
                    INDArray bpCol = binaryPredicted.getColumn(i,true);

                    // Manual confusion counting for this output column.
                    int countCorrect = 0;
                    int tpCount = 0;
                    int tnCount = 0;
                    for (int j = 0; j < lCol.length(); j++) {
                        if (lCol.getDouble(j) == bpCol.getDouble(j)) {
                            countCorrect++;
                            if (lCol.getDouble(j) == 1) {
                                tpCount++;
                            } else {
                                tnCount++;
                            }
                        }
                    }
                    double acc = countCorrect / (double) lCol.length();

                    // Reference: the single-output Evaluation class on the same column.
                    Evaluation e = new Evaluation();
                    e.eval(lCol, pCol);

                    assertEquals(acc, eb.accuracy(i), eps);
                    assertEquals(e.accuracy(), eb.scoreForMetric(ACCURACY, i), eps);
                    assertEquals(e.precision(1), eb.scoreForMetric(PRECISION, i), eps);
                    assertEquals(e.recall(1), eb.scoreForMetric(RECALL, i), eps);
                    assertEquals(e.f1(1), eb.scoreForMetric(F1, i), eps);
                    assertEquals(e.falseAlarmRate(), eb.scoreForMetric(FAR, i), eps);
                    assertEquals(e.falsePositiveRate(1), eb.falsePositiveRate(i), eps);


                    assertEquals(tpCount, eb.truePositives(i));
                    assertEquals(tnCount, eb.trueNegatives(i));

                    // Class "1" in Evaluation corresponds to the positive label here.
                    assertEquals((int) e.truePositives().get(1), eb.truePositives(i));
                    assertEquals((int) e.trueNegatives().get(1), eb.trueNegatives(i));
                    assertEquals((int) e.falsePositives().get(1), eb.falsePositives(i));
                    assertEquals((int) e.falseNegatives().get(1), eb.falseNegatives(i));

                    assertEquals(nExamples, eb.totalCount(i));

                    // Results must be identical regardless of dtype combination.
                    String s = eb.stats();
                    if(first == null) {
                        first = eb;
                        sFirst = s;
                    } else {
                        assertEquals(first, eb);
                        assertEquals(sFirst, s);
                    }
                }
            }
        }
    } finally {
        // Restore global datatypes so other tests are unaffected.
        Nd4j.setDefaultDataTypes(dtypeBefore, dtypeBefore);
    }
}
 
Example 7
Source File: SameDiffTests.java    From deeplearning4j, with Apache License 2.0
@Test
public void testPairwiseBooleanTransforms() {
    /*
    eq, neq, gt, lt, gte, lte, or, and, xor
     */
    //Test transforms (pairwise)
    // Newer-API variant of this test: verifies each SameDiff pairwise op against the
    // equivalent direct INDArray / Transforms computation. Boolean ops require an
    // explicit castTo(DataType.BOOL) on this API version.
    Nd4j.getRandom().setSeed(12345);

    // 11 iterations: one per op (the 9 listed above, plus max/min at cases 7 and 8).
    for (int i = 0; i < 11; i++) {
        SameDiff sd = SameDiff.create();

        int nOut = 4;
        int minibatch = 5;

        // Random gaussian inputs; for the boolean ops (cases 6, 9, 10) these are
        // re-sampled below into 0/1 arrays via a Bernoulli distribution.
        INDArray ia = Nd4j.randn(minibatch, nOut);
        INDArray ib = Nd4j.randn(minibatch, nOut);

        SDVariable in1 = sd.var("in1", ia);
        SDVariable in2 = sd.var("in2", ib);

        SDVariable t;
        INDArray expOut;
        switch (i) {
            case 0:
                t = sd.eq(in1, in2);
                expOut = ia.eq(ib);
                break;
            case 1:
                t = sd.neq(in1, in2);
                expOut = ia.neq(ib);
                break;
            case 2:
                t = sd.gt(in1, in2);
                expOut = ia.gt(ib);
                break;
            case 3:
                t = sd.lt(in1, in2);
                expOut = ia.lt(ib);
                break;
            case 4:
                // gte has no direct INDArray convenience method; run the op explicitly,
                // writing into a preallocated BOOL output.
                t = sd.gte(in1, in2);
                expOut = Nd4j.create(DataType.BOOL, ia.shape());
                Nd4j.exec(new GreaterThanOrEqual(new INDArray[]{ia, ib}, new INDArray[]{expOut}));
                break;
            case 5:
                // Same pattern as gte above.
                t = sd.lte(in1, in2);
                expOut = Nd4j.create(DataType.BOOL, ia.shape());
                Nd4j.exec(new LessThanOrEqual(new INDArray[]{ia, ib}, new INDArray[]{expOut}));
                break;
            case 6:
                // Boolean op: replace inputs with 0/1 values and cast variables to BOOL.
                // NOTE(review): in1/in2 were created from the pre-Bernoulli ia/ib; this
                // appears to rely on exec() filling the same underlying arrays.
                ia = Nd4j.exec(new BernoulliDistribution(ia, 0.5));
                ib = Nd4j.exec(new BernoulliDistribution(ib, 0.5));
                t = sd.math().or(in1.castTo(DataType.BOOL), in2.castTo(DataType.BOOL));
                expOut = Transforms.or(ia, ib);
                break;
            case 7:
                t = sd.max(in1, in2);
                expOut = Nd4j.exec(new Max(ia, ib, ia.dup()))[0];
                break;
            case 8:
                t = sd.min(in1, in2);
                expOut = Nd4j.exec(new Min(ia, ib, ia.dup()))[0];
                break;
            case 9:
                // Boolean op: 0/1 inputs and BOOL cast, as in case 6.
                ia = Nd4j.exec(new BernoulliDistribution(ia, 0.5));
                ib = Nd4j.exec(new BernoulliDistribution(ib, 0.5));
                t = sd.math().and(in1.castTo(DataType.BOOL), in2.castTo(DataType.BOOL));
                expOut = Transforms.and(ia, ib);
                break;
            case 10:
                // Boolean op: 0/1 inputs and BOOL cast, as in case 6.
                ia = Nd4j.exec(new BernoulliDistribution(ia, 0.5));
                ib = Nd4j.exec(new BernoulliDistribution(ib, 0.5));
                t = sd.math().xor(in1.castTo(DataType.BOOL), in2.castTo(DataType.BOOL));
                expOut = Transforms.xor(ia, ib);
                break;
            default:
                // Unreachable: loop bound matches the number of cases.
                throw new RuntimeException();
        }

        log.info("Executing: " + i);
        INDArray out = t.eval();

        assertEquals(expOut, out);
    }
}
 
Example 8
Source File: EvaluationBinary.java    From deeplearning4j, with Apache License 2.0
@Override
public void eval(INDArray labelsArr, INDArray predictionsArr, INDArray maskArr) {
    // Accumulates per-output TP/TN/FP/FN counts for binary multi-label evaluation:
    // binarize predictions (threshold 0.5, or the configured per-output decisionThreshold),
    // derive the four confusion arrays, apply the optional mask, and add column sums
    // into the running counters. Optionally also feeds the ROC calculator.

    //Check for NaNs in predictions - without this, evaluation could silently be interpreted as class 0 prediction due to argmax
    long count = Nd4j.getExecutioner().execAndReturn(new MatchCondition(predictionsArr, Conditions.isNan())).getFinalResult().longValue();
    Preconditions.checkState(count == 0, "Cannot perform evaluation with NaNs present in predictions:" +
            " %s NaNs present in predictions INDArray", count);

    // Previously accumulated state must match the label count along the evaluation axis.
    if (countTruePositive != null && countTruePositive.length != labelsArr.size(axis)) {
        throw new IllegalStateException("Labels array does not match stored state size. Expected labels array with "
                        + "size " + countTruePositive.length + ", got labels array with size " + labelsArr.size(axis) + " for axis " + axis);
    }

    // Reshape to 2d (examples x outputs) along the configured axis and drop fully-masked rows.
    Triple<INDArray,INDArray, INDArray> p = BaseEvaluation.reshapeAndExtractNotMasked(labelsArr, predictionsArr, maskArr, axis);
    INDArray labels = p.getFirst();
    INDArray predictions = p.getSecond();
    INDArray maskArray = p.getThird();

    // Align datatypes so the elementwise arithmetic below is well-defined.
    if(labels.dataType() != predictions.dataType())
        labels = labels.castTo(predictions.dataType());

    if(decisionThreshold != null && decisionThreshold.dataType() != predictions.dataType())
        decisionThreshold = decisionThreshold.castTo(predictions.dataType());

    //First: binarize the network prediction probabilities, threshold 0.5 unless otherwise specified
    //This gives us 3 binary arrays: labels, predictions, masks
    INDArray classPredictions;
    if (decisionThreshold != null) {
        // Per-output thresholds: broadcast the comparison along dimension 1 (outputs).
        classPredictions = Nd4j.createUninitialized(DataType.BOOL, predictions.shape());
        Nd4j.getExecutioner()
                        .exec(new BroadcastGreaterThan(predictions, decisionThreshold, classPredictions, 1));
    } else {
        classPredictions = predictions.gt(0.5);
    }
    // Back to a numeric 0/1 array so it can be multiplied with labels below.
    classPredictions = classPredictions.castTo(predictions.dataType());

    INDArray notLabels = labels.rsub(1.0);  //If labels are 0 or 1, then rsub(1) swaps
    INDArray notClassPredictions = classPredictions.rsub(1.0);

    INDArray truePositives = classPredictions.mul(labels); //1s where predictions are 1, and labels are 1. 0s elsewhere
    INDArray trueNegatives = notClassPredictions.mul(notLabels); //1s where predictions are 0, and labels are 0. 0s elsewhere
    INDArray falsePositives = classPredictions.mul(notLabels); //1s where predictions are 1, labels are 0
    INDArray falseNegatives = notClassPredictions.mul(labels); //1s where predictions are 0, labels are 1

    if (maskArray != null) {
        //By multiplying by mask, we keep only those 1s that are actually present
        maskArray = maskArray.castTo(truePositives.dataType());
        truePositives.muli(maskArray);
        trueNegatives.muli(maskArray);
        falsePositives.muli(maskArray);
        falseNegatives.muli(maskArray);
    }

    // Column sums give per-output counts for this batch.
    int[] tpCount = truePositives.sum(0).data().asInt();
    int[] tnCount = trueNegatives.sum(0).data().asInt();
    int[] fpCount = falsePositives.sum(0).data().asInt();
    int[] fnCount = falseNegatives.sum(0).data().asInt();

    // Lazily initialize the running counters on first eval call.
    if (countTruePositive == null) {
        int l = tpCount.length;
        countTruePositive = new int[l];
        countFalsePositive = new int[l];
        countTrueNegative = new int[l];
        countFalseNegative = new int[l];
    }

    addInPlace(countTruePositive, tpCount);
    addInPlace(countFalsePositive, fpCount);
    addInPlace(countTrueNegative, tnCount);
    addInPlace(countFalseNegative, fnCount);

    if (rocBinary != null) {
        rocBinary.eval(labels, predictions, maskArray);
    }
}