Java Code Examples for org.nd4j.linalg.api.ndarray.INDArray#var()

The following examples show how to use org.nd4j.linalg.api.ndarray.INDArray#var(). They are extracted from open source projects; the Source File reference above each example identifies the original project and file.
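Most of the examples exercise one of three overloads: varNumber(), which reduces the whole array to a single Number; var(int... dimension), which computes the bias-corrected (sample) variance along the given dimensions; and var(boolean biasCorrected, int... dimension), which lets you choose the population variance instead. As a minimal orientation sketch (the 2x2 input here is illustrative, not taken from any of the projects below):

INDArray x = Nd4j.create(new double[] {1, 2, 3, 4}, new long[] {2, 2});
double all = x.varNumber().doubleValue(); // sample variance over all four values
INDArray perColumn = x.var(0);            // divides by n - 1 (bias-corrected)
INDArray population = x.var(false, 0);    // divides by n (population variance)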
Example 1
Source File: Nd4jTest.java    From nd4j with Apache License 2.0
@Test
public void testVar() {
    INDArray data = Nd4j.create(new double[] {4., 4., 4., 4., 8., 8., 8., 8., 4., 4., 4., 4., 8., 8., 8., 8., 4.,
                    4., 4., 4., 8., 8., 8., 8., 4., 4., 4., 4., 8., 8., 8., 8, 2., 2., 2., 2., 4., 4., 4., 4., 2.,
                    2., 2., 2., 4., 4., 4., 4., 2., 2., 2., 2., 4., 4., 4., 4., 2., 2., 2., 2., 4., 4., 4., 4.},
                    new int[] {2, 2, 4, 4});

    INDArray actualResult = data.var(false, 0);
    INDArray expectedResult = Nd4j.create(new double[] {1., 1., 1., 1., 4., 4., 4., 4., 1., 1., 1., 1., 4., 4., 4.,
                    4., 1., 1., 1., 1., 4., 4., 4., 4., 1., 1., 1., 1., 4., 4., 4., 4.}, new int[] {2, 4, 4});
    assertEquals(getFailureMessage(), expectedResult, actualResult);
}
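With biasCorrected set to false, var(false, 0) computes the population variance along dimension 0, so each output entry reduces one pair of values across the first axis: the pair (4, 2) has mean 3 and variance ((4 - 3)^2 + (2 - 3)^2) / 2 = 1, and the pair (8, 4) gives ((8 - 6)^2 + (4 - 6)^2) / 2 = 4, which is exactly the pattern of 1s and 4s in expectedResult.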
 
Example 2
Source File: ShapeTestsC.java    From deeplearning4j with Apache License 2.0
@Test
public void testColumnVariance() {
    INDArray twoByTwo = Nd4j.linspace(1, 4, 4, DataType.DOUBLE).reshape(2, 2);
    INDArray columnVar = twoByTwo.var(true, 0); // bias-corrected variance along dimension 0 (per column)
    INDArray assertion = Nd4j.create(new double[] {2, 2});
    assertEquals(assertion, columnVar);

}
 
Example 3
Source File: OpExecutionerTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testVariance() {
    val f = new double[] {0.9296161, 0.31637555, 0.1839188};
    INDArray arr = Nd4j.create(f, new int[] {1, 3}, ordering());
    double var = arr.varNumber().doubleValue();

    INDArray var1 = arr.var(1);
    double var2 = var1.getDouble(0);
    assertEquals(var, var2, 1e-3);

    val variance = new org.apache.commons.math3.stat.descriptive.moment.Variance(true);
    double exp = variance.evaluate(arr.toDoubleVector());
    assertEquals(exp, var, 1e-7f);
}
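Here new Variance(true) is Commons Math's bias-corrected estimator, so the final assertion confirms that varNumber() divides by n - 1 rather than n.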
 
Example 4
Source File: OpExecutionerTestsC.java    From deeplearning4j with Apache License 2.0
@Test
public void testVariance() {

    INDArray arr = Nd4j.create(new float[] {0.9296161f, 0.31637555f, 0.1839188f}, new int[] {1, 3}, ordering());
    double var = arr.varNumber().doubleValue();
    INDArray var1 = arr.var(1);
    double var2 = var1.getDouble(0);
    assertEquals(var, var2, 1e-1);

    double exp = 0.15827888250350952;
    assertEquals(exp, var, 1e-7f);
}
 
Example 5
Source File: OpExecutionerTestsC.java    From deeplearning4j with Apache License 2.0
@Test
public void testColumnVar() {
    INDArray matrix = Nd4j.linspace(1, 600, 600, DataType.DOUBLE).reshape(150, 4);
    INDArray columnVar = matrix.var(0); // bias-corrected (sample) variance per column
    INDArray assertion = Nd4j.create(new double[] {30200, 30200, 30200, 30200});
    assertEquals(assertion, columnVar);
}
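Without the boolean flag, var(0) uses the bias-corrected (sample) estimator. Each column of the 150x4 reshape is an arithmetic sequence with step 4 (column 0 is 1, 5, 9, ..., 597), and the sample variance of n terms with common difference d is d^2 * n * (n + 1) / 12, here 16 * 150 * 151 / 12 = 30200.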
 
Example 6
Source File: Nd4jTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testVar() {
    INDArray data = Nd4j.create(new double[] {4., 4., 4., 4., 8., 8., 8., 8., 4., 4., 4., 4., 8., 8., 8., 8., 4.,
                    4., 4., 4., 8., 8., 8., 8., 4., 4., 4., 4., 8., 8., 8., 8, 2., 2., 2., 2., 4., 4., 4., 4., 2.,
                    2., 2., 2., 4., 4., 4., 4., 2., 2., 2., 2., 4., 4., 4., 4., 2., 2., 2., 2., 4., 4., 4., 4.},
            new long[] {2, 2, 4, 4});

    INDArray actualResult = data.var(false, 0);
    INDArray expectedResult = Nd4j.create(new double[] {1., 1., 1., 1., 4., 4., 4., 4., 1., 1., 1., 1., 4., 4., 4.,
            4., 1., 1., 1., 1., 4., 4., 4., 4., 1., 1., 1., 1., 4., 4., 4., 4.}, new long[] {2, 4, 4});
    assertEquals(getFailureMessage(), expectedResult, actualResult);
}
 
Example 7
Source File: ReductionOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testMoments() {
    for (int[] axes : new int[][]{{0}, {1}, {0, 1}}) {
        INDArray input = Nd4j.linspace(1, 12, 12).reshape(3, 4);

        SameDiff sd = SameDiff.create();
        SDVariable in = sd.var("in", input);

        SDVariable[] moments = sd.math().moments(in, axes);
        INDArray expMean = input.mean(axes);
        INDArray expVar = input.var(false, axes);

        SDVariable loss;
        if (axes.length < 2) {
            loss = moments[0].add(moments[1]).std(true);
        } else {
            loss = moments[0].add(moments[1]).mean();
        }


        String msg = Arrays.toString(axes);

        TestCase tc = new TestCase(sd)
                .testName(msg)
                .expected(moments[0], expMean)
                .expected(moments[1], expVar);

        String err = OpValidation.validate(tc);
        assertNull(err);
    }
}
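As the expected values show, the moments op returns the mean together with the population variance: the test builds expVar with var(false, axes), not the bias-corrected default.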
 
Example 8
Source File: ShapeTestsC.java    From nd4j with Apache License 2.0
@Test
public void testColumnVariance() {
    INDArray twoByTwo = Nd4j.linspace(1, 4, 4).reshape(2, 2);
    INDArray columnVar = twoByTwo.var(true, 0); // bias-corrected variance along dimension 0 (per column)
    INDArray assertion = Nd4j.create(new double[] {2, 2});
    assertEquals(assertion, columnVar);

}
 
Example 9
Source File: OpExecutionerTests.java    From nd4j with Apache License 2.0
@Test
public void testVariance() {
    val f = new double[] {0.9296161, 0.31637555, 0.1839188};
    INDArray arr = Nd4j.create(f, new int[] {1, 3}, ordering());
    double var = arr.varNumber().doubleValue();

    INDArray var1 = arr.var(1);
    double var2 = var1.getDouble(0);
    assertEquals(var, var2, 1e-3);

    val variance = new org.apache.commons.math3.stat.descriptive.moment.Variance(true);
    double exp = variance.evaluate(arr.toDoubleVector());
    assertEquals(exp, var, 1e-7f);
}
 
Example 10
Source File: OpExecutionerTestsC.java    From nd4j with Apache License 2.0
@Test
public void testVariance() {

    INDArray arr = Nd4j.create(new float[] {0.9296161f, 0.31637555f, 0.1839188f}, new int[] {1, 3}, ordering());
    double var = arr.varNumber().doubleValue();
    INDArray var1 = arr.var(1);
    double var2 = var1.getDouble(0);
    assertEquals(var, var2, 1e-1);

    double exp = 0.15827888250350952;
    assertEquals(exp, var, 1e-7f);
}
 
Example 11
Source File: OpExecutionerTestsC.java    From nd4j with Apache License 2.0
@Test
public void testColumnVar() {
    INDArray matrix = Nd4j.linspace(1, 600, 600).reshape(150, 4);
    INDArray columnVar = matrix.var(0); // bias-corrected (sample) variance per column
    INDArray assertion = Nd4j.create(new float[] {30200f, 30200f, 30200f, 30200f});
    assertEquals(assertion, columnVar);
}
 
Example 12
Source File: AtomicAllocatorTest.java    From nd4j with Apache License 2.0
@Test
public void testGpuVariance() throws Exception {
    INDArray twoByTwo = Nd4j.linspace(1, 4, 4).reshape(2, 2);
    INDArray columnVar = twoByTwo.var(true, 0);
    INDArray assertion = Nd4j.create(new double[]{2, 2});
    assertEquals(assertion, columnVar);
}
 
Example 13
Source File: DistributionStats.java    From nd4j with Apache License 2.0
/**
 * Add rows of data to the statistics
 *
 * @param data the matrix containing multiple rows of data to include
 * @param mask (optional) mask for the data, useful for e.g. time series
 */
public Builder add(@NonNull INDArray data, INDArray mask) {
    data = DataSetUtil.tailor2d(data, mask);

    // Using https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Parallel_algorithm
    if (data == null) {
        // Nothing to add. Either data is empty or completely masked. Just skip it, otherwise we will get
        // null pointer exceptions.
        return this;
    }
    INDArray mean = data.mean(0);
    INDArray variance = data.var(false, 0);
    long count = data.size(0);

    if (runningMean == null) {
        // First batch
        runningMean = mean;
        runningVariance = variance;
        runningCount = count;

        if (data.size(0) == 1) {
            //Handle edge case: currently, reduction ops may return the same array
            //But we don't want to modify this array in-place later
            runningMean = runningMean.dup();
            runningVariance = runningVariance.dup();
        }
    } else {
        // Update running variance
        INDArray deltaSquared = Transforms.pow(mean.subRowVector(runningMean), 2);
        INDArray mB = variance.muli(count);
        runningVariance.muli(runningCount).addiRowVector(mB)
                        .addiRowVector(deltaSquared
                                        .muli((float) (runningCount * count) / (runningCount + count)))
                        .divi(runningCount + count);

        // Update running count
        runningCount += count;

        // Update running mean
        INDArray xMinusMean = data.subRowVector(runningMean);
        runningMean.addi(xMinusMean.sum(0).divi(runningCount));
    }

    return this;
}
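The else branch is the merge step of the parallel variance algorithm linked in the comment: for two partitions with counts nA and nB, means meanA and meanB, and population variances varA and varB, the combined variance is (nA * varA + nB * varB + delta^2 * nA * nB / (nA + nB)) / (nA + nB), with delta = meanB - meanA. A scalar sketch of the same update (a hypothetical helper, not part of DistributionStats):

// Merge two (count, mean, population variance) summaries; mirrors the INDArray math above.
static double[] merge(long nA, double meanA, double varA,
                      long nB, double meanB, double varB) {
    long n = nA + nB;
    double delta = meanB - meanA;
    double var = (nA * varA + nB * varB + delta * delta * nA * nB / (double) n) / n;
    double mean = meanA + delta * nB / (double) n; // same as meanA + sum(x - meanA) / n
    return new double[] {n, mean, var};
}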
 
Example 14
Source File: OpExecutionerTestsC.java    From nd4j with Apache License 2.0
@Test
public void testVarianceSingleVsMultipleDimensions() {
    // this test should always run in double
    DataBuffer.Type type = Nd4j.dataType();
    DataTypeUtil.setDTypeForContext(DataBuffer.Type.DOUBLE);
    Nd4j.getRandom().setSeed(12345);

    //Generate C order random numbers. Strides: [500,100,10,1]
    INDArray fourd = Nd4j.rand('c', new int[] {100, 5, 10, 10}).muli(10);
    INDArray twod = Shape.newShapeNoCopy(fourd, new int[] {100, 5 * 10 * 10}, false);

    //Population variance. These two should be identical
    INDArray var4 = fourd.var(false, 1, 2, 3);
    INDArray var2 = twod.var(false, 1);

    //Manual calculation of population variance, not bias corrected
    //https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Na.C3.AFve_algorithm
    double[] sums = new double[100];
    double[] sumSquares = new double[100];
    NdIndexIterator iter = new NdIndexIterator(fourd.shape());
    while (iter.hasNext()) {
        val next = iter.next();
        double d = fourd.getDouble(next);

        // FIXME: int cast
        sums[(int) next[0]] += d;
        sumSquares[(int) next[0]] += d * d;
    }

    double[] manualVariance = new double[100];
    val N = (fourd.length() / sums.length);
    for (int i = 0; i < sums.length; i++) {
        manualVariance[i] = (sumSquares[i] - (sums[i] * sums[i]) / N) / N;
    }

    INDArray var4bias = fourd.var(true, 1, 2, 3);
    INDArray var2bias = twod.var(true, 1);

    assertArrayEquals(var2.data().asDouble(), var4.data().asDouble(), 1e-5);
    assertArrayEquals(manualVariance, var2.data().asDouble(), 1e-5);
    assertArrayEquals(var2bias.data().asDouble(), var4bias.data().asDouble(), 1e-5);

    DataTypeUtil.setDTypeForContext(type);
}
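The manual check implements the naïve formula from the linked page: per slice, population variance = (sumSquares - sums^2 / N) / N. That value is asserted against the var(false, ...) results, while the bias-corrected var(true, ...) results are only checked for consistency between the 4d and 2d views of the same data.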
 
Example 15
Source File: Nd4jTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testVar2() {
    INDArray arr = Nd4j.linspace(1, 6, 6, DataType.DOUBLE).reshape(2, 3);
    INDArray var = arr.var(false, 0);
    assertEquals(Nd4j.create(new double[] {2.25, 2.25, 2.25}), var);
}
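In C order, reshape(2, 3) places {1, 2, 3} in the first row and {4, 5, 6} in the second, so each column is a pair 3 apart: both values sit 1.5 from the column mean, giving a population variance of (1.5^2 + 1.5^2) / 2 = 2.25.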
 
Example 16
Source File: OpExecutionerTestsC.java    From deeplearning4j with Apache License 2.0
@Test
public void testVarianceSingleVsMultipleDimensions() {
    // this test should always run in double
    DataType type = Nd4j.dataType();
    DataTypeUtil.setDTypeForContext(DataType.DOUBLE);
    Nd4j.getRandom().setSeed(12345);

    //Generate C order random numbers. Strides: [500,100,10,1]
    INDArray fourd = Nd4j.rand('c', new int[] {100, 5, 10, 10}).muli(10);
    INDArray twod = Shape.newShapeNoCopy(fourd, new int[] {100, 5 * 10 * 10}, false);

    //Population variance. These two should be identical
    INDArray var4 = fourd.var(false, 1, 2, 3);
    INDArray var2 = twod.var(false, 1);

    //Manual calculation of population variance, not bias corrected
    //https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Na.C3.AFve_algorithm
    double[] sums = new double[100];
    double[] sumSquares = new double[100];
    NdIndexIterator iter = new NdIndexIterator(fourd.shape());
    while (iter.hasNext()) {
        val next = iter.next();
        double d = fourd.getDouble(next);

        sums[(int) next[0]] += d;
        sumSquares[(int) next[0]] += d * d;
    }

    double[] manualVariance = new double[100];
    val N = (fourd.length() / sums.length);
    for (int i = 0; i < sums.length; i++) {
        manualVariance[i] = (sumSquares[i] - (sums[i] * sums[i]) / N) / N;
    }

    INDArray var4bias = fourd.var(true, 1, 2, 3);
    INDArray var2bias = twod.var(true, 1);

    assertArrayEquals(var2.data().asDouble(), var4.data().asDouble(), 1e-5);
    assertArrayEquals(manualVariance, var2.data().asDouble(), 1e-5);
    assertArrayEquals(var2bias.data().asDouble(), var4bias.data().asDouble(), 1e-5);

    DataTypeUtil.setDTypeForContext(type);
}
 
Example 17
Source File: Nd4jTest.java    From nd4j with Apache License 2.0
@Test
public void testVar2() {
    INDArray arr = Nd4j.linspace(1, 6, 6).reshape(2, 3);
    INDArray var = arr.var(false, 0);
    assertEquals(Nd4j.create(new double[] {2.25, 2.25, 2.25}), var);
}
 
Example 18
Source File: BatchNormalizationTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testDnnForwardBackward() {
    double eps = 1e-5;
    int nIn = 4;
    int minibatch = 2;
    Nd4j.getRandom().setSeed(12345);
    INDArray input = Nd4j.rand('c', new int[]{minibatch, nIn});

    //TODO: other values for gamma/beta
    INDArray gamma = Nd4j.ones(1, nIn);
    INDArray beta = Nd4j.zeros(1, nIn);

    Layer l = getLayer(nIn, eps, false, -1, -1);

    INDArray mean = input.mean(0);
    INDArray var = input.var(false, 0);
    INDArray xHat = input.subRowVector(mean).divRowVector(Transforms.sqrt(var.add(eps), true));
    INDArray outExpected = xHat.mulRowVector(gamma).addRowVector(beta);

    INDArray out = l.activate(input, true, LayerWorkspaceMgr.noWorkspaces());

    assertEquals(outExpected, out);

    //-------------------------------------------------------------
    //Check backprop
    INDArray epsilon = Nd4j.rand(minibatch, nIn); //dL/dy

    INDArray dldgammaExp = epsilon.mul(xHat).sum(true, 0);
    INDArray dldbetaExp = epsilon.sum(true, 0);

    INDArray dldxhat = epsilon.mulRowVector(gamma);
    INDArray dldvar = dldxhat.mul(input.subRowVector(mean)).mul(-0.5)
            .mulRowVector(Transforms.pow(var.add(eps), -3.0 / 2.0, true)).sum(0);
    INDArray dldmu = dldxhat.mulRowVector(Transforms.pow(var.add(eps), -1.0 / 2.0, true)).neg().sum(0)
            .add(dldvar.mul(input.subRowVector(mean).mul(-2.0).sum(0).div(minibatch)));
    INDArray dldinExp = dldxhat.mulRowVector(Transforms.pow(var.add(eps), -1.0 / 2.0, true))
            .add(input.subRowVector(mean).mul(2.0 / minibatch).mulRowVector(dldvar))
            .addRowVector(dldmu.mul(1.0 / minibatch));

    Pair<Gradient, INDArray> p = l.backpropGradient(epsilon, LayerWorkspaceMgr.noWorkspaces());

    INDArray dldgamma = p.getFirst().getGradientFor("gamma");
    INDArray dldbeta = p.getFirst().getGradientFor("beta");

    assertEquals(dldgammaExp, dldgamma);
    assertEquals(dldbetaExp, dldbeta);

    assertEquals(dldinExp, p.getSecond());
}
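The hand-computed gradients are the standard batch normalization backprop equations: dL/dgamma = sum_i(dL/dy_i * xHat_i), dL/dbeta = sum_i(dL/dy_i), dL/dvar = sum_i(dL/dxHat_i * (x_i - mean)) * (-1/2) * (var + eps)^(-3/2), dL/dmean = -sum_i(dL/dxHat_i) * (var + eps)^(-1/2) + dL/dvar * sum_i(-2 * (x_i - mean)) / m, and dL/dx_i = dL/dxHat_i * (var + eps)^(-1/2) + dL/dvar * 2 * (x_i - mean) / m + dL/dmean / m, with m = minibatch. Example 20's CNN variant below repeats the same check for 4d activations, where m becomes minibatch * hw * hw and the row-vector operations are replaced by broadcast ops along the channel dimension.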
 
Example 19
Source File: BatchNormalizationTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testCnnForwardBackward() {
    double eps = 1e-5;
    int nIn = 4;
    int hw = 3;
    int minibatch = 2;
    Nd4j.getRandom().setSeed(12345);
    INDArray input = Nd4j.rand('c', new int[]{minibatch, nIn, hw, hw});

    //TODO: other values for gamma/beta
    INDArray gamma = Nd4j.ones(1, nIn);
    INDArray beta = Nd4j.zeros(1, nIn);

    Layer l = getLayer(nIn, eps, false, -1, -1);

    INDArray mean = input.mean(0, 2, 3);
    INDArray var = input.var(false, 0, 2, 3);
    INDArray xHat = Nd4j.getExecutioner().exec(new BroadcastSubOp(input, mean, input.dup(), 1));
    Nd4j.getExecutioner().exec(new BroadcastDivOp(xHat, Transforms.sqrt(var.add(eps), true), xHat, 1));

    INDArray outExpected = Nd4j.getExecutioner().exec(new BroadcastMulOp(xHat, gamma, xHat.dup(), 1));
    Nd4j.getExecutioner().exec(new BroadcastAddOp(outExpected, beta, outExpected, 1));

    INDArray out = l.activate(input, true, LayerWorkspaceMgr.noWorkspaces());

    assertEquals(outExpected, out);

    //-------------------------------------------------------------
    //Check backprop
    INDArray epsilon = Nd4j.rand('c', new int[]{minibatch, nIn, hw, hw}); //dL/dy

    int effectiveMinibatch = minibatch * hw * hw;

    INDArray dldgammaExp = epsilon.mul(xHat).sum(0, 2, 3);
    dldgammaExp = dldgammaExp.reshape(1, dldgammaExp.length());
    INDArray dldbetaExp = epsilon.sum(0, 2, 3);
    dldbetaExp = dldbetaExp.reshape(1, dldbetaExp.length());

    INDArray dldxhat = Nd4j.getExecutioner().exec(new BroadcastMulOp(epsilon, gamma, epsilon.dup(), 1)); //epsilon.mulRowVector(gamma);

    INDArray inputSubMean = Nd4j.getExecutioner().exec(new BroadcastSubOp(input, mean, input.dup(), 1));

    INDArray dldvar = dldxhat.mul(inputSubMean).mul(-0.5);
    dldvar = Nd4j.getExecutioner().exec(
            new BroadcastMulOp(dldvar, Transforms.pow(var.add(eps), -3.0 / 2.0, true), dldvar.dup(), 1));
    dldvar = dldvar.sum(0, 2, 3);

    INDArray dldmu = Nd4j
            .getExecutioner().exec(new BroadcastMulOp(dldxhat,
                    Transforms.pow(var.add(eps), -1.0 / 2.0, true), dldxhat.dup(), 1))
            .neg().sum(0, 2, 3);
    dldmu = dldmu.add(dldvar.mul(inputSubMean.mul(-2.0).sum(0, 2, 3).div(effectiveMinibatch)));

    INDArray dldinExp = Nd4j.getExecutioner().exec(
            new BroadcastMulOp(dldxhat, Transforms.pow(var.add(eps), -1.0 / 2.0, true), dldxhat.dup(), 1));
    dldinExp = dldinExp.add(Nd4j.getExecutioner().exec(
            new BroadcastMulOp(inputSubMean.mul(2.0 / effectiveMinibatch), dldvar, inputSubMean.dup(), 1)));
    dldinExp = Nd4j.getExecutioner().exec(
            new BroadcastAddOp(dldinExp, dldmu.mul(1.0 / effectiveMinibatch), dldinExp.dup(), 1));

    Pair<Gradient, INDArray> p = l.backpropGradient(epsilon, LayerWorkspaceMgr.noWorkspaces());

    INDArray dldgamma = p.getFirst().getGradientFor("gamma");
    INDArray dldbeta = p.getFirst().getGradientFor("beta");

    assertEquals(dldgammaExp, dldgamma);
    assertEquals(dldbetaExp, dldbeta);

    assertEquals(dldinExp, p.getSecond());
}