Java Code Examples for org.nd4j.autodiff.samediff.SameDiff#standardDeviation()

The following examples show how to use org.nd4j.autodiff.samediff.SameDiff#standardDeviation(). They are taken from open-source projects; the source file, project, and license are noted above each example.
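Before the project examples, here is a minimal, self-contained sketch of the two overload styles that appear throughout them. It is illustrative only (not taken from any of the projects below) and uses only calls visible in the examples themselves: standardDeviation(name, var, biasCorrected) reduces over all elements, while standardDeviation(name, var, biasCorrected, dimensions...) reduces along the given dimensions. With biasCorrected = true the sample standard deviation is computed (divisor n - 1); with false, the population form (divisor n).

import org.nd4j.autodiff.samediff.SDVariable;
import org.nd4j.autodiff.samediff.SameDiff;
import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class StandardDeviationSketch {
    public static void main(String[] args) {
        SameDiff sd = SameDiff.create();

        // 3x4 input holding the values 1..12 (illustrative data only)
        SDVariable in = sd.var("in", Nd4j.linspace(1, 12, 12, DataType.DOUBLE).reshape(3, 4));

        // Bias-corrected stdev over all elements -> scalar
        SDVariable all = sd.standardDeviation("all", in, true);

        // Bias-corrected stdev along dimension 0 -> vector of shape [4]
        SDVariable perColumn = sd.standardDeviation("perColumn", in, true, 0);

        System.out.println(all.eval());        // single value
        System.out.println(perColumn.eval());  // one value per column
    }
}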
Example 1
Source File: GradCheckMisc.java    From nd4j with Apache License 2.0
@Test
public void testPermuteGradient() {
    int[] origShape = new int[]{3, 4, 5};

    for (int[] perm : new int[][]{{0, 1, 2}, {0, 2, 1}, {1, 0, 2}, {1, 2, 0}, {2, 0, 1}, {2, 1, 0}}) {
        for (Pair<INDArray, String> p : NDArrayCreationUtil.getAll3dTestArraysWithShape(12345, origShape)) {
            INDArray inArr = p.getFirst().muli(100);

            SameDiff sd = SameDiff.create();
            SDVariable in = sd.var("in", inArr);
            SDVariable permute = sd.f().permute(in, perm);
            //Using stdev here: mean/sum would backprop the same gradient for each input...
            SDVariable stdev = sd.standardDeviation("out", permute, true);

            INDArray out = sd.execAndEndResult();
            INDArray expOut = in.getArr().std(true, Integer.MAX_VALUE);
            assertEquals(expOut, out);

            String msg = "permute=" + Arrays.toString(perm) + ", source=" + p.getSecond();
            boolean ok = GradCheckUtil.checkGradients(sd);
            assertTrue(msg, ok);
        }
    }
}
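The comment above ("Using stdev here: mean/sum would backprop the same gradient for each input") is the reason standardDeviation() appears as the loss in so many of these shape-op gradient checks: for loss = sum(x), dLoss/dx_i = 1 for every element, so an op that merely scrambled element order would still pass the check, whereas for bias-corrected stdev dLoss/dx_i = (x_i - mean) / ((n - 1) * stdev), which differs per element. A small hypothetical sketch of this (same imports as above, plus java.util.Map and java.util.Collections; calculateGradients is used the same way in Example 17):

SameDiff sd = SameDiff.create();
SDVariable x = sd.var("x", Nd4j.linspace(1, 6, 6, DataType.DOUBLE));
SDVariable loss = sd.standardDeviation("loss", x, true);
sd.addLossVariable(loss);

// Each element of x receives a distinct gradient value; with sum or mean
// as the loss, every element would receive the same gradient
Map<String, INDArray> grads = sd.calculateGradients(Collections.emptyMap(), Collections.singleton("x"));
System.out.println(grads.get("x"));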
 
Example 2
Source File: MiscOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testLgamma() {

    SameDiff sameDiff = SameDiff.create();

    INDArray in = Nd4j.linspace(DataType.DOUBLE, 1, 12, 1).reshape(3, 4);
    SDVariable sdInput = sameDiff.var(in);

    INDArray expected = Nd4j.createFromArray(new double[]{
            0.0,0.0,0.6931472,1.7917595,3.1780539,4.787492,6.5792513,8.525162,10.604603,12.801827,15.104413,17.502308
    }).reshape(3,4);

    SDVariable output = new Lgamma(sameDiff, sdInput).outputVariable();

    SDVariable loss = sameDiff.standardDeviation(sdInput, true);
    sameDiff.addLossVariable(loss);

    TestCase tc = new TestCase(sameDiff)
            .gradientCheck(true)
            .expectedOutput(output.name(), expected);

    String err = OpValidation.validate(tc);
    assertNull(err);
}
 
Example 3
Source File: TransformOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testDiag() {
    SameDiff sd = SameDiff.create();

    INDArray ia = Nd4j.create(new double[]{1, 2}, new int[]{2});
    SDVariable in = sd.var("in", DataType.DOUBLE, new long[]{2});
    INDArray expOut = Nd4j.create(new double[][]{{1, 0}, {0, 2}});

    INDArray expOut2 = Nd4j.create(DataType.DOUBLE, 2, 2);
    DynamicCustomOp diag = DynamicCustomOp.builder("diag").addInputs(ia).addOutputs(expOut2).build();
    Nd4j.getExecutioner().exec(diag);

    assertEquals(expOut, expOut2);

    SDVariable t = sd.math().diag("diag", in);

    SDVariable loss = sd.standardDeviation("loss", t, false, 0, 1);

    sd.associateArrayWithVariable(ia, in);

    String err = OpValidation.validate(new TestCase(sd)
            .expectedOutput("diag", expOut)
            .gradientCheck(true));
    assertNull(err);
}
 
Example 4
Source File: TransformOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testDiagPart() {
    SameDiff sd = SameDiff.create();

    INDArray input = Nd4j.linspace(1, 16, 16, DataType.DOUBLE).reshape(4, 4);
    INDArray expOut = Nd4j.create(new float[]{1, 6, 11, 16}).castTo(DataType.DOUBLE);

    SDVariable in = sd.var("in", input);
    SDVariable t = sd.math().diagPart("dp", in);

    // dimension is 0 here, because the output of diagPart is a vector, not a matrix
    SDVariable loss = sd.standardDeviation("loss", t, true, 0);

    String err = OpValidation.validate(new TestCase(sd)
            .expectedOutput("dp", expOut)
            .gradientCheck(true));
    assertNull(err);
}
 
Example 5
Source File: GradCheckMisc.java    From nd4j with Apache License 2.0
@Test
public void testStridedSliceGradient() {
    Nd4j.getRandom().setSeed(12345);

    //Order here: original shape, begin, end, strides
    List<SSCase> testCases = new ArrayList<>();
    testCases.add(SSCase.builder().shape(3, 4).begin(0, 0).end(3, 4).strides(1, 1).build());
    testCases.add(SSCase.builder().shape(3, 4).begin(1, 1).end(2, 3).strides(1, 1).build());
    testCases.add(SSCase.builder().shape(3, 4).begin(-999, 0).end(3, 4).strides(1, 1).beginMask(1).build());
    testCases.add(SSCase.builder().shape(3, 4).begin(1, 1).end(3, -999).strides(1, 1).endMask(1 << 1).build());
    testCases.add(SSCase.builder().shape(3, 4).begin(-999, 0).end(-999, 4).strides(1, 1).beginMask(1).endMask(1).build());
    testCases.add(SSCase.builder().shape(3, 4).begin(-999, 0, 0).end(-999, 3, 4).strides(1, 1).newAxisMask(1).build());

    testCases.add(SSCase.builder().shape(3, 4, 5).begin(0, 0, 0).end(3, 4, 5).strides(1, 1, 1).build());
    testCases.add(SSCase.builder().shape(3, 4, 5).begin(1, 2, 3).end(3, 4, 5).strides(1, 1, 1).build());
    testCases.add(SSCase.builder().shape(3, 4, 5).begin(0, 0, 0).end(3, 3, 5).strides(1, 2, 2).build());
    testCases.add(SSCase.builder().shape(3, 4, 5).begin(1, -999, 1).end(3, 3, 4).strides(1, 1, 1).beginMask(1 << 1).build());
    testCases.add(SSCase.builder().shape(3, 4, 5).begin(1, -999, 1).end(3, 3, -999).strides(1, 1, 1).beginMask(1 << 1).endMask(1 << 2).build());
    testCases.add(SSCase.builder().shape(3, 4, 5).begin(1, 2).end(3, 4).strides(1, 1).ellipsisMask(1 << 1).build());   //[1:3,...,2:4]
    testCases.add(SSCase.builder().shape(3, 4, 5).begin(1, -999, 1, 2).end(3, -999, 3, 4).strides(1, -999, 1, 2).newAxisMask(1 << 1).build());
    testCases.add(SSCase.builder().shape(3, 4, 5).begin(1, 0, 1).end(3, -999, 4).strides(1, 1, 1).shrinkAxisMask(1 << 1).build());
    testCases.add(SSCase.builder().shape(3, 4, 5).begin(1, 1, 1).end(3, -999, 4).strides(1, 1, 1).shrinkAxisMask(1 << 1).build());


    for (int i = 0; i < testCases.size(); i++) {
        SSCase t = testCases.get(i);
        INDArray arr = Nd4j.rand(t.getShape());

        SameDiff sd = SameDiff.create();
        SDVariable in = sd.var("in", arr);
        SDVariable slice = sd.stridedSlice(in, t.getBegin(), t.getEnd(), t.getStrides(), t.getBeginMask(),
                t.getEndMask(), t.getEllipsisMask(), t.getNewAxisMask(), t.getShrinkAxisMask());
        SDVariable stdev = sd.standardDeviation(slice, true);

        String msg = "i=" + i + ": " + t;
        log.info("Starting test: " + msg);
        boolean ok = GradCheckUtil.checkGradients(sd);
        assertTrue(msg, ok);
    }
}
 
Example 6
Source File: LayerOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testXwPlusB() {
    Nd4j.getRandom().setSeed(12345);

    SameDiff sameDiff = SameDiff.create();
    INDArray input = Nd4j.rand(new long[]{2, 3});
    INDArray weights = Nd4j.rand(new long[]{3, 4});
    INDArray b = Nd4j.rand(new long[]{4});

    SDVariable sdInput = sameDiff.var("input", input);
    SDVariable sdWeights = sameDiff.var("weights", weights);
    SDVariable sdBias = sameDiff.var("bias", b);

    SDVariable res = sameDiff.nn().linear(sdInput, sdWeights, sdBias);
    SDVariable loss = sameDiff.standardDeviation(res, true);

    INDArray exp = input.mmul(weights).addiRowVector(b);

    TestCase tc = new TestCase(sameDiff)
            .gradientCheck(true)
            .expectedOutput(res.name(), exp);

//    System.out.println(sameDiff.summary());
//    System.out.println("============================");
    sameDiff.summary();
    sameDiff.createGradFunction();
//    System.out.println(sameDiff.getFunction("grad").summary());
    sameDiff.getFunction("grad").summary();

    String err = OpValidation.validate(tc);
    assertNull(err);
}
 
Example 7
Source File: LayerOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testReluLayer() {
    Nd4j.getRandom().setSeed(12345);

    SameDiff sameDiff = SameDiff.create();
    INDArray input = Nd4j.rand(new long[]{2, 3});
    INDArray weights = Nd4j.rand(new long[]{3, 4});
    INDArray b = Nd4j.rand(new long[]{4});

    SDVariable sdInput = sameDiff.var("input", input);
    SDVariable sdWeights = sameDiff.var("weights", weights);
    SDVariable sdBias = sameDiff.var("bias", b);

    SDVariable res = sameDiff.nn().reluLayer(sdInput, sdWeights, sdBias);
    SDVariable loss = sameDiff.standardDeviation(res, true);

    INDArray exp = input.mmul(weights).addiRowVector(b);
    Transforms.relu(exp, false);

    TestCase tc = new TestCase(sameDiff)
            .gradientCheck(true)
            .expectedOutput(res.name(), exp);


    String err = OpValidation.validate(tc);
    assertNull(err);
}
 
Example 8
Source File: LayerOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testIm2Col() {
    //OpValidationSuite.ignoreFailing();      //TEMPORARY DUE TO JVM CRASH: https://github.com/deeplearning4j/deeplearning4j/issues/6873
    Nd4j.getRandom().setSeed(12345);

    int[][] inputSizes = new int[][]{{1, 3, 8, 8}, {3, 6, 12, 12}};

    List<String> failed = new ArrayList<>();

    for (int[] inSizeNCHW : inputSizes) {

        SameDiff sd = SameDiff.create();
        SDVariable var = sd.var("in", Nd4j.rand(DataType.DOUBLE, inSizeNCHW));
        SDVariable im2col = sd.cnn().im2Col(var, Conv2DConfig.builder()
                .kH(2).kW(2)
                .sH(1).sW(1)
                .isSameMode(true)
                .build());

        SDVariable loss = sd.standardDeviation("loss", im2col, true);

        String msg = Arrays.toString(inSizeNCHW);

        TestCase tc = new TestCase(sd).gradientCheck(true).testName(msg);
        String error = OpValidation.validate(tc);
        if (error != null) {
            failed.add(msg);
        }
    }

    assertEquals(failed.toString(), 0, failed.size());
}
 
Example 9
Source File: ShapeOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testReshapeGradient() {
    //https://github.com/deeplearning4j/deeplearning4j/issues/6873

    int[] origShape = new int[]{3, 4, 5};

    List<String> failed = new ArrayList<>();

    for (long[] toShape : new long[][]{{3, 4 * 5}, {3 * 4, 5}, {1, 3 * 4 * 5}, {3 * 4 * 5, 1}}) {
        for(char order : new char[]{'c','f'}){
            INDArray inArr = Nd4j.rand(DataType.DOUBLE, origShape, order).muli(100);

            SameDiff sd = SameDiff.create();
            SDVariable in = sd.var("in", inArr);
            SDVariable reshape = sd.reshape(in, toShape);
            //Using stdev here: mean/sum would backprop the same gradient for each input...
            SDVariable stdev = sd.standardDeviation("out", reshape, true);

            INDArray out = stdev.eval();
            INDArray expOut = in.getArr().std(true, Integer.MAX_VALUE);

            String msg = "toShape=" + Arrays.toString(toShape) + ", order=" + order;
            TestCase tc = new TestCase(sd);
            tc.testName(msg)
                    .expectedOutput("out", expOut);

            String error = OpValidation.validate(tc);
            if(error != null){
                failed.add(error);
            }
        }
    }

    assertEquals(failed.toString(), 0, failed.size());
}
 
Example 10
Source File: ShapeOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testShape() {
    SameDiff sameDiff = SameDiff.create();
    val shape = new long[]{2, 3};
    SDVariable x = sameDiff.var("x", shape);
    SDVariable result = sameDiff.shape(x).castTo(DataType.DOUBLE);
    SDVariable loss = sameDiff.standardDeviation(result, true);

    String err = OpValidation.validate(new TestCase(sameDiff)
            .gradientCheck(false)
            .expected(result, Nd4j.create(new double[]{2,3}, new long[]{2})));

    assertNull(err);
}
 
Example 11
Source File: MiscOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testBiasAdd() {

    SameDiff sameDiff = SameDiff.create();

    INDArray in1 = Nd4j.linspace(1, 12, 12);
    INDArray in2 = Nd4j.linspace(1, 12, 12);

    SDVariable input1 = sameDiff.var(in1);
    SDVariable input2 = sameDiff.var(in2);

    INDArray expected = Nd4j.createFromArray(new double[]{
            2.0000,    4.0000,    6.0000,    8.0000,   10.0000,   12.0000,   14.0000,   16.0000,   18.0000,   20.0000,   22.0000,   24.0000
    });

    SDVariable output = new BiasAdd(sameDiff, input1, input2, false).outputVariable();
    SDVariable loss = sameDiff.standardDeviation(input1, true);
    sameDiff.addLossVariable(loss);
    SDVariable loss2 = sameDiff.standardDeviation(input2, true);
    sameDiff.addLossVariable(loss2);

    TestCase tc = new TestCase(sameDiff)
            .gradientCheck(true)
            .expectedOutput(output.name(), expected);

    String err = OpValidation.validate(tc);
    assertNull(err);
}
 
Example 12
Source File: ShapeOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testMerge() {
    Nd4j.getRandom().setSeed(12345);

    List<String> failed = new ArrayList<>();

    for (int t = 0; t < 3; t++) {
        for (int numArrays : new int[]{3, 1}) {
            for (long[] shape : new long[][]{{1}, {3, 4}, {3, 4, 5}}) {


                SameDiff sd = SameDiff.create();
                SDVariable[] arr = new SDVariable[numArrays];

                for (int i = 0; i < numArrays; i++) {
                    arr[i] = sd.var(String.valueOf(i), Nd4j.rand(shape));
                }

                INDArray exp = arr[0].getArr().dup();
                SDVariable merge;
                String name;
                switch (t) {
                    case 0:
                        name = "mergeAdd";
                        merge = sd.math().mergeAdd(arr);
                        for( int i=1; i<numArrays; i++ ){
                            exp.addi(arr[i].getArr().dup());
                        }
                        break;
                    case 1:
                        name = "mergeMax";
                        merge = sd.math().mergeMax(arr);
                        for( int i=1; i<numArrays; i++ ){
                            exp = Transforms.max(exp, arr[i].getArr(), true);
                        }
                        break;
                    case 2:
                        name = "mergeAvg";
                        merge = sd.math().mergeAvg(arr);
                        for( int i=1; i<numArrays; i++ ){
                            exp.addi(arr[i].getArr().dup());
                        }
                        exp.divi(numArrays);
                        break;
                    default:
                        throw new RuntimeException();
                }

                String msg = name + " - numArrays=" + numArrays + ", shape=" + Arrays.toString(shape);
                SDVariable loss;
                if(shape.length > 1){
                    loss = sd.standardDeviation("loss", merge, true);
                } else {
                    loss = sd.mean("loss", merge);
                }


                TestCase tc = new TestCase(sd)
                        .expected(merge, exp)
                        .testName(msg);
                String error = OpValidation.validate(tc, true);
                if(error != null){
                    failed.add(msg + " - " + error);
                }
            }
        }
    }

    assertEquals(failed.toString(), 0, failed.size());
}
 
Example 13
Source File: GradCheckReductions.java    From nd4j with Apache License 2.0
@Test
public void testReductionGradientsSimple() {
    //Test reductions: final and only function
    Nd4j.getRandom().setSeed(12345);

    for (int i = 0; i < 12; i++) {

        SameDiff sd = SameDiff.create();

        boolean skipBackward = false;

        int nOut = 4;
        int minibatch = 10;
        SDVariable input = sd.var("in", new int[]{-1, nOut});

        SDVariable loss;
        String name;
        switch (i) {
            case 0:
                loss = sd.mean("loss", input);
                name = "mean";
                break;
            case 1:
                loss = sd.sum("loss", input);
                name = "sum";
                break;
            case 2:
                loss = sd.standardDeviation("loss", input, true);
                name = "stdev";
                break;
            case 3:
                loss = sd.min("loss", input);
                name = "min";
                break;
            case 4:
                loss = sd.max("loss", input);
                name = "max";
                break;
            case 5:
                loss = sd.variance("loss", input, true);
                name = "variance";
                break;
            case 6:
                loss = sd.prod("loss", input);
                name = "prod";
                break;
            case 7:
                loss = sd.norm1("loss", input);
                name = "norm1";
                break;
            case 8:
                loss = sd.norm2("loss", input);
                name = "norm2";
                break;
            case 9:
                loss = sd.normmax("loss", input);
                name = "normmax";
                break;
            case 10:
                loss = sd.countNonZero("loss", input);
                name = "countNonZero";
                skipBackward = true;
                break;
            case 11:
                loss = sd.countZero("loss", input);
                name = "countZero";
                skipBackward = true;
                break;
            default:
                throw new RuntimeException();
        }


        String msg = "test: " + i + " - " + name;
        log.info("*** Starting test: " + msg);

        INDArray inputArr = Nd4j.randn(minibatch, nOut).muli(100);
        sd.associateArrayWithVariable(inputArr, input);

        if (!skipBackward) {
            boolean ok = GradCheckUtil.checkGradients(sd);
            assertTrue(msg, ok);
        }
    }
}
 
Example 14
Source File: GradCheckMisc.java    From nd4j with Apache License 2.0
@Test
public void testSqueezeGradient() {
    val origShape = new long[]{3, 4, 5};

    for (int i = 0; i < 3; i++) {

        val shape = origShape.clone();
        shape[i] = 1;

        for (Pair<INDArray, String> p : NDArrayCreationUtil.getAll3dTestArraysWithShape(12345, shape)) {
            INDArray inArr = p.getFirst().muli(100);

            SameDiff sd = SameDiff.create();
            SDVariable in = sd.var("in", inArr);
            SDVariable squeeze = sd.f().squeeze(in, i);
            //Using stdev here: mean/sum would backprop the same gradient for each input...
            SDVariable stdev = sd.standardDeviation("out", squeeze, true);

            long[] expShapePostSqueeze;
            switch (i) {
                case 0:
                    expShapePostSqueeze = new long[]{4, 5};
                    break;
                case 1:
                    expShapePostSqueeze = new long[]{3, 5};
                    break;
                case 2:
                    expShapePostSqueeze = new long[]{3, 4};
                    break;
                default:
                    throw new RuntimeException();
            }

            sd.execAndEndResult();

            INDArray squeezed = squeeze.getArr();
            assertArrayEquals(expShapePostSqueeze, squeezed.shape());

            INDArray out = sd.execAndEndResult();
            INDArray expOut = in.getArr().std(true, Integer.MAX_VALUE);
            assertEquals(expOut, out);

            String msg = "squeezeDim=" + i + ", source=" + p.getSecond();
            boolean ok = GradCheckUtil.checkGradients(sd);
            assertTrue(msg, ok);
        }
    }
}
 
Example 15
Source File: ShapeOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testSqueezeGradient() {
    val origShape = new long[]{3, 4, 5};

    List<String> failed = new ArrayList<>();

    for (int i = 0; i < 3; i++) {

        val shape = origShape.clone();
        shape[i] = 1;

        for (Pair<INDArray, String> p : NDArrayCreationUtil.getAll3dTestArraysWithShape(12345, shape, DataType.DOUBLE)) {
            INDArray inArr = p.getFirst().muli(100);

            SameDiff sd = SameDiff.create();
            SDVariable in = sd.var("in", inArr);
            SDVariable squeeze = sd.squeeze(in, i);
            //Using stdev here: mean/sum would backprop the same gradient for each input...
            SDVariable stdev = sd.standardDeviation("out", squeeze, true);

            long[] expShapePostSqueeze;
            switch (i) {
                case 0:
                    expShapePostSqueeze = new long[]{4, 5};
                    break;
                case 1:
                    expShapePostSqueeze = new long[]{3, 5};
                    break;
                case 2:
                    expShapePostSqueeze = new long[]{3, 4};
                    break;
                default:
                    throw new RuntimeException();
            }

            INDArray exp = inArr.dup('c').reshape('c', expShapePostSqueeze);

            Map<String,INDArray> m = sd.outputAll(null);

            INDArray squeezed = m.get(squeeze.name());
//            assertArrayEquals(expShapePostSqueeze, squeezed.shape());

            INDArray out = m.get(stdev.name());
            INDArray expOut = in.getArr().std(true, Integer.MAX_VALUE);
            assertEquals(expOut, out);

            String msg = "squeezeDim=" + i + ", source=" + p.getSecond();
            TestCase tc = new TestCase(sd)
                    .testName(msg)
                    .expected(squeeze.name(), exp)
                    .expectedOutput("out", expOut);

            String error = OpValidation.validate(tc, true);
            if(error != null){
                failed.add(msg + " - " + error);
            }
        }
    }

    assertEquals(failed.toString(), 0, failed.size());
}
 
Example 16
Source File: LayerOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testConv3d() {
    //Pooling3d, Conv3D, batch norm
    Nd4j.getRandom().setSeed(12345);

    //NCDHW format
    int[][] inputSizes = new int[][]{{2, 3, 4, 5, 5}};

    List<String> failed = new ArrayList<>();

    for (int[] inSizeNCDHW : inputSizes) {
        for (boolean ncdhw : new boolean[]{true, false}) {
            int nIn = inSizeNCDHW[1];
            int[] shape = (ncdhw ? inSizeNCDHW : ncdhwToNdhwc(inSizeNCDHW));

            for (int i = 0; i < 5; i++) {
                SameDiff sd = SameDiff.create();
                SDVariable in = sd.var("in", shape);

                SDVariable out;
                String msg;
                switch (i) {
                    case 0:
                        //Conv3d, with bias, same
                        msg = "0 - conv3d+bias+same, ncdhw=" + ncdhw + " - input " + Arrays.toString(shape);
                        SDVariable w0 = sd.var("w0", Nd4j.rand(new int[]{2, 2, 2, nIn, 3}).muli(10));  //[kD, kH, kW, iC, oC]
                        SDVariable b0 = sd.var("b0", Nd4j.rand(new long[]{3}).muli(10));
                        out = sd.cnn().conv3d(in, w0, b0, Conv3DConfig.builder()
                                .dataFormat(ncdhw ? Conv3DConfig.NCDHW : Conv3DConfig.NDHWC)
                                .isSameMode(true)
                                .kH(2).kW(2).kD(2)
                                .sD(1).sH(1).sW(1)
                                .build());
                        break;
                    case 1:
                        //Conv3d, no bias, no same
                        msg = "1 - conv3d+no bias+no same, ncdhw=" + ncdhw + " - input " + Arrays.toString(shape);
                        SDVariable w1 = sd.var("w1", Nd4j.rand(new int[]{2, 2, 2, nIn, 3}).muli(10));  //[kD, kH, kW, iC, oC]
                        out = sd.cnn().conv3d(in, w1, Conv3DConfig.builder()
                                .dataFormat(ncdhw ? Conv3DConfig.NCDHW : Conv3DConfig.NDHWC)
                                .isSameMode(false)
                                .kH(2).kW(2).kD(2)
                                .sD(1).sH(1).sW(1)
                                .build());
                        break;
                    case 2:
                        //pooling3d - average, no same
                        msg = "2 - pooling 3d, average, same";
                        out = sd.cnn().avgPooling3d(in, Pooling3DConfig.builder()
                                .kH(2).kW(2).kD(2)
                                .sH(1).sW(1).sD(1)
                                .isSameMode(false)
                                .isNCDHW(ncdhw)
                                .build());
                        break;
                    case 3:
                        //pooling 3d - max, same
                        msg = "3 - pooling 3d, max, same";
                        out = sd.cnn().maxPooling3d(in, Pooling3DConfig.builder()
                                .kH(2).kW(2).kD(2)
                                .sH(1).sW(1).sD(1)
                                .isSameMode(true)
                                .isNCDHW(ncdhw)
                                .build());
                        break;
                    case 4:
                        //Deconv3d
                        msg = "4 - deconv3d, ncdhw=" + ncdhw;
                        SDVariable wDeconv = sd.var(Nd4j.rand(new int[]{2, 2, 2, 3, nIn}));  //[kD, kH, kW, oC, iC]
                        SDVariable bDeconv = sd.var(Nd4j.rand(new int[]{3}));
                        out = sd.cnn().deconv3d("Deconv3d", in, wDeconv, bDeconv, DeConv3DConfig.builder()
                                .kD(2).kH(2).kW(2)
                                .isSameMode(true)
                                .dataFormat(ncdhw ? DeConv3DConfig.NCDHW : DeConv3DConfig.NDHWC)
                                .build());
                        break;
                    case 5:
                        //Batch norm - 3d input
                        throw new RuntimeException("Batch norm test not yet implemented");
                    default:
                        throw new RuntimeException();
                }

                INDArray inArr = Nd4j.rand(shape).muli(10);
                in.setArray(inArr);
                SDVariable loss = sd.standardDeviation("loss", out, true);

                log.info("Starting test: " + msg);
                TestCase tc = new TestCase(sd).gradientCheck(true);
                tc.testName(msg);
                String error = OpValidation.validate(tc);
                if (error != null) {
                    failed.add(msg + " - " + error);
                }
            }
        }
    }

    assertEquals(failed.toString(), 0, failed.size());
}
 
Example 17
Source File: ReductionOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testReductionsBackwards() {
//    for (int i = 0; i < 7; i++) {
    int i = 5;
    {

        SameDiff sd = SameDiff.create();

        int nOut = 4;
        int minibatch = 3;
        SDVariable input = sd.var("in", DataType.DOUBLE, new long[]{minibatch, nOut});
        SDVariable label = sd.var("label", DataType.DOUBLE, new long[]{minibatch, nOut});

        SDVariable diff = input.sub(label);
        SDVariable sqDiff = diff.mul(diff);
        SDVariable msePerEx = sd.mean("msePerEx", sqDiff, 1);

        SDVariable loss;    //Scalar value
        String name;
        switch (i) {
            case 0:
                loss = sd.mean("loss", msePerEx, 0);
                name = "mean";
                break;
            case 1:
                loss = sd.sum("loss", msePerEx, 0);
                name = "sum";
                break;
            case 2:
                loss = sd.standardDeviation("loss", msePerEx, true, 0);
                name = "stdev";
                break;
            case 3:
                loss = sd.min("loss", msePerEx, 0);
                name = "min";
                break;
            case 4:
                loss = sd.max("loss", msePerEx, 0);
                name = "max";
                break;
            case 5:
                loss = sd.variance("loss", msePerEx, true, 0);
                name = "variance";
                break;
            case 6:
                loss = sd.prod("loss", msePerEx, 0);
                name = "prod";
                break;
            default:
                throw new RuntimeException();
        }

        String msg = "test: " + i + " - " + name;
        log.info("*** Starting test: " + msg);

        INDArray inputArr = Nd4j.rand(DataType.DOUBLE, minibatch, nOut);
        INDArray labelArr = Nd4j.rand(DataType.DOUBLE, minibatch, nOut);

        sd.associateArrayWithVariable(inputArr, input);
        sd.associateArrayWithVariable(labelArr, label);

        INDArray result = loss.eval();
        assertEquals(1, result.length());

        sd.calculateGradients(Collections.emptyMap(), sd.getVariables().keySet());
    }
}
 
Example 18
Source File: ShapeOpValidation.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Test
public void testSliceGradient() {
    Nd4j.getRandom().setSeed(12345);

    //Order here: original shape, begin, size
    List<Triple<int[], int[], int[]>> testCases = new ArrayList<>();
    testCases.add(new Triple<>(new int[]{3, 4}, new int[]{0, 0}, new int[]{3, 4}));
    testCases.add(new Triple<>(new int[]{3, 4}, new int[]{1, 1}, new int[]{2, 2}));
    testCases.add(new Triple<>(new int[]{3, 4}, new int[]{1, 2}, new int[]{2, 2}));
    testCases.add(new Triple<>(new int[]{3, 4, 5}, new int[]{0, 0, 0}, new int[]{3, 4, 5}));
    testCases.add(new Triple<>(new int[]{3, 4, 5}, new int[]{1, 1, 1}, new int[]{2, 3, 4}));

    Map<Integer,INDArrayIndex[]> indices = new HashMap<>();
    indices.put(0, new INDArrayIndex[]{all(), all()});
    indices.put(1, new INDArrayIndex[]{interval(1,3), interval(1,3)});
    indices.put(2, new INDArrayIndex[]{interval(1,3), interval(2,4)});
    indices.put(3, new INDArrayIndex[]{all(), all(), all()});
    indices.put(4, new INDArrayIndex[]{interval(1,3), interval(1,4), interval(1,5)});

    List<String> failed = new ArrayList<>();

    for (int i = 0; i < testCases.size(); i++) {
        Triple<int[], int[], int[]> t = testCases.get(i);
        int[] os = t.getFirst();
        int[] b = t.getSecond();
        int[] e = t.getThird();
        int prod = ArrayUtil.prod(os);
        INDArray arr = Nd4j.linspace(1, prod, prod, DataType.DOUBLE).reshape(os);

        SameDiff sd = SameDiff.create();
        SDVariable in = sd.var("in", arr);
        SDVariable slice = sd.slice(in, b, e);
        SDVariable stdev = sd.standardDeviation(slice, true);

        String msg = "i=" + i + ": inShape=" + Arrays.toString(os) + ", begin=" + Arrays.toString(b) + ", end=" + Arrays.toString(e);
        log.info("Starting test: " + msg);

        TestCase tc = new TestCase(sd).testName(msg);

        if(indices.containsKey(i)){
            tc.expected(slice, arr.get(indices.get(i)).dup());
        }

        String error = OpValidation.validate(tc, true);
        if(error != null){
            failed.add(error);
        }
    }

    assertEquals(failed.toString(), 0, failed.size());
}
 
Example 19
Source File: GradCheckMisc.java    From nd4j with Apache License 2.0 4 votes vote down vote up
@Test
public void testExpandDimsGradient() {
    val origShape = new long[]{3, 4};

    boolean first = true;
    for (int i = 0; i < 3; i++) {

        long[] expExpandShape;
        switch (i) {
            case 0:
                expExpandShape = new long[]{1, 3, 4};
                break;
            case 1:
                expExpandShape = new long[]{3, 1, 4};
                break;
            case 2:
                expExpandShape = new long[]{3, 4, 1};
                break;
            default:
                throw new RuntimeException();
        }

        for (Pair<INDArray, String> p : NDArrayCreationUtil.getAllTestMatricesWithShape(origShape[0], origShape[1], 12345)) {
            INDArray inArr = p.getFirst().muli(100);

            SameDiff sd = SameDiff.create();
            SDVariable in = sd.var("in", inArr);
            SDVariable expand = sd.f().expandDims(in, i);
            //Using stdev here: mean/sum would backprop the same gradient for each input...
            SDVariable stdev = sd.standardDeviation("out", expand, true);

            INDArray out = sd.execAndEndResult();
            INDArray expOut = in.getArr().std(true, Integer.MAX_VALUE);
            assertEquals(expOut, out);

            assertArrayEquals(expExpandShape, expand.getArr().shape());
            INDArray expExpand = inArr.dup('c').reshape(expExpandShape);
            assertEquals(expExpand, expand.getArr());

            String msg = "expandDim=" + i + ", source=" + p.getSecond();
            log.info("Starting: " + msg);
            boolean ok = GradCheckUtil.checkGradients(sd);
            assertTrue(msg, ok);
        }
    }
}
 
Example 20
Source File: TransformOpValidation.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Test
public void testScalarOps() {
    int d0 = 2;
    int d1 = 3;
    int d2 = 4;

    int n = d0 * d1 * d2;

    List<String> failed = new ArrayList<>();

    for (int i = 0; i < 11; i++) {
        for (char inOrder : new char[]{'c', 'f'}) {
            SameDiff sd = SameDiff.create();

            INDArray inArr = Nd4j.linspace(1, n, n, DataType.DOUBLE).reshape('c', d0, d1, d2).dup(inOrder);
            SDVariable in = sd.var("in", inArr);
            TestCase tc = new TestCase(sd).gradientCheck(true);

            SDVariable out;
            String msg;
            switch (i) {
                case 0:
                    out = in.mul(2);
                    tc.expectedOutput(out.name(), inArr.mul(2));
                    msg = "mul - " + inOrder;
                    break;
                case 1:
                    out = in.div(2);
                    tc.expectedOutput(out.name(), inArr.div(2));
                    msg = "div - " + inOrder;
                    break;
                case 2:
                    out = in.add(2);
                    tc.expectedOutput(out.name(), inArr.add(2));
                    msg = "add - " + inOrder;
                    break;
                case 3:
                    out = in.sub(2);
                    tc.expectedOutput(out.name(), inArr.sub(2));
                    msg = "sub - " + inOrder;
                    break;
                case 4:
                    out = in.rdiv(2);
                    tc.expectedOutput(out.name(), inArr.rdiv(2));
                    msg = "rdiv - " + inOrder;
                    break;
                case 5:
                    out = in.rsub(2);
                    tc.expectedOutput(out.name(), inArr.rsub(2));
                    msg = "rsub - " + inOrder;
                    break;
                case 6:
                    out = sd.math().pow(in, 2);
                    tc.expectedOutput(out.name(), Transforms.pow(inArr, 2));
                    msg = "pow - " + inOrder;
                    break;
                case 7:
                    inArr.assign(Nd4j.rand(inArr.dataType(), inArr.shape()).muli(5).subi(2.5));
                    out = sd.math().floorMod(in, 2.0);
                    tc.expected(out, Nd4j.getExecutioner().exec(new ScalarFMod(inArr.dup(), 2.0)));
                    msg = "scalarFloorMod - " + inOrder;
                    break;
                case 8:
                    inArr.assign(Nd4j.rand(inArr.shape()));
                    out = sd.scalarMax(in, 0.5);
                    tc.expected(out, Transforms.max(inArr.dup(), 0.5));
                    msg = "scalarMax - " + inOrder;
                    break;
                case 9:
                    inArr.assign(Nd4j.rand(inArr.shape()));
                    out = sd.scalarMin(in, 0.5);
                    tc.expected(out, Transforms.min(inArr.dup(), 0.5));
                    msg = "scalarMin - " + inOrder;
                    break;
                case 10:
                    out = in.assign(0.5);
                    tc.expected(out, Nd4j.valueArrayOf(inArr.shape(), 0.5));
                    msg = "scalarSet - " + inOrder;
                    break;
                default:
                    throw new RuntimeException();
            }

            tc.testName(msg);

            SDVariable loss = sd.standardDeviation(out, true);

            log.info("Starting test: " + msg);
            String err = OpValidation.validate(tc, true);
            if (err != null) {
                failed.add(err);
            }
        }
    }
    assertEquals(failed.toString(), 0, failed.size());
}