Java Code Examples for org.nd4j.autodiff.samediff.SameDiff#mean()

The following examples show how to use org.nd4j.autodiff.samediff.SameDiff#mean(). Each example is taken from an open-source project; the original source file and license are noted above each snippet.
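For orientation before the full test cases below, here is a minimal sketch of mean() in isolation. This is an illustrative example, not taken from the projects below; it assumes a recent release (1.0.0-beta4 or later) and the usual imports (org.nd4j.autodiff.samediff.*, org.nd4j.linalg.api.buffer.DataType, org.nd4j.linalg.factory.Nd4j). Variable names and shapes are arbitrary.

// Build a small graph with both mean() overloads (illustrative sketch)
SameDiff sd = SameDiff.create();
SDVariable in = sd.var("in", DataType.FLOAT, 3, 4);

// Mean over all elements -> scalar
SDVariable meanAll = sd.mean("meanAll", in);
// Mean along dimension 1 -> vector of length 3
SDVariable meanDim1 = sd.mean("meanDim1", in, 1);

// Provide data and evaluate
in.setArray(Nd4j.rand(DataType.FLOAT, 3, 4));
System.out.println(meanAll.eval());   // scalar
System.out.println(meanDim1.eval());  // shape [3]

Most of the tests below use the scalar overload to reduce an op's output to a single loss value, which is what the gradient checks require.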
Example 1
Source File: TransformOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testDepthToSpace() {
    Nd4j.getRandom().setSeed(1337);

    int miniBatch = 128;
    int blockSize = 4;
    int[] inputShape = new int[]{miniBatch, 2, 2, blockSize * blockSize};

    INDArray input = Nd4j.randn(inputShape);
    SameDiff sd = SameDiff.create();
    SDVariable sdInput = sd.var("in", inputShape);

    INDArray expOut = Nd4j.create(miniBatch, 2 * blockSize, 2 * blockSize, 1);
    DynamicCustomOp op = new DepthToSpace(input, expOut, blockSize, DataFormat.NHWC);
    Nd4j.getExecutioner().exec(op);

    sd.associateArrayWithVariable(input, sdInput);

    SDVariable t = sd.cnn().depthToSpace("dts", sdInput, blockSize, DataFormat.NHWC);
    SDVariable loss = sd.mean("loss", t);

    String err = OpValidation.validate(new TestCase(sd)
            .expectedOutput("dts", expOut)
            .gradientCheck(true));
    assertNull(err, err);
}
 
Example 2
Source File: LayerOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testLrn2d() {
    Nd4j.getRandom().setSeed(12345);

    int[][] inputSizes = new int[][]{{1, 3, 8, 8}, {3, 6, 12, 12}};

    List<String> failed = new ArrayList<>();

    for (int[] inSizeNCHW : inputSizes) {

        SameDiff sd = SameDiff.create();
        SDVariable in = null;

        int[] inSize;

        //LRN
        String msg = "LRN with NCHW - input" + Arrays.toString(inSizeNCHW);
        inSize = inSizeNCHW;
        in = sd.var("in", inSize);
        SDVariable out = sd.cnn().localResponseNormalization(in, LocalResponseNormalizationConfig.builder()
                .depth(3)
                .bias(1)
                .alpha(1)
                .beta(0.5)
                .build());

        INDArray inArr = Nd4j.rand(inSize).muli(10);
        in.setArray(inArr);
        SDVariable loss = sd.mean("loss", out);

        log.info("Starting test: " + msg);
        TestCase tc = new TestCase(sd).gradientCheck(true);
        String error = OpValidation.validate(tc);
        if (error != null) {
            failed.add(msg);
        }

    }
    assertEquals(failed.toString(), 0, failed.size());
}
 
Example 3
Source File: TransformOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testDynamicPartition() {
    SameDiff sd = SameDiff.create();

    INDArray ia = Nd4j.create(new double[]{4, 3, 5, 7, 8, 0});
    INDArray partitions = Nd4j.create(new double[]{1, 0, 1, 0, 0, 1}).castTo(DataType.INT);
    int numPartitions = 2;

    SDVariable in = sd.var("in", DataType.DOUBLE, new long[]{6});
    SDVariable sdPartitions = sd.var("partitions", DataType.INT, new long[]{6});

    INDArray expOut1 = Nd4j.create(DataType.DOUBLE, 3L);
    INDArray expOut2 = Nd4j.create(DataType.DOUBLE, 3L);
    INDArray[] expOut = new INDArray[]{expOut1, expOut2};

    DynamicCustomOp dynamicPartition = DynamicCustomOp.builder("dynamic_partition")
            .addInputs(ia, partitions)
            .addIntegerArguments(numPartitions)
            .addOutputs(expOut1, expOut2).build();
    Nd4j.getExecutioner().exec(dynamicPartition);

    SDVariable[] parts = sd.dynamicPartition(new String[]{"dp0", "dp1"}, in, sdPartitions, numPartitions);

    // merge the output partitions together again, to retrieve a single
    // tensor and finally a scalar.
    SDVariable t = sd.math().mergeAdd(parts);
    SDVariable loss = sd.mean("loss", t);

    sd.associateArrayWithVariable(ia, in);
    sd.associateArrayWithVariable(partitions, sdPartitions);

    String err = OpValidation.validate(new TestCase(sd)
            .gradientCheck(true)
            .gradCheckSkipVariables("partitions")
            .expectedOutput("dp0", expOut[0])
            .expectedOutput("dp1", expOut[1]));
    assertNull(err);
}
 
Example 4
Source File: TransformOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testSpaceToBatch() {
    //OpValidationSuite.ignoreFailing();          //TODO: https://github.com/deeplearning4j/deeplearning4j/issues/6863

    Nd4j.getRandom().setSeed(7331);

    int miniBatch = 4;
    int[] inputShape = new int[]{1, 2, 2, 1};

    int M = 2;
    int[] blockShape = new int[]{M, 1};
    int[] paddingShape = new int[]{M, 2};

    INDArray input = Nd4j.randn(inputShape).castTo(DataType.DOUBLE);
    INDArray padding = Nd4j.create(new float[]{0, 0, 0, 0}, paddingShape).castTo(DataType.INT);

    SameDiff sd = SameDiff.create();

    SDVariable sdInput = sd.var("in", inputShape);

    INDArray expOut = Nd4j.create(DataType.DOUBLE, miniBatch, 1, 1, 1);
    DynamicCustomOp op = DynamicCustomOp.builder("space_to_batch")
            .addIntegerArguments(2)
            .addInputs(input, padding)
            .addOutputs(expOut).build();
    Nd4j.getExecutioner().exec(op);

    sd.associateArrayWithVariable(input, sdInput);

    SDVariable t = sd.cnn().spaceToBatch("stb", sdInput, new int[]{2, 2}, new int[]{0, 0}, new int[]{0, 0});
    SDVariable loss = sd.mean("loss", t);

    String err = OpValidation.validate(new TestCase(sd)
            .expectedOutput("stb", expOut)
            .gradientCheck(true));
    assertNull(err, err);
}
 
Example 5
Source File: TransformOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testBatchToSpace() {
    //OpValidationSuite.ignoreFailing();          //TODO: https://github.com/deeplearning4j/deeplearning4j/issues/6863
    Nd4j.getRandom().setSeed(1337);

    int miniBatch = 4;
    int[] inputShape = new int[]{miniBatch, 1, 1, 1};

    int M = 2;
    int[] blockShape = new int[]{M, 1};
    int[] cropShape = new int[]{M, 2};

    INDArray input = Nd4j.randn(inputShape).castTo(DataType.DOUBLE);
    INDArray crops = Nd4j.create(new float[]{0, 0, 0, 0}, cropShape).castTo(DataType.INT);

    SameDiff sd = SameDiff.create();

    SDVariable sdInput = sd.var("in", inputShape);

    INDArray expOut = Nd4j.create(DataType.DOUBLE, 1, 2, 2, 1);
    DynamicCustomOp op = DynamicCustomOp.builder("batch_to_space")
            .addInputs(input, crops)
            .addIntegerArguments(2)
            .addOutputs(expOut).build();
    Nd4j.getExecutioner().exec(op);

    sd.associateArrayWithVariable(input, sdInput);

    SDVariable t = sd.cnn().batchToSpace("bts", sdInput, new int[]{2, 2}, new int[]{0, 0}, new int[]{0, 0});
    SDVariable loss = sd.mean("loss", t);

    String err = OpValidation.validate(new TestCase(sd)
            .expectedOutput("bts", expOut)
            .gradientCheck(true));
    assertNull(err, err);
}
 
Example 6
Source File: TransformOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testSpaceToDepth() {
    Nd4j.getRandom().setSeed(1337);

    int miniBatch = 128;
    int blockSize = 4;
    int[] inputShape = new int[]{miniBatch, 2 * blockSize, 2 * blockSize, 1};

    INDArray input = Nd4j.randn(inputShape);
    SameDiff sd = SameDiff.create();
    SDVariable sdInput = sd.var("in", inputShape);

    INDArray expOut = Nd4j.create(miniBatch, 2, 2, blockSize * blockSize);
    DynamicCustomOp op = new SpaceToDepth(input, expOut, blockSize, DataFormat.NHWC);
    Nd4j.getExecutioner().exec(op);

    sd.associateArrayWithVariable(input, sdInput);

    SDVariable t = sd.cnn().spaceToDepth("std", sdInput, blockSize, DataFormat.NHWC);
    //new SpaceToDepth(sd, sdInput, blockSize, dataFormat).outputVariable();
    SDVariable loss = sd.mean("loss", t);

    String err = OpValidation.validate(new TestCase(sd)
            .expectedOutput("std", expOut)
            .gradientCheck(true));
    assertNull(err);
}
 
Example 7
Source File: TransformOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testCross() {
    INDArray a = Nd4j.create(new double[]{4, 2, 1}, new int[]{1, 3});
    INDArray b = Nd4j.create(new double[]{1, 3, 4}, new int[]{1, 3});

    INDArray expOut = Nd4j.create(DataType.DOUBLE, 1, 3);

    val op = new Cross(a, b, expOut);
    Nd4j.getExecutioner().exec(op);

    SameDiff sd = SameDiff.create();

    SDVariable sdA = sd.var("a", expOut.shape());
    SDVariable sdB = sd.var("b", expOut.shape());

    sd.associateArrayWithVariable(a, sdA);
    sd.associateArrayWithVariable(b, sdB);

    SDVariable t = sd.math().cross("cross", sdA, sdB);
    SDVariable loss = sd.mean("loss", t);

    String err = OpValidation.validate(new TestCase(sd)
            .expectedOutput("cross", expOut)
            .gradientCheck(true));
    assertNull(err, err);
}
 
Example 8
Source File: GradCheckTransforms.java    From nd4j with Apache License 2.0
@Test
public void testDynamicStitch() {
    SameDiff sd = SameDiff.create();

    INDArray ia = Nd4j.create(new float[]{5, 1, 3}, new int[]{1, 3});
    INDArray ib = Nd4j.create(new float[]{7, 2, 4}, new int[]{1, 3});
    INDArray indexA = Nd4j.create(new float[]{0, 1, 4}, new int[]{1, 3});
    INDArray indexB = Nd4j.create(new float[]{2, 3, 5}, new int[]{1, 3});

    INDArray expOut = Nd4j.create(new int[]{1, 6});

    DynamicCustomOp dynamicStitch = DynamicCustomOp.builder("dynamic_stitch")
            .addInputs(indexA, indexB, ia, ib)
            .addOutputs(expOut).build();
    Nd4j.getExecutioner().exec(dynamicStitch);

    SDVariable in1 = sd.var("in1", new int[]{1, 3});
    SDVariable in2 = sd.var("in2", new int[]{1, 3});

    SDVariable index1 = sd.var("index1", new int[]{1, 3});
    SDVariable index2 = sd.var("index2", new int[]{1, 3});

    sd.associateArrayWithVariable(ia, in1);
    sd.associateArrayWithVariable(ib, in2);
    sd.associateArrayWithVariable(indexA, index1);
    sd.associateArrayWithVariable(indexB, index2);

    SDVariable t = sd.dynamicStitch(new SDVariable[]{index1, index2}, new SDVariable[]{in1, in2});

    SDVariable loss = sd.mean("loss", t);

    sd.exec();
    INDArray out = t.getArr();

    if (!expOut.equals(out)) {
        log.error("forward failed");
    }
}
 
Example 9
Source File: GradCheckTransforms.java    From nd4j with Apache License 2.0
@Test
public void testDynamicPartition() {
    SameDiff sd = SameDiff.create();

    INDArray ia = Nd4j.create(new float[]{4, 3, 5, 7, 8, 0}, new int[]{1, 6});
    INDArray partitions = Nd4j.create(new float[]{1, 0, 1, 0, 0, 1});
    int numPartitions = 2;

    SDVariable in = sd.var("in", new int[]{1, 6});
    SDVariable sdPartitions = sd.var("partitions", new int[]{1, 6});

    INDArray expOut1 = Nd4j.create(new int[]{1, 3});
    INDArray expOut2 = Nd4j.create(new int[]{1, 3});
    INDArray[] expOut = new INDArray[]{expOut1, expOut2};

    DynamicCustomOp dynamicPartition = DynamicCustomOp.builder("dynamic_partition")
            .addInputs(ia, partitions)
            .addIntegerArguments(numPartitions)
            .addOutputs(expOut1, expOut2).build();
    Nd4j.getExecutioner().exec(dynamicPartition);

    SDVariable[] parts = sd.dynamicPartition(in, sdPartitions, numPartitions);

    // merge the output partitions together again, to retrieve a single
    // tensor and finally a scalar.
    SDVariable t = sd.mergeAdd(parts);
    SDVariable loss = sd.mean("loss", t);

    sd.associateArrayWithVariable(ia, in);
    sd.exec();
    INDArray[] out = new INDArray[numPartitions];
    for (int i = 0; i < parts.length; i++) {
        out[i] = parts[i].getArr();
    }

    if (!Arrays.equals(expOut, out)) {    // element-wise comparison; expOut.equals(out) would only check reference identity
        log.error("forward failed");
    }
}
 
Example 10
Source File: GradCheckTransforms.java    From nd4j with Apache License 2.0
@Test
public void testDepthToSpace() {
    Nd4j.getRandom().setSeed(1337);

    int miniBatch = 128;
    int blockSize = 4;
    String dataFormat = "NHWC";
    int isNHWC = dataFormat.equals("NHWC") ? 1 : 0;
    int[] inputShape = new int[]{miniBatch, 2, 2, blockSize * blockSize};

    INDArray input = Nd4j.randn(inputShape);
    SameDiff sd = SameDiff.create();
    SDVariable sdInput = sd.var("in", inputShape);

    INDArray expOut = Nd4j.create(miniBatch, 2 * blockSize, 2 * blockSize, 1);
    DynamicCustomOp op = DynamicCustomOp.builder("depth_to_space")
            .addInputs(input)
            .addIntegerArguments(blockSize, isNHWC)
            .addOutputs(expOut).build();
    Nd4j.getExecutioner().exec(op);

    sd.associateArrayWithVariable(input, sdInput);

    SDVariable t = sd.depthToSpace(sdInput, blockSize, dataFormat);
    SDVariable loss = sd.mean("loss", t);
    sd.exec();
    INDArray out = t.getArr();

    if (!expOut.equals(out)) {
        log.info("depth to space failed on forward");
    }

    try {
        GradCheckUtil.checkGradients(sd);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
 
Example 11
Source File: GradCheckTransforms.java    From nd4j with Apache License 2.0
@Test
public void testSpaceToDepth() {
    Nd4j.getRandom().setSeed(1337);

    int miniBatch = 128;
    int blockSize = 4;
    String dataFormat = "NHWC";
    int isNHWC = dataFormat.equals("NHWC") ? 1 : 0;
    int[] inputShape = new int[]{miniBatch, 2 * blockSize, 2 * blockSize, 1};

    INDArray input = Nd4j.randn(inputShape);
    SameDiff sd = SameDiff.create();
    SDVariable sdInput = sd.var("in", inputShape);

    INDArray expOut = Nd4j.create(miniBatch, 2, 2, blockSize * blockSize);
    DynamicCustomOp op = DynamicCustomOp.builder("space_to_depth")
            .addInputs(input)
            .addIntegerArguments(blockSize, isNHWC)
            .addOutputs(expOut).build();
    Nd4j.getExecutioner().exec(op);

    sd.associateArrayWithVariable(input, sdInput);

    SDVariable t = sd.spaceToDepth(sdInput, blockSize, dataFormat);
    SDVariable loss = sd.mean("loss", t);
    sd.exec();
    INDArray out = t.getArr();

    if (!expOut.equals(out)) {
        log.info("depth to space failed on forward");
    }

    try {
        GradCheckUtil.checkGradients(sd);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
 
Example 12
Source File: GradCheckTransforms.java    From nd4j with Apache License 2.0
@Test
public void testCross() {
    INDArray a = Nd4j.create(new float[]{4, 2, 1}, new int[]{1, 3});
    INDArray b = Nd4j.create(new float[]{1, 3, 4}, new int[]{1, 3});

    INDArray expOut = Nd4j.create(1, 3);

    DynamicCustomOp op = DynamicCustomOp.builder("cross").addInputs(a, b).addOutputs(expOut).build();
    Nd4j.getExecutioner().exec(op);

    SameDiff sd = SameDiff.create();

    SDVariable sdA = sd.var("a", expOut.shape());
    SDVariable sdB = sd.var("b", expOut.shape());

    sd.associateArrayWithVariable(a, sdA);
    sd.associateArrayWithVariable(b, sdB);

    SDVariable t = sd.cross(sdA, sdB);
    SDVariable loss = sd.mean("loss", t);
    sd.exec();
    INDArray out = t.getArr();

    if (!expOut.equals(out)) {
        log.info("batch to space failed on forward");
    }

    try {
        GradCheckUtil.checkGradients(sd);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
 
Example 13
Source File: GradCheckReductions.java    From nd4j with Apache License 2.0
@Test
public void testReductionGradientsSimple() {
    //Test reductions: final and only function
    Nd4j.getRandom().setSeed(12345);

    for (int i = 0; i < 12; i++) {

        SameDiff sd = SameDiff.create();

        boolean skipBackward = false;

        int nOut = 4;
        int minibatch = 10;
        SDVariable input = sd.var("in", new int[]{-1, nOut});

        SDVariable loss;
        String name;
        switch (i) {
            case 0:
                loss = sd.mean("loss", input);
                name = "mean";
                break;
            case 1:
                loss = sd.sum("loss", input);
                name = "sum";
                break;
            case 2:
                loss = sd.standardDeviation("loss", input, true);
                name = "stdev";
                break;
            case 3:
                loss = sd.min("loss", input);
                name = "min";
                break;
            case 4:
                loss = sd.max("loss", input);
                name = "max";
                break;
            case 5:
                loss = sd.variance("loss", input, true);
                name = "variance";
                break;
            case 6:
                loss = sd.prod("loss", input);
                name = "prod";
                break;
            case 7:
                loss = sd.norm1("loss", input);
                name = "norm1";
                break;
            case 8:
                loss = sd.norm2("loss", input);
                name = "norm2";
                break;
            case 9:
                loss = sd.normmax("loss", input);
                name = "normmax";
                break;
            case 10:
                loss = sd.countNonZero("loss", input);
                name = "countNonZero";
                skipBackward = true;
                break;
            case 11:
                loss = sd.countZero("loss", input);
                name = "countZero";
                skipBackward = true;
                break;
            default:
                throw new RuntimeException();
        }


        String msg = "test: " + i + " - " + name;
        log.info("*** Starting test: " + msg);

        INDArray inputArr = Nd4j.randn(minibatch, nOut).muli(100);
        sd.associateArrayWithVariable(inputArr, input);

        if (!skipBackward) {
            boolean ok = GradCheckUtil.checkGradients(sd);
            assertTrue(msg, ok);
        }
    }
}
 
Example 14
Source File: ReductionOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testReductionsBackwards() {
    // for (int i = 0; i < 7; i++) {
    int i = 5;
    {
        SameDiff sd = SameDiff.create();

        int nOut = 4;
        int minibatch = 3;
        SDVariable input = sd.var("in", DataType.DOUBLE, new long[]{minibatch, nOut});
        SDVariable label = sd.var("label", DataType.DOUBLE, new long[]{minibatch, nOut});

        SDVariable diff = input.sub(label);
        SDVariable sqDiff = diff.mul(diff);
        SDVariable msePerEx = sd.mean("msePerEx", sqDiff, 1);

        SDVariable loss;    //Scalar value
        String name;
        switch (i) {
            case 0:
                loss = sd.mean("loss", msePerEx, 0);
                name = "mean";
                break;
            case 1:
                loss = sd.sum("loss", msePerEx, 0);
                name = "sum";
                break;
            case 2:
                loss = sd.standardDeviation("loss", msePerEx, true, 0);
                name = "stdev";
                break;
            case 3:
                loss = sd.min("loss", msePerEx, 0);
                name = "min";
                break;
            case 4:
                loss = sd.max("loss", msePerEx, 0);
                name = "max";
                break;
            case 5:
                loss = sd.variance("loss", msePerEx, true, 0);
                name = "variance";
                break;
            case 6:
                loss = sd.prod("loss", msePerEx, 0);
                name = "prod";
                break;
            default:
                throw new RuntimeException();
        }

        String msg = "test: " + i + " - " + name;
        log.info("*** Starting test: " + msg);

        INDArray inputArr = Nd4j.rand(DataType.DOUBLE, minibatch, nOut);
        INDArray labelArr = Nd4j.rand(DataType.DOUBLE, minibatch, nOut);

        sd.associateArrayWithVariable(inputArr, input);
        sd.associateArrayWithVariable(labelArr, label);

        INDArray result = loss.eval();
        assertEquals(1, result.length());

        sd.calculateGradients(Collections.emptyMap(), sd.getVariables().keySet());
    }
}
 
Example 15
Source File: ShapeOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testMerge() {
    Nd4j.getRandom().setSeed(12345);

    List<String> failed = new ArrayList<>();

    for (int t = 0; t < 3; t++) {
        for (int numArrays : new int[]{3, 1}) {
            for (long[] shape : new long[][]{{1}, {3, 4}, {3, 4, 5}}) {


                SameDiff sd = SameDiff.create();
                SDVariable[] arr = new SDVariable[numArrays];

                for (int i = 0; i < numArrays; i++) {
                    arr[i] = sd.var(String.valueOf(i), Nd4j.rand(shape));
                }

                INDArray exp = arr[0].getArr().dup();
                SDVariable merge;
                String name;
                switch (t) {
                    case 0:
                        name = "mergeAdd";
                        merge = sd.math().mergeAdd(arr);
                        for( int i=1; i<numArrays; i++ ){
                            exp.addi(arr[i].getArr().dup());
                        }
                        break;
                    case 1:
                        name = "mergeMax";
                        merge = sd.math().mergeMax(arr);
                        for( int i=1; i<numArrays; i++ ){
                            exp = Transforms.max(exp, arr[i].getArr(), true);
                        }
                        break;
                    case 2:
                        name = "mergeAvg";
                        merge = sd.math().mergeAvg(arr);
                        for( int i=1; i<numArrays; i++ ){
                            exp.addi(arr[i].getArr().dup());
                        }
                        exp.divi(numArrays);
                        break;
                    default:
                        throw new RuntimeException();
                }

                String msg = name + " - numArrays=" + numArrays + ", shape=" + Arrays.toString(shape);
                SDVariable loss;
                if(shape.length > 1){
                    loss = sd.standardDeviation("loss", merge, true);
                } else {
                    loss = sd.mean("loss", merge);
                }


                TestCase tc = new TestCase(sd)
                        .expected(merge, exp)
                        .testName(msg);
                String error = OpValidation.validate(tc, true);
                if(error != null){
                    failed.add(msg + " - " + error);
                }
            }
        }
    }

    assertEquals(failed.toString(), 0, failed.size());
}
 
Example 16
Source File: GradCheckTransforms.java    From nd4j with Apache License 2.0
@Test
public void testSpaceToBatch() {
    Nd4j.getRandom().setSeed(7331);

    int miniBatch = 4;
    int[] inputShape = new int[]{1, 2, 2, 1};

    int M = 2;
    int[] blockShape = new int[]{M, 1};
    int[] paddingShape = new int[]{M, 2};

    INDArray input = Nd4j.randn(inputShape);
    INDArray blocks = Nd4j.create(new float[]{2, 2}, blockShape);
    INDArray padding = Nd4j.create(new float[]{0, 0, 0, 0}, paddingShape);

    SameDiff sd = SameDiff.create();

    SDVariable sdInput = sd.var("in", inputShape);

    INDArray expOut = Nd4j.create(miniBatch, 1, 1, 1);
    DynamicCustomOp op = DynamicCustomOp.builder("space_to_batch")
            .addInputs(input, blocks, padding)
            .addOutputs(expOut).build();
    Nd4j.getExecutioner().exec(op);

    sd.associateArrayWithVariable(input, sdInput);

    SDVariable t = sd.spaceToBatch(sdInput, new int[]{2, 2}, new int[][]{{0, 0}, {0, 0}});
    SDVariable loss = sd.mean("loss", t);
    sd.exec();
    INDArray out = t.getArr();

    if (!expOut.equals(out)) {
        log.info("space to batch failed on forward");
    }

    try {
        GradCheckUtil.checkGradients(sd);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
 
Example 17
Source File: GradCheckTransforms.java    From nd4j with Apache License 2.0
@Test
public void testBatchToSpace() {
    Nd4j.getRandom().setSeed(1337);

    int miniBatch = 4;
    int[] inputShape = new int[]{miniBatch, 1, 1, 1};

    int M = 2;
    int[] blockShape = new int[]{M, 1};
    int[] cropShape = new int[]{M, 2};

    INDArray input = Nd4j.randn(inputShape);
    INDArray blocks = Nd4j.create(new float[]{2, 2}, blockShape);
    INDArray crops = Nd4j.create(new float[]{0, 0, 0, 0}, cropShape);

    SameDiff sd = SameDiff.create();

    SDVariable sdInput = sd.var("in", inputShape);

    INDArray expOut = Nd4j.create(1, 2, 2, 1);
    DynamicCustomOp op = DynamicCustomOp.builder("batch_to_space")
            .addInputs(input, blocks, crops)
            .addOutputs(expOut).build();
    Nd4j.getExecutioner().exec(op);

    sd.associateArrayWithVariable(input, sdInput);

    SDVariable t = sd.batchToSpace(sdInput, new int[]{2, 2}, new int[][]{{0, 0}, {0, 0}});
    SDVariable loss = sd.mean("loss", t);
    sd.exec();
    INDArray out = t.getArr();

    if (!expOut.equals(out)) {
        log.info("batch to space failed on forward");
    }

    try {
        GradCheckUtil.checkGradients(sd);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
 
Example 18
Source File: GradCheckReductions.java    From nd4j with Apache License 2.0
@Test
public void testReductionGradients1() {
    //Test reductions: final, but *not* the only function
    Nd4j.getRandom().setSeed(12345);

    List<String> allFailed = new ArrayList<>();

    for (int dim : new int[]{0, Integer.MAX_VALUE}) {    //These two cases are equivalent here

        for (int i = 0; i < 10; i++) {

            SameDiff sd = SameDiff.create();

            int nOut = 4;
            int minibatch = 10;
            SDVariable input = sd.var("in", new int[]{-1, nOut});
            SDVariable label = sd.var("label", new int[]{-1, nOut});

            SDVariable diff = input.sub(label);
            SDVariable sqDiff = diff.mul(diff);
            SDVariable msePerEx = sd.mean("msePerEx", sqDiff, 1);

            SDVariable loss;
            String name;
            switch (i) {
                case 0:
                    loss = sd.mean("loss", msePerEx, dim);
                    name = "mean";
                    break;
                case 1:
                    loss = sd.sum("loss", msePerEx, dim);
                    name = "sum";
                    break;
                case 2:
                    loss = sd.standardDeviation("loss", msePerEx, true, dim);
                    name = "stdev";
                    break;
                case 3:
                    loss = sd.min("loss", msePerEx, dim);
                    name = "min";
                    break;
                case 4:
                    loss = sd.max("loss", msePerEx, dim);
                    name = "max";
                    break;
                case 5:
                    loss = sd.variance("loss", msePerEx, true, dim);
                    name = "variance";
                    break;
                case 6:
                    loss = sd.prod("loss", msePerEx, dim);
                    name = "prod";
                    break;
                case 7:
                    loss = sd.norm1("loss", msePerEx, dim);
                    name = "norm1";
                    break;
                case 8:
                    loss = sd.norm2("loss", msePerEx, dim);
                    name = "norm2";
                    break;
                case 9:
                    loss = sd.normmax("loss", msePerEx, dim);
                    name = "normmax";
                    break;
                default:
                    throw new RuntimeException();
            }

            String msg = "(test " + i + " - " + name + ", dimension=" + dim + ")";
            log.info("*** Starting test: " + msg);

            INDArray inputArr = Nd4j.randn(minibatch, nOut).muli(100);
            INDArray labelArr = Nd4j.randn(minibatch, nOut).muli(100);

            sd.associateArrayWithVariable(inputArr, input);
            sd.associateArrayWithVariable(labelArr, label);

            try {
                INDArray out = sd.execAndEndResult();
                assertNotNull(out);
                assertArrayEquals(new int[]{1, 1}, out.shape());

                // System.out.println(sd.asFlatPrint());

                boolean ok = GradCheckUtil.checkGradients(sd);
                if (!ok) {
                    allFailed.add(msg);
                }
            } catch (Exception e) {
                e.printStackTrace();
                allFailed.add(msg + " - EXCEPTION");
            }
        }
    }

    assertEquals("Failed: " + allFailed, 0, allFailed.size());
}