Java Code Examples for org.nd4j.autodiff.samediff.SameDiff#sum()

The following examples show how to use org.nd4j.autodiff.samediff.SameDiff#sum(). The examples are drawn from open-source projects; you can go to the original project or source file by following the reference above each example.
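SameDiff#sum() has several overloads, all of which appear in the examples below: a full reduction over every dimension, a reduction along specific dimensions, and named variants that take a variable name as the first argument. A minimal sketch of the three forms (assuming a recent nd4j version on the classpath; the class and variable names here are illustrative):

import org.nd4j.autodiff.samediff.SDVariable;
import org.nd4j.autodiff.samediff.SameDiff;
import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class SumOverloadsSketch {
    public static void main(String[] args) {
        SameDiff sd = SameDiff.create();
        INDArray data = Nd4j.linspace(1, 6, 6, DataType.DOUBLE).reshape(2, 3);   // [[1,2,3],[4,5,6]]
        SDVariable in = sd.var("in", data);

        SDVariable total = sd.sum(in);                  // reduce over all dimensions -> scalar
        SDVariable colSums = sd.sum(in, 0);             // reduce along dimension 0
        SDVariable rowSums = sd.sum("rowSums", in, 1);  // named variant, reduce along dimension 1

        System.out.println(total.eval());    // 21.0
        System.out.println(colSums.eval());  // [5.0, 7.0, 9.0]
        System.out.println(rowSums.eval());  // [6.0, 15.0]
    }
}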
Example 1
Source File: GraphExecutionerTest.java    From nd4j with Apache License 2.0
/**
 * VarSpace should dump everything. 4 variables in our case
 * @throws Exception
 */
@Test
public void testEquality1() throws Exception {
    GraphExecutioner executionerA = new BasicGraphExecutioner();
    GraphExecutioner executionerB = new NativeGraphExecutioner();

    SameDiff sameDiff = SameDiff.create();
    INDArray ones = Nd4j.ones(4);
    SDVariable sdVariable = sameDiff.var("ones",ones);
    SDVariable scalarOne = sameDiff.var("add1",Nd4j.scalar(1.0));
    SDVariable result = sdVariable.addi(scalarOne);
    SDVariable total = sameDiff.sum(result,Integer.MAX_VALUE);

    log.info("TOTAL: {}; Id: {}", total.getVarName(), total);

    INDArray[] resB = executionerB.executeGraph(sameDiff, configVarSpace);

    assertEquals(6, resB.length);
    assertEquals(Nd4j.create(new float[]{2f, 2f, 2f, 2f}), resB[4]);
    assertEquals(Nd4j.scalar(1), resB[1]);
    assertEquals(Nd4j.scalar(8.0), resB[5]);
}
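Passing Integer.MAX_VALUE as the dimension argument is the idiom these tests use for "reduce over all dimensions". Here ones(4) plus the scalar 1.0 yields {2, 2, 2, 2}, so the full sum is the scalar 8.0 asserted above.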
 
Example 2
Source File: GraphExecutionerTest.java    From deeplearning4j with Apache License 2.0
    /**
     * Implicit output mode should return only the graph's terminal ("tree edge") outputs: a single variable here.
     */
    @Test
    public void testEquality2() {
        OpValidationSuite.ignoreFailing();  //Failing 2019/01/24
        GraphExecutioner executionerA = new BasicGraphExecutioner();
        GraphExecutioner executionerB = new NativeGraphExecutioner();

        SameDiff sameDiff = SameDiff.create();
        INDArray ones = Nd4j.ones(4);
        SDVariable sdVariable = sameDiff.var("ones",ones);
        SDVariable scalarOne = sameDiff.var("add1",Nd4j.scalar(1.0));
        SDVariable result = sdVariable.add(scalarOne);
        SDVariable total = sameDiff.sum(result,Integer.MAX_VALUE);

//        log.info("ID: {}",sameDiff.getGraph().getVertex(1).getValue().getId());

        INDArray[] resB = executionerB.executeGraph(sameDiff, configImplicit);

        assertEquals(1, resB.length);
        assertEquals(Nd4j.scalar(8.0), resB[0]);

        //INDArray resA = executionerA.executeGraph(sameDiff)[0];

        //assertEquals(resA, resB);
    }
 
Example 3
Source File: TransformOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testSigmoidBackwards() {
    SameDiff sameDiff = SameDiff.create();
    INDArray sumInput = Nd4j.linspace(1, 4, 4, DataType.DOUBLE).reshape(2, 2);
    Map<String, INDArray> inputs = new HashMap<>();
    inputs.put("x", sumInput);
    SDVariable input = sameDiff.var("x", inputs.get("x"));
    SDVariable sigmoid = sameDiff.nn().sigmoid(input);
    SDVariable sum = sameDiff.sum(sigmoid, Integer.MAX_VALUE);
    Map<String, INDArray> m = sameDiff.calculateGradients(Collections.emptyMap(), sameDiff.getVariables().keySet());
    INDArray arr = m.get(input.name());
    assertTrue(Nd4j.create(new double[][]{
            {0.1966, 0.1050},
            {0.0452, 0.0177}
    }).equalsWithEps(arr, 1e-2));
}
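For L = sum(sigmoid(x)), the analytic gradient is dL/dx = sigmoid(x) * (1 - sigmoid(x)), which is where the expected values come from: sigmoid(1) ≈ 0.7311, so the gradient at x = 1 is ≈ 0.1966, and likewise for 2, 3, 4. A standalone sketch of the same check using plain ND4J ops (a hypothetical snippet, not part of the original test):

import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.ops.transforms.Transforms;

public class SigmoidGradSketch {
    public static void main(String[] args) {
        // dL/dx for L = sum(sigmoid(x)) is sigmoid(x) * (1 - sigmoid(x))
        INDArray x = Nd4j.linspace(1, 4, 4, DataType.DOUBLE).reshape(2, 2);
        INDArray s = Transforms.sigmoid(x, true);   // copy = true: leave x untouched
        INDArray grad = s.mul(s.rsub(1.0));         // ~= {{0.1966, 0.1050}, {0.0452, 0.0177}}
        System.out.println(grad);
    }
}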
 
Example 4
Source File: CosineSimilarity.java    From nd4j with Apache License 2.0
public static List<SDVariable> doDiff(SameDiff sameDiff, DifferentialFunctionFactory f, SDVariable x, SDVariable y,
                                      SDVariable gradOut, int... dimensions){
    SDVariable a = sameDiff.sum(x.mul(y),dimensions);
    SDVariable l2x = f.norm2(x, dimensions);
    SDVariable l2y = f.norm2(y, dimensions);
    SDVariable b = l2x.mul(l2y);

    int origRank = Shape.rankFromShape(x.getShape());
    SDVariable broadcastableA = f.reductionBroadcastableWithOrigShape(origRank, dimensions, a);
    SDVariable broadcastableB = f.reductionBroadcastableWithOrigShape(origRank, dimensions, b);
    SDVariable broadcastableL2xSq = f.reductionBroadcastableWithOrigShape(origRank, dimensions, sameDiff.square(l2x));
    SDVariable broadcastableL2ySq = f.reductionBroadcastableWithOrigShape(origRank, dimensions, sameDiff.square(l2y));
    SDVariable broadcastableGrad = f.reductionBroadcastableWithOrigShape(origRank, dimensions, gradOut);

    SDVariable dcdx = y.sub(x.mul(broadcastableA).div(broadcastableL2xSq)).div(broadcastableB);
    SDVariable dcdy = x.sub(y.mul(broadcastableA).div(broadcastableL2ySq)).div(broadcastableB);

    return Arrays.asList(dcdx.mul(broadcastableGrad), dcdy.mul(broadcastableGrad));
}
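For reference, this implements the gradient of cosine similarity c = a / b with a = sum(x * y) and b = norm2(x) * norm2(y): differentiating gives dc/dx = (y - x * a / norm2(x)^2) / b, and symmetrically for dc/dy. The reductionBroadcastableWithOrigShape calls reshape the reduced quantities (a, b, the squared norms, and the incoming gradient) so they broadcast back over the original, unreduced dimensions.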
 
Example 5
Source File: MiscOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testMergeRank1(){
    SameDiff sd = SameDiff.create();
    SDVariable var = sd.var("in", Nd4j.create(new long[]{1}).assign(5));

    SDVariable merged = sd.math().mergeAvg("merged", new SDVariable[]{var});
    SDVariable sum = sd.sum(merged);

    Map<String,INDArray> m = sd.output(Collections.emptyMap(), "merged");
    Map<String,INDArray> gm = sd.calculateGradients(null, "in");

    INDArray out = m.get("merged");
    assertEquals(1, out.rank());

    INDArray inGrad = gm.get("in");
    assertEquals(1, inGrad.rank());
}
 
Example 6
Source File: GraphExecutionerTest.java    From nd4j with Apache License 2.0
    /**
     * Implicit output mode should return only the graph's terminal ("tree edge") outputs: a single variable here.
     * @throws Exception
     */
    @Test
    public void testEquality2() throws Exception {
        GraphExecutioner executionerA = new BasicGraphExecutioner();
        GraphExecutioner executionerB = new NativeGraphExecutioner();

        SameDiff sameDiff = SameDiff.create();
        INDArray ones = Nd4j.ones(4);
        SDVariable sdVariable = sameDiff.var("ones",ones);
        SDVariable scalarOne = sameDiff.var("add1",Nd4j.scalar(1.0));
        SDVariable result = sdVariable.addi(scalarOne);
        SDVariable total = sameDiff.sum(result,Integer.MAX_VALUE);

//        log.info("ID: {}",sameDiff.getGraph().getVertex(1).getValue().getId());

        INDArray[] resB = executionerB.executeGraph(sameDiff, configImplicit);

        assertEquals(1, resB.length);
        assertEquals(Nd4j.scalar(8.0), resB[0]);

        //INDArray resA = executionerA.executeGraph(sameDiff)[0];

        //assertEquals(resA, resB);
    }
 
Example 7
Source File: TransformOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testLogGrad() {
    SameDiff sameDiff = SameDiff.create();
    SDVariable input = sameDiff.var("x", Nd4j.linspace(1, 4, 4, DataType.DOUBLE));
    SDVariable log = sameDiff.math().log(input);
    SDVariable sum = sameDiff.sum(log, Integer.MAX_VALUE);
    sameDiff.calculateGradients(Collections.emptyMap(), sameDiff.getVariables().keySet());
}
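Note that this test only verifies that the backward pass for log executes; the analytic gradient d(sum(log x))/dx = 1/x is computed but never asserted.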
 
Example 8
Source File: LossFunctions.java    From nd4j with Apache License 2.0
/**
 * Determine the number of weight entries that are non-zero, after broadcasting
 *
 * @param weights the weights array, possibly requiring broadcast against the labels
 * @param labels  the labels array that defines the broadcast target shape
 * @return a scalar SDVariable holding the count of non-zero weight entries
 */
private static SDVariable nonZeroCount(SDVariable weights, SDVariable labels){
    SameDiff sd = weights.getSameDiff();

    SDVariable present = sd.neq(weights, 0.0);
    SDVariable presentBroadcast = sd.zerosLike(labels).add(present);

    return sd.sum(presentBroadcast);
}
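Because nonZeroCount is private to LossFunctions, the following is only an illustrative sketch of what it computes (the names and shapes are hypothetical): a length-4 weight vector broadcast against 3 x 4 labels counts each non-zero weight once per row.

SameDiff sd = SameDiff.create();
SDVariable weights = sd.var("w", Nd4j.create(new double[]{1, 0, 2, 0}));  // 2 of 4 entries non-zero
SDVariable labels = sd.var("l", Nd4j.rand(3, 4));                         // broadcast target: 3 x 4
SDVariable count = nonZeroCount(weights, labels);                         // would evaluate to 6.0 (2 per row, 3 rows)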
 
Example 9
Source File: GraphExecutionerTest.java    From nd4j with Apache License 2.0
@Test
@Ignore
public void testSums1() throws Exception {
    SameDiff sameDiff = SameDiff.create();
    INDArray ones = Nd4j.ones(4);
    SDVariable sdVariable = sameDiff.var("ones",ones);
    SDVariable result = sdVariable.addi(1.0);
    SDVariable total = sameDiff.sum(result,Integer.MAX_VALUE);

    val executioner = new NativeGraphExecutioner();

    INDArray[] res = executioner.executeGraph(sameDiff);
    assertEquals(8.0, res[0].getDouble(0), 1e-5);
}
 
Example 10
Source File: GraphExecutionerTest.java    From deeplearning4j with Apache License 2.0
@Test
@Ignore
public void testConversion() throws Exception {
    SameDiff sameDiff = SameDiff.create();
    INDArray ones = Nd4j.ones(4);
    SDVariable sdVariable = sameDiff.var("ones",ones);
    SDVariable result = sdVariable.add(1.0);
    SDVariable total = sameDiff.sum(result,Integer.MAX_VALUE);

    val executioner = new NativeGraphExecutioner();

    ByteBuffer buffer = executioner.convertToFlatBuffers(sameDiff, ExecutorConfiguration.builder().profilingMode(OpExecutioner.ProfilingMode.DISABLED).executionMode(ExecutionMode.SEQUENTIAL).outputMode(OutputMode.IMPLICIT).build());

    val offset = buffer.position();
    val array = buffer.array();

    try (val fos = new FileOutputStream("../../libnd4j/tests/resources/adam_sum.fb"); val dos = new DataOutputStream(fos)) {
        dos.write(array, offset, array.length - offset);
    }


    //INDArray[] res = executioner.executeGraph(sameDiff);
    //assertEquals(8.0, res[0].getDouble(0), 1e-5);
    /*
    INDArray output = null;
    for(int i = 0; i < 5; i++) {
        output = sameDiff.execAndEndResult(ops);
        System.out.println("Ones " + ones);
        System.out.println(output);
    }

    assertEquals(Nd4j.valueArrayOf(4,7),ones);
    assertEquals(28,output.getDouble(0),1e-1);
    */
}
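This test converts the graph to its FlatBuffers representation and writes it to adam_sum.fb under the libnd4j test resources, so the same graph can be loaded and executed from the C++ test suite; the in-Java execution assertions are left commented out.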
 
Example 11
Source File: GraphExecutionerTest.java    From nd4j with Apache License 2.0
@Test
@Ignore
public void testConversion() throws Exception {
    SameDiff sameDiff = SameDiff.create();
    INDArray ones = Nd4j.ones(4);
    SDVariable sdVariable = sameDiff.var("ones",ones);
    SDVariable result = sdVariable.addi(1.0);
    SDVariable total = sameDiff.sum(result,Integer.MAX_VALUE);

    val executioner = new NativeGraphExecutioner();

    ByteBuffer buffer = executioner.convertToFlatBuffers(sameDiff, ExecutorConfiguration.builder().profilingMode(OpExecutioner.ProfilingMode.DISABLED).executionMode(ExecutionMode.SEQUENTIAL).outputMode(OutputMode.IMPLICIT).build());

    val offset = buffer.position();
    val array = buffer.array();

    try (val fos = new FileOutputStream("../../libnd4j/tests/resources/adam_sum.fb"); val dos = new DataOutputStream(fos)) {
        dos.write(array, offset, array.length - offset);
    }


    //INDArray[] res = executioner.executeGraph(sameDiff);
    //assertEquals(8.0, res[0].getDouble(0), 1e-5);
    /*
    INDArray output = null;
    for(int i = 0; i < 5; i++) {
        output = sameDiff.execAndEndResult(ops);
        System.out.println("Ones " + ones);
        System.out.println(output);
    }

    assertEquals(Nd4j.valueArrayOf(4,7),ones);
    assertEquals(28,output.getDouble(0),1e-1);
    */
}
 
Example 12
Source File: GraphExecutionerTest.java    From deeplearning4j with Apache License 2.0
@Test
@Ignore
public void testSums1() {
    SameDiff sameDiff = SameDiff.create();
    INDArray ones = Nd4j.ones(4);
    SDVariable sdVariable = sameDiff.var("ones",ones);
    SDVariable result = sdVariable.add(1.0);
    SDVariable total = sameDiff.sum(result,Integer.MAX_VALUE);

    val executioner = new NativeGraphExecutioner();

    INDArray[] res = executioner.executeGraph(sameDiff);
    assertEquals(8.0, res[0].getDouble(0), 1e-5);
}
 
Example 13
Source File: GradCheckMisc.java    From nd4j with Apache License 2.0
@Test
    public void testGradientAutoBroadcast2() {

        Nd4j.getRandom().setSeed(12345);

        List<String> allFailed = new ArrayList<>();

        for (int[] dim_sz1s : new int[][]{{0, 1}, {0, 2}, {1, 2}, {0,1,2}}) {

            int[] otherShape = {3, 4, 5};
            otherShape[dim_sz1s[0]] = 1;
            otherShape[dim_sz1s[1]] = 1;
            if(dim_sz1s.length == 3){
                otherShape[dim_sz1s[2]] = 1;
            }

            for (int i = 0; i < 6; i++) {
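                // Note: with the loop bound at 6, the floordiv (case 6) and floormod (case 7)
                // branches below are unreachable in this version of the test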

                SameDiff sd = SameDiff.create();

                SDVariable in3 = sd.var("in3", new int[]{3, 4, 5});
                SDVariable in2 = sd.var("inToBc", otherShape);

                String name;
                SDVariable bcOp;
                switch (i) {
                    case 0:
                        bcOp = in3.add(in2);
                        name = "add";
                        break;
                    case 1:
                        bcOp = in3.sub(in2);
                        name = "sub";
                        break;
                    case 2:
                        bcOp = in3.mul(in2);
                        name = "mul";
                        break;
                    case 3:
                        bcOp = in3.div(in2);
                        name = "div";
                        break;
                    case 4:
                        bcOp = in3.rsub(in2);
                        name = "rsub";
                        break;
                    case 5:
                        bcOp = in3.rdiv(in2);
                        name = "rdiv";
                        break;
                    case 6:
                        bcOp = sd.f().floorDiv(in3, in2);
                        name = "floordiv";
                        break;
                    case 7:
                        bcOp = sd.f().floorMod(in3, in2);
                        name = "floormod";
                        break;
                    default:
                        throw new RuntimeException();
                }

                SDVariable outVar = sd.sum(bcOp);

                String msg = "(test " + i + ": " + name + ", dimensions=" + Arrays.toString(dim_sz1s) + ")";
                log.info("*** Starting test: " + msg);

                INDArray in3Arr = Nd4j.randn(new int[]{3,4,5}).muli(100);
                INDArray in2Arr = Nd4j.randn(otherShape).muli(100);

                sd.associateArrayWithVariable(in3Arr, in3);
                sd.associateArrayWithVariable(in2Arr, in2);

                try {
                    INDArray out = sd.execAndEndResult();
                    assertNotNull(out);
                    assertArrayEquals(new long[]{1, 1}, out.shape());

//                    System.out.println(sd.asFlatPrint());

                    boolean ok = GradCheckUtil.checkGradients(sd);
                    if (!ok) {
                        allFailed.add(msg);
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                    allFailed.add(msg + " - EXCEPTION");
                }
            }
        }

        assertEquals("Failed: " + allFailed, 0, allFailed.size());
    }
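Reducing the broadcast result with sd.sum(bcOp) gives the graph a single scalar output, which is what GradCheckUtil needs: it perturbs each input entry, measures the change in the scalar, and compares that numerical derivative against the backpropagated gradient.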
 
Example 14
Source File: GradCheckReductions.java    From nd4j with Apache License 2.0
@Test
    public void testReductionGradients1() {
        //Test reductions: final, but *not* the only function
        Nd4j.getRandom().setSeed(12345);

        List<String> allFailed = new ArrayList<>();

        for (int dim : new int[]{0, Integer.MAX_VALUE}) {    //These two cases are equivalent here

            for (int i = 0; i < 10; i++) {

                SameDiff sd = SameDiff.create();

                int nOut = 4;
                int minibatch = 10;
                SDVariable input = sd.var("in", new int[]{-1, nOut});
                SDVariable label = sd.var("label", new int[]{-1, nOut});

                SDVariable diff = input.sub(label);
                SDVariable sqDiff = diff.mul(diff);
                SDVariable msePerEx = sd.mean("msePerEx", sqDiff, 1);

                SDVariable loss;
                String name;
                switch (i) {
                    case 0:
                        loss = sd.mean("loss", msePerEx, dim);
                        name = "mean";
                        break;
                    case 1:
                        loss = sd.sum("loss", msePerEx, dim);
                        name = "sum";
                        break;
                    case 2:
                        loss = sd.standardDeviation("loss", msePerEx, true, dim);
                        name = "stdev";
                        break;
                    case 3:
                        loss = sd.min("loss", msePerEx, dim);
                        name = "min";
                        break;
                    case 4:
                        loss = sd.max("loss", msePerEx, dim);
                        name = "max";
                        break;
                    case 5:
                        loss = sd.variance("loss", msePerEx, true, dim);
                        name = "variance";
                        break;
                    case 6:
                        loss = sd.prod("loss", msePerEx, dim);
                        name = "prod";
                        break;
                    case 7:
                        loss = sd.norm1("loss", msePerEx, dim);
                        name = "norm1";
                        break;
                    case 8:
                        loss = sd.norm2("loss", msePerEx, dim);
                        name = "norm2";
                        break;
                    case 9:
                        loss = sd.normmax("loss", msePerEx, dim);
                        name = "normmax";
                        break;
                    default:
                        throw new RuntimeException();
                }


                String msg = "(test " + i + " - " + name + ", dimension=" + dim + ")";
                log.info("*** Starting test: " + msg);

                INDArray inputArr = Nd4j.randn(minibatch, nOut).muli(100);
                INDArray labelArr = Nd4j.randn(minibatch, nOut).muli(100);

                sd.associateArrayWithVariable(inputArr, input);
                sd.associateArrayWithVariable(labelArr, label);

                try {
                    INDArray out = sd.execAndEndResult();
                    assertNotNull(out);
                    assertArrayEquals(new int[]{1, 1}, out.shape());

//                    System.out.println(sd.asFlatPrint());

                    boolean ok = GradCheckUtil.checkGradients(sd);
                    if (!ok) {
                        allFailed.add(msg);
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                    allFailed.add(msg + " - EXCEPTION");
                }
            }
        }

        assertEquals("Failed: " + allFailed, 0, allFailed.size());
    }
 
Example 15
Source File: MiscOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testMulGradient() {
    INDArray arr1 = Nd4j.linspace(1, 4, 4, DataType.DOUBLE).reshape(2, 2);
    INDArray arr2 = Nd4j.linspace(1, 4, 4, DataType.DOUBLE).reshape(2, 2);

    INDArray gradAssertion = Nd4j.ones(arr1.shape());
    INDArray scalar = Nd4j.scalar(1.0);
    INDArray aGradAssertion = Nd4j.create(new double[][]{
            {1, 4},
            {9, 16}
    });

    INDArray cGradAssertion = Nd4j.create(new double[][]{
            {1, 2},
            {3, 4}
    });

    INDArray wGradAssertion = Nd4j.create(new double[][]{
            {2, 8},
            {18, 32}
    });

    INDArray dGradAssertion = Nd4j.ones(2, 2);

    SameDiff sameDiff = SameDiff.create();

    SDVariable sdVariable = sameDiff.var("a", arr1);
    SDVariable sdVariable1 = sameDiff.var("w", arr2);
    SDVariable varMulPre = sdVariable.mul("c", sdVariable1);
    SDVariable varMul = varMulPre.mul("d", sdVariable1);
    SDVariable sum = sameDiff.sum("ret", varMul, Integer.MAX_VALUE);

    Map<String,INDArray> m = sameDiff.outputAll(null);
    Map<String,INDArray> gm = sameDiff.calculateGradients(null, m.keySet());

    SDVariable finalResult = sameDiff.grad(sum.name());

    SDVariable cGrad = sameDiff.grad(varMulPre.name());

    SDVariable mulGradResult = sameDiff.grad(varMul.name());
    SDVariable aGrad = sameDiff.grad(sdVariable.name());
    SDVariable wGrad = sameDiff.grad(sdVariable1.name());
    SDVariable dGrad = sameDiff.grad(varMul.name());

    INDArray scalarGradTest = gm.get(sum.name());
    assertEquals(scalar, scalarGradTest);


    INDArray gradTest = mulGradResult.getArr();
    assertEquals(gradAssertion, gradTest);

    INDArray aGradTest = aGrad.getArr();
    assertEquals(aGradAssertion, aGradTest);

    INDArray cGradTest = cGrad.getArr();
    assertEquals(cGradAssertion, cGradTest);

    INDArray wGradTest = wGrad.getArr();
    assertEquals(wGradAssertion, wGradTest);

    INDArray dGradTest = dGrad.getArr();
    assertEquals(dGradAssertion, dGradTest);
}
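The gradient assertions follow directly from the chain rule. With c = a * w and d = c * w = a * w^2 (all element-wise), the scalar loss ret = sum(d) gives dL/dd = 1 everywhere, dL/dc = w ({1, 2, 3, 4}), dL/dw = 2 * a * w ({2, 8, 18, 32}), and dL/da = w^2 ({1, 4, 9, 16}), matching the assertions above.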
 
Example 16
Source File: ReductionOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
    public void testReductionsBackwards() {
        //Originally looped over all seven reductions: for (int i = 0; i < 7; i++) {
        //Currently only the variance case (i == 5) is run:
        int i = 5;
        {

            SameDiff sd = SameDiff.create();

            int nOut = 4;
            int minibatch = 3;
            SDVariable input = sd.var("in", DataType.DOUBLE, new long[]{minibatch, nOut});
            SDVariable label = sd.var("label", DataType.DOUBLE, new long[]{minibatch, nOut});

            SDVariable diff = input.sub(label);
            SDVariable sqDiff = diff.mul(diff);
            SDVariable msePerEx = sd.mean("msePerEx", sqDiff, 1);

            SDVariable loss;    //Scalar value
            String name;
            switch (i) {
                case 0:
                    loss = sd.mean("loss", msePerEx, 0);
                    name = "mean";
                    break;
                case 1:
                    loss = sd.sum("loss", msePerEx, 0);
                    name = "sum";
                    break;
                case 2:
                    loss = sd.standardDeviation("loss", msePerEx, true, 0);
                    name = "stdev";
                    break;
                case 3:
                    loss = sd.min("loss", msePerEx, 0);
                    name = "min";
                    break;
                case 4:
                    loss = sd.max("loss", msePerEx, 0);
                    name = "max";
                    break;
                case 5:
                    loss = sd.variance("loss", msePerEx, true, 0);
                    name = "variance";
                    break;
                case 6:
                    loss = sd.prod("loss", msePerEx, 0);
                    name = "prod";
                    break;
                default:
                    throw new RuntimeException();
            }


            String msg = "test: " + i + " - " + name;
            log.info("*** Starting test: " + msg);

            INDArray inputArr = Nd4j.rand(DataType.DOUBLE, minibatch, nOut);
            INDArray labelArr = Nd4j.rand(DataType.DOUBLE, minibatch, nOut);

            sd.associateArrayWithVariable(inputArr, input);
            sd.associateArrayWithVariable(labelArr, label);

            INDArray result = loss.eval();
            assertEquals(1, result.length());

            sd.calculateGradients(Collections.emptyMap(), sd.getVariables().keySet());
        }
    }
 
Example 17
Source File: MiscOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testGradientAutoBroadcast1() {

    Nd4j.getRandom().setSeed(12345);

    List<String> failed = new ArrayList<>();

    for (int dim_sz1 : new int[]{0, 1, 2}) {

        int[] in2Shape = {3, 4, 5};
        in2Shape[dim_sz1] = 1;

        for (int i = 0; i < 8; i++) {

            SameDiff sd = SameDiff.create();

            SDVariable in3 = sd.var("in3", Nd4j.rand(new int[]{3, 4, 5}));
            SDVariable in2 = sd.var("in2", in2Shape);

            SDVariable bcOp;
            String name;
            switch (i) {
                case 0:
                    bcOp = in3.add(in2);
                    name = "add";
                    break;
                case 1:
                    bcOp = in3.sub(in2);
                    name = "sub";
                    break;
                case 2:
                    bcOp = in3.mul(in2);
                    name = "mul";
                    break;
                case 3:
                    bcOp = in3.div(in2);
                    name = "div";
                    break;
                case 4:
                    bcOp = in3.rsub(in2);
                    name = "rsub";
                    break;
                case 5:
                    bcOp = in3.rdiv(in2);
                    name = "rdiv";
                    break;
                case 6:
                    //bcOp = sd.scalarFloorDiv(in3, in2);
                    bcOp = new FloorDivOp(sd, in3, in2).outputVariable();
                    name = "floordiv";
                    break;
                case 7:
                    //bcOp = sd.scalarFloorMod(in3, in2);
                    bcOp = new FloorModOp(sd, in3, in2).outputVariable();
                    name = "floormod";
                    if(OpValidationSuite.IGNORE_FAILING){
                        //https://github.com/deeplearning4j/deeplearning4j/issues/5976
                        continue;
                    }
                    break;
                default:
                    throw new RuntimeException();
            }

            SDVariable outVar = sd.sum(bcOp);

            String msg = "(test " + i + ": " + name + ", dimension=" + dim_sz1 + ")";
            log.info("*** Starting test: " + msg);

            INDArray in3Arr = Nd4j.randn(new int[]{3, 4, 5}).muli(100);
            INDArray in2Arr = Nd4j.randn(in2Shape).muli(100);

            sd.associateArrayWithVariable(in3Arr, in3);
            sd.associateArrayWithVariable(in2Arr, in2);

            TestCase tc = new TestCase(sd);

            String error = OpValidation.validate(tc);
            if(error != null){
                failed.add(name);
            }
        }
    }

    assertEquals("Failed: " + failed, 0, failed.size());
}
 
Example 18
Source File: MiscOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testGradientAutoBroadcast2() {
    Nd4j.getRandom().setSeed(12345);

    List<String> failed = new ArrayList<>();

    for (int[] dim_sz1s : new int[][]{{0, 1}, {0, 2}, {1, 2}, {0, 1, 2}}) {

        long[] otherShape = {3, 4, 5};
        otherShape[dim_sz1s[0]] = 1;
        otherShape[dim_sz1s[1]] = 1;
        if (dim_sz1s.length == 3) {
            otherShape[dim_sz1s[2]] = 1;
        }

        for (int i = 0; i < 8; i++) {

            SameDiff sd = SameDiff.create();

            SDVariable in3 = sd.var("in3", DataType.DOUBLE, 3, 4, 5);
            SDVariable in2 = sd.var("inToBc", DataType.DOUBLE, otherShape);

            String name;
            SDVariable bcOp;
            switch (i) {
                case 0:
                    bcOp = in3.add(in2);
                    name = "add";
                    break;
                case 1:
                    bcOp = in3.sub(in2);
                    name = "sub";
                    break;
                case 2:
                    bcOp = in3.mul(in2);
                    name = "mul";
                    break;
                case 3:
                    bcOp = in3.div(in2);
                    name = "div";
                    break;
                case 4:
                    bcOp = in3.rsub(in2);
                    name = "rsub";
                    break;
                case 5:
                    bcOp = in3.rdiv(in2);
                    name = "rdiv";
                    break;
                case 6:
                    //bcOp = sd.scalarFloorDiv(in3, in2);
                    bcOp = new FloorDivOp(sd, in3, in2).outputVariable();
                    name = "floordiv";
                    break;
                case 7:
                    //bcOp = sd.scalarFloorMod(in3, in2);
                    bcOp = new FloorModOp(sd, in3, in2).outputVariable();
                    name = "floormod";
                    if(OpValidationSuite.IGNORE_FAILING){
                        //https://github.com/deeplearning4j/deeplearning4j/issues/5976
                        continue;
                    }
                    break;
                default:
                    throw new RuntimeException();
            }

            SDVariable outVar = sd.sum(bcOp);

            String msg = "(test " + i + ": " + name + ", dimensions=" + Arrays.toString(dim_sz1s) + ")";
            log.info("*** Starting test: " + msg);

            INDArray in3Arr = Nd4j.randn(DataType.DOUBLE, 3, 4, 5).muli(100);
            INDArray in2Arr = Nd4j.randn(DataType.DOUBLE, otherShape).muli(100);

            sd.associateArrayWithVariable(in3Arr, in3);
            sd.associateArrayWithVariable(in2Arr, in2);

            TestCase tc = new TestCase(sd);
            String error = OpValidation.validate(tc);
            if(error != null){
                failed.add(name);
            }
        }
    }

    assertEquals("Failed: " + failed, 0, failed.size());
}
 
Example 19
Source File: MiscOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testGradientAutoBroadcast3() {
    //These tests: output size > input sizes

    Nd4j.getRandom().setSeed(12345);

    List<String> failed = new ArrayList<>();

    //Test cases: in1Shape, in2Shape, shapeOf(op(in1,in2))
    List<Triple<long[], long[], long[]>> testCases = new ArrayList<>();
    testCases.add(new Triple<>(new long[]{3, 1}, new long[]{1, 4}, new long[]{3, 4}));
    testCases.add(new Triple<>(new long[]{3, 1}, new long[]{3, 4}, new long[]{3, 4}));
    testCases.add(new Triple<>(new long[]{3, 4}, new long[]{1, 4}, new long[]{3, 4}));
    testCases.add(new Triple<>(new long[]{3, 4, 1}, new long[]{1, 1, 5}, new long[]{3, 4, 5}));
    testCases.add(new Triple<>(new long[]{3, 4, 1}, new long[]{3, 1, 5}, new long[]{3, 4, 5}));
    testCases.add(new Triple<>(new long[]{3, 1, 5}, new long[]{1, 4, 1}, new long[]{3, 4, 5}));
    testCases.add(new Triple<>(new long[]{3, 1, 5}, new long[]{1, 4, 5}, new long[]{3, 4, 5}));
    testCases.add(new Triple<>(new long[]{3, 1, 5}, new long[]{3, 4, 5}, new long[]{3, 4, 5}));
    testCases.add(new Triple<>(new long[]{3, 1, 1, 1}, new long[]{1, 4, 5, 6}, new long[]{3, 4, 5, 6}));
    testCases.add(new Triple<>(new long[]{1, 1, 1, 6}, new long[]{3, 4, 5, 6}, new long[]{3, 4, 5, 6}));
    testCases.add(new Triple<>(new long[]{1, 4, 5, 1}, new long[]{3, 1, 1, 6}, new long[]{3, 4, 5, 6}));
    if(!OpValidationSuite.IGNORE_FAILING) {
        testCases.add(new Triple<>(new long[]{1, 6}, new long[]{3, 4, 5, 1}, new long[]{3, 4, 5, 6}));
    }

    for (val p : testCases) {

        for (int i = 0; i < 8; i++) {

            SameDiff sd = SameDiff.create();

            SDVariable in3 = sd.var("in1", DataType.DOUBLE, p.getFirst());
            SDVariable in2 = sd.var("in2", DataType.DOUBLE, p.getSecond());

            String name;
            SDVariable bcOp;
            switch (i) {
                case 0:
                    bcOp = in3.add(in2);
                    name = "add";
                    break;
                case 1:
                    bcOp = in3.sub(in2);
                    name = "sub";
                    break;
                case 2:
                    bcOp = in3.mul(in2);
                    name = "mul";
                    break;
                case 3:
                    bcOp = in3.div(in2);
                    name = "div";
                    break;
                case 4:
                    bcOp = in3.rsub(in2);
                    name = "rsub";
                    break;
                case 5:
                    bcOp = in3.rdiv(in2);
                    name = "rdiv";
                    break;
                case 6:
                    //bcOp = sd.scalarFloorDiv(in3, in2);
                    bcOp = new FloorDivOp(sd, in3, in2).outputVariable();
                    name = "floordiv";
                    break;
                case 7:
                    //bcOp = sd.scalarFloorMod(in3, in2);
                    bcOp = new FloorModOp(sd, in3, in2).outputVariable();
                    name = "floormod";
                    if(OpValidationSuite.IGNORE_FAILING){
                        //https://github.com/deeplearning4j/deeplearning4j/issues/5976
                        continue;
                    }
                    break;
                default:
                    throw new RuntimeException();
            }

            SDVariable outVar = sd.sum(bcOp);

            String msg = "(test " + i + ": " + name + ", array 1 size =" + Arrays.toString(p.getFirst())
                    + ", array 2 size = " + Arrays.toString(p.getSecond()) + ")";
            log.info("*** Starting test: " + msg);

            INDArray in3Arr = Nd4j.rand(DataType.DOUBLE, p.getFirst()).muli(100);
            INDArray in2Arr = Nd4j.rand(DataType.DOUBLE, p.getSecond()).muli(100);

            sd.associateArrayWithVariable(in3Arr, in3);
            sd.associateArrayWithVariable(in2Arr, in2);

            TestCase tc = new TestCase(sd);
            String error = OpValidation.validate(tc);
            if(error != null){
                failed.add(name + " " + i +  " - " + error);
            }
        }
    }

    assertEquals("Failed: " + failed, 0, failed.size());
}
 
Example 20
Source File: MiscOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testScatterOpGradients() {
    List<String> failed = new ArrayList<>();

    for (int i = 0; i < 7; i++) {
        Nd4j.getRandom().setSeed(12345);

        SameDiff sd = SameDiff.create();

        SDVariable in = sd.var("in", DataType.DOUBLE, 20, 10);
        SDVariable indices = sd.var("indices", DataType.INT, new long[]{5});
        SDVariable updates = sd.var("updates", DataType.DOUBLE, 5, 10);


        in.setArray(Nd4j.rand(DataType.DOUBLE, 20, 10));
        indices.setArray(Nd4j.create(new double[]{3, 4, 5, 10, 18}).castTo(DataType.INT));
        updates.setArray(Nd4j.rand(DataType.DOUBLE, 5, 10).muli(2).subi(1));

        SDVariable scatter;
        String name;
        switch (i) {
            case 0:
                scatter = sd.scatterAdd("s", in, indices, updates);
                name = "scatterAdd";
                break;
            case 1:
                scatter = sd.scatterSub("s", in, indices, updates);
                name = "scatterSub";
                break;
            case 2:
                scatter = sd.scatterMul("s", in, indices, updates);
                name = "scatterMul";
                break;
            case 3:
                scatter = sd.scatterDiv("s", in, indices, updates);
                name = "scatterDiv";
                break;
            case 4:
                scatter = sd.scatterUpdate("s", in, indices, updates);
                name = "scatterUpdate";
                break;
            case 5:
                scatter = sd.scatterMax("s", in, indices, updates);
                name = "scatterMax";
                break;
            case 6:
                scatter = sd.scatterMin("s", in, indices, updates);
                name = "scatterMin";
                break;
            default:
                throw new RuntimeException();
        }

        INDArray exp = in.getArr().dup();
        int[] indicesInt = indices.getArr().dup().data().asInt();
        for( int j=0; j<indicesInt.length; j++ ){
            INDArray updateRow = updates.getArr().getRow(j);
            INDArray destinationRow = exp.getRow(indicesInt[j]);
            switch (i){
                case 0:
                    destinationRow.addi(updateRow);
                    break;
                case 1:
                    destinationRow.subi(updateRow);
                    break;
                case 2:
                    destinationRow.muli(updateRow);
                    break;
                case 3:
                    destinationRow.divi(updateRow);
                    break;
                case 4:
                    destinationRow.assign(updateRow);
                    break;
                case 5:
                    destinationRow.assign(Transforms.max(destinationRow, updateRow, true));
                    break;
                case 6:
                    destinationRow.assign(Transforms.min(destinationRow, updateRow, true));
                    break;
                default:
                    throw new RuntimeException();
            }
        }

        SDVariable loss = sd.sum(scatter);  //TODO: stdev might be better here, as gradients are non-symmetrical


        TestCase tc = new TestCase(sd)
                .expected(scatter, exp)
                .gradCheckSkipVariables(indices.name());

        String error = OpValidation.validate(tc);
        if(error != null){
            failed.add(name);
        }
    }

    assertEquals(failed.toString(), 0, failed.size());
}