Java Code Examples for org.nd4j.autodiff.samediff.SDVariable#div()

The following examples show how to use org.nd4j.autodiff.samediff.SDVariable#div(). You can vote up the examples you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: ATan2.java    From nd4j with Apache License 2.0 6 votes vote down vote up
@Override
public List<SDVariable> doDiff(List<SDVariable> i_v) {
    //Backprop for z = atan2(y, x). Writing r = y/x:
    //dz/dr = 1/(1 + r^2), dr/dy = 1/x, dr/dx = -y/x^2
    final SDVariable yIn = rarg();
    final SDVariable xIn = larg();
    final SDVariable ratio = yIn.div(xIn);

    //dz/dr = 1 / (r^2 + 1)
    final SDVariable dOutDRatio = f().square(ratio).add(1.0).rdiv(1.0);
    final SDVariable dRatioDx = f().neg(yIn).div(f().square(xIn));
    final SDVariable dRatioDy = xIn.rdiv(1.0);

    //Chain rule: multiply through by the incoming gradient
    final SDVariable epsilon = i_v.get(0);
    final SDVariable xGrad = dOutDRatio.mul(dRatioDx).mul(epsilon);
    final SDVariable yGrad = dOutDRatio.mul(dRatioDy).mul(epsilon);

    return Arrays.asList(xGrad, yGrad);
}
 
Example 2
Source File: StandardDeviation.java    From nd4j with Apache License 2.0 6 votes vote down vote up
@Override
public List<SDVariable> doDiff(List<SDVariable> i_v1) {
    //Backprop for stdev: dL/dIn = dL/dOut * dOut/dIn, where
    //dOut/dIn_i = (in_i - mean) / (stdev * (n-1))  [bias corrected]
    //dOut/dIn_i = (in_i - mean) / (stdev * n)      [not bias corrected]
    final int rank = Shape.rankFromShape(arg().getShape());
    final long count = f().getReductionLength(this);

    //Reshape the reduced quantities so they broadcast against the original input shape
    final SDVariable stdevBc = f().reductionBroadcastableWithOrigShape(rank, dimensions, outputVariables()[0]);
    final SDVariable meanBc = f().reductionBroadcastableWithOrigShape(rank, dimensions, f().mean(arg(), dimensions));

    final SDVariable centered = arg().sub(meanBc);
    SDVariable dOutdIn = centered.div(stdevBc);
    dOutdIn = this.biasCorrected ? dOutdIn.div(count - 1) : dOutdIn.div(count);

    //Incoming gradient, broadcast back to the input shape
    final SDVariable gradBc = f().reductionBroadcastableWithOrigShape(rank, dimensions, i_v1.get(0));

    return Arrays.asList(dOutdIn.mul(gradBc));
}
 
Example 3
Source File: Prod.java    From nd4j with Apache License 2.0 5 votes vote down vote up
@Override
public List<SDVariable> doDiff(List<SDVariable> i_v1) {
    //Backprop for product reduction: dProd/dIn_i = prod / in_i, broadcast back to the input shape
    final SDVariable prodOut = outputVariables()[0];
    final int rank = Shape.rankFromShape(arg().getShape());   //TODO shape may not always be defined?
    final SDVariable gradBc = sameDiff.f().reductionBroadcastableWithOrigShape(rank, dimensions, i_v1.get(0));
    final SDVariable prodBc = sameDiff.f().reductionBroadcastableWithOrigShape(rank, dimensions, prodOut);
    //NOTE(review): divides by the input, so the gradient is undefined where in_i == 0
    final SDVariable gradOverIn = gradBc.div(arg());
    return Arrays.asList(prodBc.mul(gradOverIn));
}
 
Example 4
Source File: GradCheckMisc.java    From nd4j with Apache License 2.0 4 votes vote down vote up
@Test
    public void testGradientAutoBroadcast1() {
        //Gradient checks for element-wise broadcast ops where in2 has a single size-1 dimension

        Nd4j.getRandom().setSeed(12345);

        List<String> allFailed = new ArrayList<>();

        for (int dim_sz1 : new int[]{0, 1, 2}) {

            int[] in2Shape = {3, 4, 5};
            in2Shape[dim_sz1] = 1;

            //FIX: was "for (int i = 2; i < 3; i++)" - a debugging leftover that exercised only the
            //"mul" case, leaving cases 0,1,3,4,5 dead. Restored to 0..5, consistent with
            //testGradientAutoBroadcast2 in this class (cases 6/7 - floordiv/floormod - excluded there too).
            for (int i = 0; i < 6; i++) {

                SameDiff sd = SameDiff.create();

                SDVariable in3 = sd.var("in3", Nd4j.rand(new int[]{3, 4, 5}));
                SDVariable in2 = sd.var("in2", in2Shape);

                SDVariable bcOp;
                String name;
                switch (i) {
                    case 0:
                        bcOp = in3.add(in2);
                        name = "add";
                        break;
                    case 1:
                        bcOp = in3.sub(in2);
                        name = "sub";
                        break;
                    case 2:
                        bcOp = in3.mul(in2);
                        name = "mul";
                        break;
                    case 3:
                        bcOp = in3.div(in2);
                        name = "div";
                        break;
                    case 4:
                        bcOp = in3.rsub(in2);
                        name = "rsub";
                        break;
                    case 5:
                        bcOp = in3.rdiv(in2);
                        name = "rdiv";
                        break;
                    case 6:
                        bcOp = sd.f().floorDiv(in3, in2);
                        name = "floordiv";
                        break;
                    case 7:
                        bcOp = sd.f().floorMod(in3, in2);
                        name = "floormod";
                        break;
                    default:
                        throw new RuntimeException();
                }

                //Reduce to a scalar so the gradient check has a single loss value
                SDVariable outVar = sd.sum(bcOp);

                String msg = "(test " + i + ": " + name + ", dimension=" + dim_sz1 + ")";
                log.info("*** Starting test: " + msg);

                INDArray in3Arr = Nd4j.randn(new int[]{3, 4, 5}).muli(100);
                INDArray in2Arr = Nd4j.randn(in2Shape).muli(100);

                sd.associateArrayWithVariable(in3Arr, in3);
                sd.associateArrayWithVariable(in2Arr, in2);

                try {
                    INDArray out = sd.execAndEndResult();
                    assertNotNull(out);
                    assertArrayEquals(new long[]{1, 1}, out.shape());

                    boolean ok = GradCheckUtil.checkGradients(sd);
                    if (!ok) {
                        allFailed.add(msg);
                    }
                } catch (Exception e) {
                    //Record exceptions as failures rather than aborting: all op/dimension combos are checked
                    e.printStackTrace();
                    allFailed.add(msg + " - EXCEPTION");
                }
            }
        }

        assertEquals("Failed: " + allFailed, 0, allFailed.size());
    }
 
Example 5
Source File: GradCheckMisc.java    From nd4j with Apache License 2.0 4 votes vote down vote up
@Test
    public void testGradientAutoBroadcast2() {
        //Gradient checks for broadcast ops where the second input is size 1 along two (or all three) dimensions

        Nd4j.getRandom().setSeed(12345);

        List<String> failures = new ArrayList<>();

        for (int[] sizeOneDims : new int[][]{{0, 1}, {0, 2}, {1, 2}, {0, 1, 2}}) {

            int[] bcShape = {3, 4, 5};
            for (int d : sizeOneDims) {
                bcShape[d] = 1;
            }

            for (int opNum = 0; opNum < 6; opNum++) {

                SameDiff sd = SameDiff.create();

                SDVariable first = sd.var("in3", new int[]{3, 4, 5});
                SDVariable second = sd.var("inToBc", bcShape);

                String opName;
                SDVariable result;
                switch (opNum) {
                    case 0:
                        result = first.add(second);
                        opName = "add";
                        break;
                    case 1:
                        result = first.sub(second);
                        opName = "sub";
                        break;
                    case 2:
                        result = first.mul(second);
                        opName = "mul";
                        break;
                    case 3:
                        result = first.div(second);
                        opName = "div";
                        break;
                    case 4:
                        result = first.rsub(second);
                        opName = "rsub";
                        break;
                    case 5:
                        result = first.rdiv(second);
                        opName = "rdiv";
                        break;
                    case 6:
                        result = sd.f().floorDiv(first, second);
                        opName = "floordiv";
                        break;
                    case 7:
                        result = sd.f().floorMod(first, second);
                        opName = "floormod";
                        break;
                    default:
                        throw new RuntimeException();
                }

                //Scalar loss for the gradient check
                SDVariable loss = sd.sum(result);

                String testMsg = "(test " + opNum + ": " + opName + ", dimensions=" + Arrays.toString(sizeOneDims) + ")";
                log.info("*** Starting test: " + testMsg);

                INDArray firstArr = Nd4j.randn(new int[]{3, 4, 5}).muli(100);
                INDArray secondArr = Nd4j.randn(bcShape).muli(100);

                sd.associateArrayWithVariable(firstArr, first);
                sd.associateArrayWithVariable(secondArr, second);

                try {
                    INDArray out = sd.execAndEndResult();
                    assertNotNull(out);
                    assertArrayEquals(new long[]{1, 1}, out.shape());

                    if (!GradCheckUtil.checkGradients(sd)) {
                        failures.add(testMsg);
                    }
                } catch (Exception e) {
                    //Record and continue so every combination is attempted
                    e.printStackTrace();
                    failures.add(testMsg + " - EXCEPTION");
                }
            }
        }

        assertEquals("Failed: " + failures, 0, failures.size());
    }
 
Example 6
Source File: MiscOpValidation.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Test
public void testGradientAutoBroadcast1() {
    //Gradient/output validation for broadcast ops where in2 has a single size-1 dimension

    Nd4j.getRandom().setSeed(12345);

    List<String> failed = new ArrayList<>();

    for (int dim_sz1 : new int[]{0, 1, 2}) {

        int[] in2Shape = {3, 4, 5};
        in2Shape[dim_sz1] = 1;

        for (int i = 0; i < 8; i++) {

            SameDiff sd = SameDiff.create();

            SDVariable in3 = sd.var("in3", Nd4j.rand(new int[]{3, 4, 5}));
            SDVariable in2 = sd.var("in2", in2Shape);

            SDVariable bcOp;
            String name;
            switch (i) {
                case 0:
                    bcOp = in3.add(in2);
                    name = "add";
                    break;
                case 1:
                    bcOp = in3.sub(in2);
                    name = "sub";
                    break;
                case 2:
                    bcOp = in3.mul(in2);
                    name = "mul";
                    break;
                case 3:
                    bcOp = in3.div(in2);
                    name = "div";
                    break;
                case 4:
                    bcOp = in3.rsub(in2);
                    name = "rsub";
                    break;
                case 5:
                    bcOp = in3.rdiv(in2);
                    name = "rdiv";
                    break;
                case 6:
                    bcOp = new FloorDivOp(sd, in3, in2).outputVariable();
                    name = "floordiv";
                    break;
                case 7:
                    bcOp = new FloorModOp(sd, in3, in2).outputVariable();
                    name = "floormod";
                    if(OpValidationSuite.IGNORE_FAILING){
                        //https://github.com/deeplearning4j/deeplearning4j/issues/5976
                        continue;
                    }
                    break;
                default:
                    throw new RuntimeException();
            }

            //Reduce to a scalar loss for the gradient check
            SDVariable outVar = sd.sum(bcOp);

            String msg = "(test " + i + ": " + name + ", dimension=" + dim_sz1 + ")";
            log.info("*** Starting test: " + msg);

            INDArray in3Arr = Nd4j.randn(new int[]{3, 4, 5}).muli(100);
            INDArray in2Arr = Nd4j.randn(in2Shape).muli(100);

            sd.associateArrayWithVariable(in3Arr, in3);
            sd.associateArrayWithVariable(in2Arr, in2);

            TestCase tc = new TestCase(sd);

            String error = OpValidation.validate(tc);
            if(error != null){
                //FIX: was failed.add(name), which lost the dimension being tested and the
                //validation error itself. Record full context, consistent with testGradientAutoBroadcast3.
                failed.add(msg + " - " + error);
            }
        }
    }

    assertEquals("Failed: " + failed, 0, failed.size());
}
 
Example 7
Source File: MiscOpValidation.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Test
public void testGradientAutoBroadcast2() {
    //Gradient/output validation for broadcast ops where in2 is size 1 along two (or all three) dimensions
    Nd4j.getRandom().setSeed(12345);

    List<String> failed = new ArrayList<>();

    for (int[] dim_sz1s : new int[][]{{0, 1}, {0, 2}, {1, 2}, {0, 1, 2}}) {

        long[] otherShape = {3, 4, 5};
        otherShape[dim_sz1s[0]] = 1;
        otherShape[dim_sz1s[1]] = 1;
        if (dim_sz1s.length == 3) {
            otherShape[dim_sz1s[2]] = 1;
        }

        for (int i = 0; i < 8; i++) {

            SameDiff sd = SameDiff.create();

            SDVariable in3 = sd.var("in3", DataType.DOUBLE, 3, 4, 5);
            SDVariable in2 = sd.var("inToBc", DataType.DOUBLE, otherShape);

            String name;
            SDVariable bcOp;
            switch (i) {
                case 0:
                    bcOp = in3.add(in2);
                    name = "add";
                    break;
                case 1:
                    bcOp = in3.sub(in2);
                    name = "sub";
                    break;
                case 2:
                    bcOp = in3.mul(in2);
                    name = "mul";
                    break;
                case 3:
                    bcOp = in3.div(in2);
                    name = "div";
                    break;
                case 4:
                    bcOp = in3.rsub(in2);
                    name = "rsub";
                    break;
                case 5:
                    bcOp = in3.rdiv(in2);
                    name = "rdiv";
                    break;
                case 6:
                    bcOp = new FloorDivOp(sd, in3, in2).outputVariable();
                    name = "floordiv";
                    break;
                case 7:
                    bcOp = new FloorModOp(sd, in3, in2).outputVariable();
                    name = "floormod";
                    if(OpValidationSuite.IGNORE_FAILING){
                        //https://github.com/deeplearning4j/deeplearning4j/issues/5976
                        continue;
                    }
                    break;
                default:
                    throw new RuntimeException();
            }

            //Reduce to a scalar loss for the gradient check
            SDVariable outVar = sd.sum(bcOp);

            String msg = "(test " + i + ": " + name + ", dimensions=" + Arrays.toString(dim_sz1s) + ")";
            log.info("*** Starting test: " + msg);

            INDArray in3Arr = Nd4j.randn(DataType.DOUBLE, 3, 4, 5).muli(100);
            INDArray in2Arr = Nd4j.randn(DataType.DOUBLE, otherShape).muli(100);

            sd.associateArrayWithVariable(in3Arr, in3);
            sd.associateArrayWithVariable(in2Arr, in2);

            TestCase tc = new TestCase(sd);
            String error = OpValidation.validate(tc);
            if(error != null){
                //FIX: was failed.add(name), which lost the broadcast dimensions and the
                //validation error itself. Record full context, consistent with testGradientAutoBroadcast3.
                failed.add(msg + " - " + error);
            }
        }
    }

    assertEquals("Failed: " + failed, 0, failed.size());
}
 
Example 8
Source File: MiscOpValidation.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Test
public void testGradientAutoBroadcast3() {
    //Broadcast gradient checks where the op output is larger than either input

    Nd4j.getRandom().setSeed(12345);

    List<String> failures = new ArrayList<>();

    //Each triple: shape of input 1, shape of input 2, shape of op(in1, in2)
    List<Triple<long[], long[], long[]>> cases = new ArrayList<>();
    cases.add(new Triple<>(new long[]{3, 1}, new long[]{1, 4}, new long[]{3, 4}));
    cases.add(new Triple<>(new long[]{3, 1}, new long[]{3, 4}, new long[]{3, 4}));
    cases.add(new Triple<>(new long[]{3, 4}, new long[]{1, 4}, new long[]{3, 4}));
    cases.add(new Triple<>(new long[]{3, 4, 1}, new long[]{1, 1, 5}, new long[]{3, 4, 5}));
    cases.add(new Triple<>(new long[]{3, 4, 1}, new long[]{3, 1, 5}, new long[]{3, 4, 5}));
    cases.add(new Triple<>(new long[]{3, 1, 5}, new long[]{1, 4, 1}, new long[]{3, 4, 5}));
    cases.add(new Triple<>(new long[]{3, 1, 5}, new long[]{1, 4, 5}, new long[]{3, 4, 5}));
    cases.add(new Triple<>(new long[]{3, 1, 5}, new long[]{3, 4, 5}, new long[]{3, 4, 5}));
    cases.add(new Triple<>(new long[]{3, 1, 1, 1}, new long[]{1, 4, 5, 6}, new long[]{3, 4, 5, 6}));
    cases.add(new Triple<>(new long[]{1, 1, 1, 6}, new long[]{3, 4, 5, 6}, new long[]{3, 4, 5, 6}));
    cases.add(new Triple<>(new long[]{1, 4, 5, 1}, new long[]{3, 1, 1, 6}, new long[]{3, 4, 5, 6}));
    if (!OpValidationSuite.IGNORE_FAILING) {
        //Mismatched-rank broadcast; currently among the known-failing cases
        cases.add(new Triple<>(new long[]{1, 6}, new long[]{3, 4, 5, 1}, new long[]{3, 4, 5, 6}));
    }

    for (val shapes : cases) {

        for (int opNum = 0; opNum < 8; opNum++) {

            SameDiff sd = SameDiff.create();

            SDVariable first = sd.var("in1", DataType.DOUBLE, shapes.getFirst());
            SDVariable second = sd.var("in2", DataType.DOUBLE, shapes.getSecond());

            String opName;
            SDVariable result;
            switch (opNum) {
                case 0:
                    result = first.add(second);
                    opName = "add";
                    break;
                case 1:
                    result = first.sub(second);
                    opName = "sub";
                    break;
                case 2:
                    result = first.mul(second);
                    opName = "mul";
                    break;
                case 3:
                    result = first.div(second);
                    opName = "div";
                    break;
                case 4:
                    result = first.rsub(second);
                    opName = "rsub";
                    break;
                case 5:
                    result = first.rdiv(second);
                    opName = "rdiv";
                    break;
                case 6:
                    result = new FloorDivOp(sd, first, second).outputVariable();
                    opName = "floordiv";
                    break;
                case 7:
                    result = new FloorModOp(sd, first, second).outputVariable();
                    opName = "floormod";
                    if (OpValidationSuite.IGNORE_FAILING) {
                        //https://github.com/deeplearning4j/deeplearning4j/issues/5976
                        continue;
                    }
                    break;
                default:
                    throw new RuntimeException();
            }

            //Scalar loss for the gradient check
            SDVariable loss = sd.sum(result);

            String testMsg = "(test " + opNum + ": " + opName + ", array 1 size =" + Arrays.toString(shapes.getFirst())
                    + ", array 2 size = " + Arrays.toString(shapes.getSecond()) + ")";
            log.info("*** Starting test: " + testMsg);

            INDArray firstArr = Nd4j.rand(DataType.DOUBLE, shapes.getFirst()).muli(100);
            INDArray secondArr = Nd4j.rand(DataType.DOUBLE, shapes.getSecond()).muli(100);

            sd.associateArrayWithVariable(firstArr, first);
            sd.associateArrayWithVariable(secondArr, second);

            String error = OpValidation.validate(new TestCase(sd));
            if (error != null) {
                failures.add(opName + " " + opNum + " - " + error);
            }
        }
    }

    assertEquals("Failed: " + failures, 0, failures.size());
}
 
Example 9
Source File: TransformOpValidation.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Test
public void testScalarOps() {
    //Validates scalar transform ops (forward output + gradient) for both 'c' and 'f' ordered inputs
    final int d0 = 2;
    final int d1 = 3;
    final int d2 = 4;
    final int length = d0 * d1 * d2;

    List<String> failures = new ArrayList<>();

    for (int testNum = 0; testNum < 11; testNum++) {
        for (char order : new char[]{'c', 'f'}) {
            SameDiff sd = SameDiff.create();

            INDArray input = Nd4j.linspace(1, length, length, DataType.DOUBLE).reshape('c', d0, d1, d2).dup(order);
            SDVariable in = sd.var("in", input);
            TestCase tc = new TestCase(sd).gradientCheck(true);

            SDVariable out;
            String msg;
            switch (testNum) {
                case 0:
                    out = in.mul(2);
                    tc.expectedOutput(out.name(), input.mul(2));
                    msg = "mul - " + order;
                    break;
                case 1:
                    out = in.div(2);
                    tc.expectedOutput(out.name(), input.div(2));
                    msg = "div - " + order;
                    break;
                case 2:
                    out = in.add(2);
                    tc.expectedOutput(out.name(), input.add(2));
                    msg = "add - " + order;
                    break;
                case 3:
                    out = in.sub(2);
                    tc.expectedOutput(out.name(), input.sub(2));
                    msg = "sub - " + order;
                    break;
                case 4:
                    out = in.rdiv(2);
                    tc.expectedOutput(out.name(), input.rdiv(2));
                    msg = "rdiv - " + order;
                    break;
                case 5:
                    out = in.rsub(2);
                    tc.expectedOutput(out.name(), input.rsub(2));
                    msg = "rsub - " + order;
                    break;
                case 6:
                    out = sd.math().pow(in, 2);
                    tc.expectedOutput(out.name(), Transforms.pow(input, 2));
                    msg = "pow - " + order;
                    break;
                case 7:
                    //Use values spanning negative/positive so floorMod is exercised on both signs
                    input.assign(Nd4j.rand(input.dataType(), input.shape()).muli(5).subi(2.5));
                    out = sd.math().floorMod(in, 2.0);
                    tc.expected(out, Nd4j.getExecutioner().exec(new ScalarFMod(input.dup(), 2.0)));
                    msg = "scalarFloorMod - " + order;
                    break;
                case 8:
                    //Random values in (0, 1) so the 0.5 threshold is actually hit
                    input.assign(Nd4j.rand(input.shape()));
                    out = sd.scalarMax(in, 0.5);
                    tc.expected(out, Transforms.max(input.dup(), 0.5));
                    msg = "scalarMax - " + order;
                    break;
                case 9:
                    input.assign(Nd4j.rand(input.shape()));
                    out = sd.scalarMin(in, 0.5);
                    tc.expected(out, Transforms.min(input.dup(), 0.5));
                    msg = "scalarMin - " + order;
                    break;
                case 10:
                    out = in.assign(0.5);
                    tc.expected(out, Nd4j.valueArrayOf(input.shape(), 0.5));
                    msg = "scalarSet - " + order;
                    break;
                default:
                    throw new RuntimeException();
            }

            tc.testName(msg);

            //Reduce to a scalar loss for the gradient check
            SDVariable loss = sd.standardDeviation(out, true);

            log.info("Starting test: " + msg);
            String err = OpValidation.validate(tc, true);
            if (err != null) {
                failures.add(err);
            }
        }
    }
    assertEquals(failures.toString(), 0, failures.size());
}