Java Code Examples for org.nd4j.autodiff.samediff.SameDiff#calculateGradients()

The following examples show how to use org.nd4j.autodiff.samediff.SameDiff#calculateGradients(). You can vote up the examples you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example 1
Source File: MiscOpValidation.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testMergeRank1(){
    SameDiff sd = SameDiff.create();
    // Single rank-1 input, every element set to 5
    SDVariable input = sd.var("in", Nd4j.create(new long[]{1}).assign(5));

    // mergeAvg over a one-element array is effectively an identity op here
    SDVariable averaged = sd.math().mergeAvg("merged", new SDVariable[]{input});
    SDVariable total = sd.sum(averaged);

    // Forward pass for the merged output, then gradients w.r.t. the input
    Map<String,INDArray> forward = sd.output(Collections.emptyMap(), "merged");
    Map<String,INDArray> grads = sd.calculateGradients(null, "in");

    // The merged output must keep rank 1 (not be squeezed/expanded)
    INDArray mergedArr = forward.get("merged");
    assertEquals(1, mergedArr.rank());

    // ...and so must the gradient w.r.t. the input
    INDArray inputGrad = grads.get("in");
    assertEquals(1, inputGrad.rank());
}
 
Example 2
Source File: MiscOpValidation.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testStopGradient(){
    SameDiff sd = SameDiff.create();
    SDVariable w = sd.var("w", Nd4j.rand(DataType.DOUBLE, 3, 4));
    // StopGradient forwards values unchanged but blocks backprop through it
    SDVariable v = new StopGradient(sd, w).outputVariable();
    SDVariable loss = v.std(true);

    Map<String,INDArray> gm = sd.calculateGradients(null, v.name(), w.name());

    // Because StopGradient cuts the only path from the loss to w,
    // the gradient w.r.t. w must be identically zero
    INDArray wArr = gm.get(w.name());
    assertEquals(Nd4j.zeros(DataType.DOUBLE, 3, 4), wArr);
}
 
Example 3
Source File: TransformOpValidation.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testSigmoidBackwards() {
    SameDiff sameDiff = SameDiff.create();
    // 2x2 input: [[1, 2], [3, 4]]
    INDArray sumInput = Nd4j.linspace(1, 4, 4, DataType.DOUBLE).reshape(2, 2);
    SDVariable input = sameDiff.var("x", sumInput);
    SDVariable sigmoid = sameDiff.nn().sigmoid(input);
    // Reduce to a scalar loss so gradients are well-defined
    SDVariable sum = sameDiff.sum(sigmoid, Integer.MAX_VALUE);
    Map<String, INDArray> m = sameDiff.calculateGradients(Collections.emptyMap(), sameDiff.getVariables().keySet());
    INDArray arr = m.get(input.name());
    // d(sum(sigmoid(x)))/dx = sigmoid(x) * (1 - sigmoid(x)), evaluated at [[1,2],[3,4]]
    assertTrue(Nd4j.create(new double[][]{
            {0.1966, 0.1050},
            {0.0452, 0.0177}
    }).equalsWithEps(arr, 1e-2));
}
 
Example 4
Source File: TransformOpValidation.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testLogGrad() {
    SameDiff sameDiff = SameDiff.create();
    // x = [1, 2, 3, 4]
    SDVariable input = sameDiff.var("x", Nd4j.linspace(1, 4, 4, DataType.DOUBLE));
    SDVariable log = sameDiff.math().log(input);
    SDVariable sum = sameDiff.sum(log, Integer.MAX_VALUE);
    Map<String, INDArray> grads = sameDiff.calculateGradients(Collections.emptyMap(), sameDiff.getVariables().keySet());
    // d(sum(log(x)))/dx = 1/x; the original test computed gradients but asserted nothing
    INDArray expected = Nd4j.create(new double[]{1.0, 0.5, 1.0 / 3.0, 0.25});
    assertTrue(expected.equalsWithEps(grads.get(input.name()), 1e-6));
}
 
Example 5
Source File: ReductionOpValidation.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Test
    public void testReductionsBackwards() {
//        for (int i = 0; i < 7; i++) {
        // Only case i=5 (variance) is exercised; the loop over all seven
        // reduction ops above is deliberately commented out. The block below
        // keeps the loop body's scoping intact.
        int i=5;
        {

            SameDiff sd = SameDiff.create();

            int nOut = 4;
            int minibatch = 3;
            // Placeholder-style variables; arrays are associated further down
            SDVariable input = sd.var("in", DataType.DOUBLE, new long[]{minibatch, nOut});
            SDVariable label = sd.var("label", DataType.DOUBLE, new long[]{minibatch, nOut});

            // Per-example MSE: mean over the feature dimension of (in - label)^2
            SDVariable diff = input.sub(label);
            SDVariable sqDiff = diff.mul(diff);
            SDVariable msePerEx = sd.mean("msePerEx", sqDiff, 1);

            // Final reduction of the per-example MSE down to a scalar loss;
            // which reduction is used depends on i
            SDVariable loss;    //Scalar value
            String name;
            switch (i) {
                case 0:
                    loss = sd.mean("loss", msePerEx, 0);
                    name = "mean";
                    break;
                case 1:
                    loss = sd.sum("loss", msePerEx, 0);
                    name = "sum";
                    break;
                case 2:
                    loss = sd.standardDeviation("loss", msePerEx, true, 0);
                    name = "stdev";
                    break;
                case 3:
                    loss = sd.min("loss", msePerEx, 0);
                    name = "min";
                    break;
                case 4:
                    loss = sd.max("loss", msePerEx, 0);
                    name = "max";
                    break;
                case 5:
                    loss = sd.variance("loss", msePerEx, true, 0);
                    name = "variance";
                    break;
                case 6:
                    loss = sd.prod("loss", msePerEx, 0);
                    name = "prod";
                    break;
                default:
                    throw new RuntimeException();
            }


            String msg = "test: " + i + " - " + name;
            log.info("*** Starting test: " + msg);

            INDArray inputArr = Nd4j.rand(DataType.DOUBLE, minibatch, nOut);
            INDArray labelArr = Nd4j.rand(DataType.DOUBLE, minibatch, nOut);

            sd.associateArrayWithVariable(inputArr, input);
            sd.associateArrayWithVariable(labelArr, label);

            // Forward pass: the loss must reduce to a single scalar element
            INDArray result = loss.eval();
            assertEquals(1, result.length());

            // Backward pass is a smoke test: it must complete without throwing.
            // No gradient values are asserted here.
            sd.calculateGradients(Collections.emptyMap(), sd.getVariables().keySet());
        }
    }
 
Example 6
Source File: MiscOpValidation.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Test
public void testMulGradient() {
    // Graph under test: c = a * w, d = c * w, ret = sum(d)
    INDArray arr1 = Nd4j.linspace(1, 4, 4, DataType.DOUBLE).reshape(2, 2);
    INDArray arr2 = Nd4j.linspace(1, 4, 4, DataType.DOUBLE).reshape(2, 2);

    // d(ret)/d(d) = 1 everywhere (d feeds directly into the sum)
    INDArray gradAssertion = Nd4j.ones(arr1.shape());
    // d(ret)/d(ret) = 1
    INDArray scalar = Nd4j.scalar(1.0);
    // d(ret)/d(a) = w * w, elementwise
    INDArray aGradAssertion = Nd4j.create(new double[][]{
            {1, 4},
            {9, 16}
    });

    // d(ret)/d(c) = w
    INDArray cGradAssertion = Nd4j.create(new double[][]{
            {1, 2},
            {3, 4}
    });

    // d(ret)/d(w) = 2 * a * w (w appears in both multiplications)
    INDArray wGradAssertion = Nd4j.create(new double[][]{
            {2, 8},
            {18, 32}
    });

    INDArray dGradAssertion = Nd4j.ones(2, 2);

    SameDiff sameDiff = SameDiff.create();

    SDVariable a = sameDiff.var("a", arr1);
    SDVariable w = sameDiff.var("w", arr2);
    SDVariable c = a.mul("c", w);
    SDVariable d = c.mul("d", w);
    SDVariable sum = sameDiff.sum("ret", d, Integer.MAX_VALUE);

    Map<String,INDArray> m = sameDiff.outputAll(null);
    Map<String,INDArray> gm = sameDiff.calculateGradients(null, m.keySet());

    // Original test fetched sameDiff.grad(varMul.name()) twice (mulGradResult
    // and dGrad were duplicates) and never used finalResult; both removed.
    SDVariable cGrad = sameDiff.grad(c.name());
    SDVariable dGrad = sameDiff.grad(d.name());
    SDVariable aGrad = sameDiff.grad(a.name());
    SDVariable wGrad = sameDiff.grad(w.name());

    INDArray scalarGradTest = gm.get(sum.name());
    assertEquals(scalar, scalarGradTest);

    INDArray gradTest = dGrad.getArr();
    assertEquals(gradAssertion, gradTest);

    INDArray aGradTest = aGrad.getArr();
    assertEquals(aGradAssertion, aGradTest);

    INDArray cGradTest = cGrad.getArr();
    assertEquals(cGradAssertion, cGradTest);

    INDArray wGradTest = wGrad.getArr();
    assertEquals(wGradAssertion, wGradTest);

    INDArray dGradTest = dGrad.getArr();
    assertEquals(dGradAssertion, dGradTest);
}
 
Example 7
Source File: ShapeOpValidation.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Test
public void testDistancesExec(){
    //https://github.com/deeplearning4j/deeplearning4j/issues/7001
    String[] distanceTypes = {"euclidean", "manhattan", "cosinesim", "cosinedist", "jaccard"};
    for (String s : distanceTypes) {
        log.info("Starting: {}", s);

        // 4x4 fixture: each row holds two 2-element embeddings side by side
        INDArray allEmbeddings = Nd4j.create(4, 4);
        allEmbeddings.putRow(0, Nd4j.create(new float[]{0, 2, -2, 0}));
        allEmbeddings.putRow(1, Nd4j.create(new float[]{0, 1, -1, 0}));
        allEmbeddings.putRow(2, Nd4j.create(new float[]{0, -1, 1, 0}));
        allEmbeddings.putRow(3, Nd4j.create(new float[]{0, -2, 2, 0}));
        long half = allEmbeddings.size(1) / 2L;

        // Split each row into its left and right embedding vectors
        INDArray left = allEmbeddings.get(NDArrayIndex.all(), NDArrayIndex.interval(0, half));
        INDArray right = allEmbeddings.get(NDArrayIndex.all(), NDArrayIndex.interval(half, allEmbeddings.size(1)));

        log.info(right.shapeInfoToString());

        SameDiff sd = SameDiff.create();
        sd.enableDebugMode();

        SDVariable xVar = sd.var("x", left);
        SDVariable yVar = sd.var("y", right);

        // Extra op on y so the distance input is not a plain variable
        yVar = yVar.add(yVar);

        SDVariable dist;
        switch (s){
            case "euclidean":
                dist = sd.math().euclideanDistance(s, yVar, xVar, 0);
                break;
            case "manhattan":
                dist = sd.math().manhattanDistance(s, yVar, xVar, 0);
                break;
            case "cosinesim":
                dist = sd.math().cosineSimilarity(s, yVar, xVar, 0);
                break;
            case "cosinedist":
                dist = sd.math().cosineDistance(s, yVar, xVar, 0);
                break;
            case "jaccard":
                dist = sd.math().jaccardDistance(s, yVar, xVar, 0);
                break;
            default:
                throw new RuntimeException();
        }

        SDVariable loss = dist.sum();

        // Smoke test: both the forward pass and the backward pass must
        // complete without throwing for every distance type
        sd.output(Collections.emptyMap(), Lists.newArrayList(s));
        sd.calculateGradients(Collections.emptyMap(), sd.getVariables().keySet());
    }
}