Java Code Examples for org.nd4j.autodiff.samediff.SameDiff#associateArrayWithVariable()

The following examples show how to use org.nd4j.autodiff.samediff.SameDiff#associateArrayWithVariable(). The examples are extracted from open source projects; the source file and project are listed above each example.
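Before the project examples, here is a minimal, self-contained sketch of the basic pattern (an illustrative sketch, not taken from the projects below; it assumes an ND4J backend such as nd4j-native is on the classpath). associateArrayWithVariable(INDArray, SDVariable) binds a concrete array to a graph variable so that a subsequent evaluation uses that data:

import org.nd4j.autodiff.samediff.SDVariable;
import org.nd4j.autodiff.samediff.SameDiff;
import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class AssociateArrayExample {
    public static void main(String[] args) {
        SameDiff sd = SameDiff.create();

        // Declare a 2x3 variable and a simple op that consumes it
        SDVariable in = sd.var("in", DataType.FLOAT, 2, 3);
        SDVariable out = sd.math().square(in);

        // Bind a concrete array to "in", then evaluate the output
        INDArray data = Nd4j.rand(DataType.FLOAT, 2, 3);
        sd.associateArrayWithVariable(data, in);
        System.out.println(out.eval());
    }
}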
Example 1
Source File: NDLossTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testSparseSoftmaxCrossEntropy() {
    SameDiff sd = SameDiff.create();

    int nOut = 4;
    int minibatch = 10;
    SDVariable predictions = sd.var("in", DataType.DOUBLE, minibatch, nOut);
    SDVariable labels = sd.var("labels", DataType.INT32, -1);


    INDArray predictionsArr = Nd4j.randn(DataType.DOUBLE, minibatch, nOut);
    INDArray labelsArr = Nd4j.create(DataType.INT32, minibatch);
    for( int i=0; i<minibatch; i++ ){
        labelsArr.putScalar(i, i%nOut);
    }

    SDVariable loss = sd.loss().sparseSoftmaxCrossEntropy("loss", predictions, labels);
    sd.associateArrayWithVariable(predictionsArr, predictions);
    sd.associateArrayWithVariable(labelsArr, labels);

    INDArray y_exp = loss.eval();

    INDArray y = Nd4j.loss().sparseSoftmaxCrossEntropy(predictionsArr, labelsArr);
    assertEquals(y_exp, y);
}
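Binding arrays with associateArrayWithVariable plays the same role as feeding placeholders at evaluation time. In recent SameDiff versions the test above could instead declare placeholders and pass the arrays to eval() directly (a sketch, assuming SameDiff.placeHolder(String, DataType, long...) and SDVariable.eval(Map) are available in the version in use):

SDVariable predictions = sd.placeHolder("in", DataType.DOUBLE, minibatch, nOut);
SDVariable labels = sd.placeHolder("labels", DataType.INT32, minibatch);

Map<String, INDArray> placeholders = new HashMap<>();
placeholders.put("in", predictionsArr);
placeholders.put("labels", labelsArr);
INDArray y_exp = loss.eval(placeholders);   // arrays supplied per call, no binding step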
 
Example 2
Source File: TransformOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testDepthToSpace() {
    Nd4j.getRandom().setSeed(1337);

    int miniBatch = 128;
    int blockSize = 4;
    int[] inputShape = new int[]{miniBatch, 2, 2, blockSize * blockSize};

    INDArray input = Nd4j.randn(inputShape);
    SameDiff sd = SameDiff.create();
    SDVariable sdInput = sd.var("in", inputShape);

    INDArray expOut = Nd4j.create(miniBatch, 2 * blockSize, 2 * blockSize, 1);
    DynamicCustomOp op = new DepthToSpace(input, expOut, blockSize, DataFormat.NHWC);
    Nd4j.getExecutioner().exec(op);

    sd.associateArrayWithVariable(input, sdInput);

    SDVariable t = sd.cnn().depthToSpace("dts", sdInput, blockSize, DataFormat.NHWC);
    SDVariable loss = sd.mean("loss", t);

    String err = OpValidation.validate(new TestCase(sd)
            .expectedOutput("dts", expOut)
            .gradientCheck(true));
    assertNull(err, err);
}
 
Example 3
Source File: NDLossTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testL2Loss() {
    SameDiff sd = SameDiff.create();

    int nOut = 4;
    int minibatch = 10;
    SDVariable predictions = sd.var("in", DataType.DOUBLE, minibatch, nOut);
    INDArray predictionsArr = Nd4j.randn(DataType.DOUBLE, minibatch, nOut);

    SDVariable loss = sd.loss().l2Loss("loss", predictions);
    sd.associateArrayWithVariable(predictionsArr, predictions);

    INDArray y_exp = loss.eval();

    INDArray y = Nd4j.loss().l2Loss(predictionsArr);
    assertEquals(y_exp, y);
}
 
Example 4
Source File: NDLossTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testAbsoluteDifference() {
    SameDiff sd = SameDiff.create();

    int nOut = 4;
    int minibatch = 10;
    SDVariable predictions = sd.var("in", DataType.DOUBLE, minibatch, nOut);
    SDVariable labels = sd.var("labels", DataType.DOUBLE, -1, nOut);

    INDArray wArr = Nd4j.create(new double[][]{
            {0, 0, 0, 0}, {0, 0, 1, 1}, {1, 1, 0, 0}, {1, 1, 1, 1}, {1, 1, 1, 1},
            {2, 2, 2, 2}, {2, 2, 2, 2}, {2, 2, 2, 2}, {2, 2, 2, 2}, {2, 2, 2, 2}});
    SDVariable w = sd.var("weights", wArr);

    LossReduce reduction = LossReduce.MEAN_BY_NONZERO_WEIGHT_COUNT;

    INDArray predictionsArr = Nd4j.randn(DataType.DOUBLE, minibatch, nOut);
    INDArray labelsArr = Nd4j.randn(DataType.DOUBLE, minibatch, nOut);


    SDVariable loss = sd.loss().absoluteDifference("loss", labels, predictions, w, reduction);
    SDVariable loss2 = sd.loss().absoluteDifference("loss2", labels, predictions, null, reduction);
    sd.associateArrayWithVariable(predictionsArr, predictions);
    sd.associateArrayWithVariable(labelsArr, labels);

    INDArray y_exp = loss.eval();
    INDArray y_exp2 = loss2.eval();

    INDArray y = Nd4j.loss().absoluteDifference(labelsArr, predictionsArr, wArr, reduction);
    INDArray y2 = Nd4j.loss().absoluteDifference(labelsArr, predictionsArr, null, reduction);
    assertEquals(y_exp, y);
    assertEquals(y_exp2, y2);
}
 
Example 5
Source File: NDLossTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testHingeLoss() {
    SameDiff sd = SameDiff.create();

    int nOut = 4;
    int minibatch = 10;
    SDVariable predictions = sd.var("in", DataType.DOUBLE, minibatch, nOut);
    SDVariable labels = sd.var("labels", DataType.DOUBLE, -1, nOut);

    INDArray wArr = Nd4j.create(new double[][]{
            {0, 0, 0, 0}, {0, 0, 1, 1}, {1, 1, 0, 0}, {1, 1, 1, 1}, {1, 1, 1, 1},
            {2, 2, 2, 2}, {2, 2, 2, 2}, {2, 2, 2, 2}, {2, 2, 2, 2}, {2, 2, 2, 2}});
    SDVariable w = sd.var("weights", wArr);

    LossReduce reduction = LossReduce.MEAN_BY_NONZERO_WEIGHT_COUNT;

    INDArray predictionsArr = Nd4j.randn(DataType.DOUBLE, minibatch, nOut);
    INDArray labelsArr = Nd4j.randn(DataType.DOUBLE, minibatch, nOut);

    SDVariable loss = sd.loss().hingeLoss("loss", labels, predictions, w, reduction);
    SDVariable loss2 = sd.loss().hingeLoss("loss2", labels, predictions, null, reduction);
    sd.associateArrayWithVariable(predictionsArr, predictions);
    sd.associateArrayWithVariable(labelsArr, labels);

    INDArray y_exp = loss.eval();
    INDArray y_exp2 = loss2.eval();

    INDArray y = Nd4j.loss().hingeLoss(labelsArr, predictionsArr, wArr, reduction);
    INDArray y2 = Nd4j.loss().hingeLoss(labelsArr, predictionsArr, null, reduction);
    assertEquals(y_exp, y);
    assertEquals(y_exp2, y2);
}
 
Example 6
Source File: ReductionOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testZeroFraction() {
    List<String> allFailed = new ArrayList<>();
    for (int i = 0; i < 2; i++) {
        SameDiff sd = SameDiff.create();

        INDArray ia;
        if (i == 0) {
            //Not gradient checkable for 0 and 1 values
            ia = Nd4j.create(new float[]{0, 1, 0, 1}, new int[]{2, 2});
        } else {
            ia = Nd4j.rand(DataType.FLOAT, 2, 2);
        }

        SDVariable input = sd.var("in", 2, 2);
        sd.associateArrayWithVariable(ia, input);

        SDVariable zeroFraction = sd.math().zeroFraction(input);

        String error = OpValidation.validate(new TestCase(sd)
                .expectedOutput(zeroFraction.name(), Nd4j.scalar(i == 0 ? 0.5f : 0.0f))
                .gradientCheck(i != 0)
        );
        if (error != null)
            allFailed.add(error);
    }

    assertEquals(allFailed.toString(), 0, allFailed.size());
}
 
Example 7
Source File: NDLossTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testSigmoidCrossEntropy() {
    SameDiff sd = SameDiff.create();

    int nOut = 4;
    int minibatch = 10;
    SDVariable predictions = sd.var("in", DataType.DOUBLE, minibatch, nOut);
    SDVariable labels = sd.var("labels", DataType.DOUBLE, -1, nOut);

    INDArray wArr = Nd4j.create(new double[][]{
            {0, 0, 0, 0}, {0, 0, 1, 1}, {1, 1, 0, 0}, {1, 1, 1, 1}, {1, 1, 1, 1},
            {2, 2, 2, 2}, {2, 2, 2, 2}, {2, 2, 2, 2}, {2, 2, 2, 2}, {2, 2, 2, 2}});
    SDVariable w = sd.var("weights", wArr);

    LossReduce reduction = LossReduce.MEAN_BY_NONZERO_WEIGHT_COUNT;

    INDArray predictionsArr = Nd4j.randn(DataType.DOUBLE, minibatch, nOut);
    INDArray labelsArr = Nd4j.randn(DataType.DOUBLE, minibatch, nOut);
    double labelSmoothing = 0.01;

    SDVariable loss = sd.loss().sigmoidCrossEntropy("loss", labels, predictions, w, reduction, labelSmoothing);
    SDVariable loss2 = sd.loss().sigmoidCrossEntropy("loss2", labels, predictions,
            null, reduction, labelSmoothing);
    sd.associateArrayWithVariable(predictionsArr, predictions);
    sd.associateArrayWithVariable(labelsArr, labels);

    INDArray y_exp = loss.eval();
    INDArray y_exp2 = loss2.eval();

    INDArray y = Nd4j.loss().sigmoidCrossEntropy(labelsArr, predictionsArr, wArr, reduction, labelSmoothing);
    INDArray y2 = Nd4j.loss().sigmoidCrossEntropy(labelsArr, predictionsArr, null, reduction, labelSmoothing);
    assertEquals(y_exp, y);
    assertEquals(y_exp2, y2);
}
 
Example 8
Source File: GradCheckTransforms.java    From nd4j with Apache License 2.0
@Test
public void testDepthToSpace() {
    Nd4j.getRandom().setSeed(1337);

    int miniBatch = 128;
    int blockSize = 4;
    String dataFormat = "NHWC";
    int isNHWC = dataFormat.equals("NHWC") ? 1 : 0;
    int[] inputShape = new int[]{miniBatch, 2, 2, blockSize * blockSize};

    INDArray input = Nd4j.randn(inputShape);
    SameDiff sd = SameDiff.create();
    SDVariable sdInput = sd.var("in", inputShape);

    INDArray expOut = Nd4j.create(miniBatch, 2 * blockSize, 2 * blockSize, 1);
    DynamicCustomOp op = DynamicCustomOp.builder("depth_to_space")
            .addInputs(input)
            .addIntegerArguments(blockSize, isNHWC)
            .addOutputs(expOut).build();
    Nd4j.getExecutioner().exec(op);

    sd.associateArrayWithVariable(input, sdInput);

    SDVariable t = sd.depthToSpace(sdInput, blockSize, dataFormat);
    SDVariable loss = sd.mean("loss", t);
    sd.exec();
    INDArray out = t.getArr();

    if (!expOut.equals(out)) {
        log.info("depth to space failed on forward");
    }

    try {
        GradCheckUtil.checkGradients(sd);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
 
Example 9
Source File: GradCheckTransforms.java    From nd4j with Apache License 2.0
@Test
public void testSpaceToDepth() {
    Nd4j.getRandom().setSeed(1337);

    int miniBatch = 128;
    int blockSize = 4;
    String dataFormat = "NHWC";
    int isNHWC = dataFormat.equals("NHWC") ? 1 : 0;
    int[] inputShape = new int[]{miniBatch, 2 * blockSize, 2 * blockSize, 1};

    INDArray input = Nd4j.randn(inputShape);
    SameDiff sd = SameDiff.create();
    SDVariable sdInput = sd.var("in", inputShape);

    INDArray expOut = Nd4j.create(miniBatch, 2, 2, blockSize * blockSize);
    DynamicCustomOp op = DynamicCustomOp.builder("space_to_depth")
            .addInputs(input)
            .addIntegerArguments(blockSize, isNHWC)
            .addOutputs(expOut).build();
    Nd4j.getExecutioner().exec(op);

    sd.associateArrayWithVariable(input, sdInput);

    SDVariable t = sd.spaceToDepth(sdInput, blockSize, dataFormat);
    SDVariable loss = sd.mean("loss", t);
    sd.exec();
    INDArray out = t.getArr();

    if (!expOut.equals(out)) {
        log.info("depth to space failed on forward");
    }

    try {
        GradCheckUtil.checkGradients(sd);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
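Note that this example and the previous one come from the older nd4j project and use APIs (sd.exec(), GradCheckUtil.checkGradients(sd)) that were later reworked; Example 2 above is the modernized deeplearning4j version of the same depth-to-space test. In a current release the forward pass would typically be requested explicitly rather than via sd.exec() (a sketch, assuming a recent SameDiff version):

sd.associateArrayWithVariable(input, sdInput);
INDArray out = t.eval();   // replaces sd.exec() followed by t.getArr()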
 
Example 10
Source File: NDLossTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testLogPoisson() {
    SameDiff sd = SameDiff.create();

    int nOut = 4;
    int minibatch = 10;
    SDVariable predictions = sd.var("in", DataType.DOUBLE, minibatch, nOut);
    SDVariable labels = sd.var("labels", DataType.DOUBLE, -1, nOut);

    INDArray wArr = Nd4j.create(new double[][]{
            {0, 0, 0, 0}, {0, 0, 1, 1}, {1, 1, 0, 0}, {1, 1, 1, 1}, {1, 1, 1, 1},
            {2, 2, 2, 2}, {2, 2, 2, 2}, {2, 2, 2, 2}, {2, 2, 2, 2}, {2, 2, 2, 2}});
    SDVariable w = sd.var("weights", wArr);

    LossReduce reduction = LossReduce.MEAN_BY_NONZERO_WEIGHT_COUNT;

    INDArray predictionsArr = Nd4j.randn(DataType.DOUBLE, minibatch, nOut);
    INDArray labelsArr = Nd4j.randn(DataType.DOUBLE, minibatch, nOut);

    SDVariable loss = sd.loss().logPoisson("loss", labels, predictions, w, reduction, false);
    SDVariable loss2 = sd.loss().logPoisson("loss2", labels, predictions, null, reduction, false);
    sd.associateArrayWithVariable(predictionsArr, predictions);
    sd.associateArrayWithVariable(labelsArr, labels);

    INDArray y_exp = loss.eval();
    INDArray y_exp2 = loss2.eval();

    INDArray y = Nd4j.loss().logPoisson(labelsArr, predictionsArr, wArr, reduction, false);
    INDArray y2 = Nd4j.loss().logPoisson(labelsArr, predictionsArr, null, reduction, false);
    assertEquals(y_exp, y);
    assertEquals(y_exp2, y2);
}
 
Example 11
Source File: NDLossTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testCosineDistance() {
    SameDiff sd = SameDiff.create();

    int nOut = 4;
    int minibatch = 10;
    SDVariable predictions = sd.var("in", DataType.DOUBLE, minibatch, nOut);
    SDVariable labels = sd.var("labels", DataType.DOUBLE, -1, nOut);

    INDArray wArr = Nd4j.create(new double[][]{
            {0, 0, 0, 0}, {0, 0, 1, 1}, {1, 1, 0, 0}, {1, 1, 1, 1}, {1, 1, 1, 1},
            {2, 2, 2, 2}, {2, 2, 2, 2}, {2, 2, 2, 2}, {2, 2, 2, 2}, {2, 2, 2, 2}});
    SDVariable w = sd.var("weights", wArr);

    LossReduce reduction = LossReduce.MEAN_BY_NONZERO_WEIGHT_COUNT;

    INDArray predictionsArr = Nd4j.randn(DataType.DOUBLE, minibatch, nOut);
    INDArray labelsArr = Nd4j.randn(DataType.DOUBLE, minibatch, nOut);

    predictionsArr.diviColumnVector(predictionsArr.norm2(1));
    labelsArr.diviColumnVector(labelsArr.norm2(1));

    SDVariable loss = sd.loss().cosineDistance("loss", labels, predictions, w, reduction, 0);
    SDVariable loss2 = sd.loss().cosineDistance("loss2", labels, predictions, null, reduction, 0);
    sd.associateArrayWithVariable(predictionsArr, predictions);
    sd.associateArrayWithVariable(labelsArr, labels);

    INDArray y_exp = loss.eval();
    INDArray y_exp2 = loss2.eval();

    INDArray y = Nd4j.loss().cosineDistance(labelsArr, predictionsArr, wArr, reduction, 0);
    INDArray y2 = Nd4j.loss().cosineDistance(labelsArr, predictionsArr, null, reduction, 0);
    assertEquals(y_exp, y);
    assertEquals(y_exp2, y2);
}
 
Example 12
Source File: GradCheckReductions.java    From nd4j with Apache License 2.0
@Test
public void testReduce3() {

    Nd4j.getRandom().setSeed(12345);

    int d0 = 3;
    int d1 = 4;
    int d2 = 5;

    List<String> allFailed = new ArrayList<>();
    for (int[] reduceDims : new int[][]{{Integer.MAX_VALUE}, {0, 1, 2}, {0}, {1}, {2}, {0, 1}, {0, 2}, {1, 2}}) {
        for (int i = 0; i < 6; i++) {

            SameDiff sd = SameDiff.create();
            sd.setLogExecution(false);


            SDVariable in = sd.var("in", new int[]{-1, d1, d2});
            SDVariable in2 = sd.var("in2", new int[]{-1, d1, d2});

            INDArray inArr = Nd4j.randn(new int[]{d0, d1, d2}).muli(100);
            INDArray in2Arr = Nd4j.randn(inArr.shape()).muli(100);

            SDVariable reduced;
            String name;
            switch (i) {
                case 0:
                    reduced = sd.manhattanDistance(in, in2, reduceDims);
                    name = "manhattan";
                    break;
                case 1:
                    reduced = sd.euclideanDistance(in, in2, reduceDims);
                    name = "euclidean";
                    break;
                case 2:
                    reduced = sd.cosineSimilarity(in, in2, reduceDims);
                    name = "cosine";
                    break;
                case 3:
                    reduced = sd.cosineDistance(in, in2, reduceDims);
                    name = "cosinedistance";
                    break;
                case 4:
                    reduced = sd.hammingDistance(in, in2, reduceDims);
                    name = "hamming";
                    break;
                case 5:
                    name = "jaccard";
                    reduced = sd.jaccardDistance(name, in, in2, reduceDims);
                    inArr.divi(100).addi(0.1);
                    in2Arr.divi(100).addi(0.1);
                    break;
                default:
                    throw new RuntimeException();
            }

            //Sum: note that this should be a no-op for the full array cases
            SDVariable sum = sd.sum(reduced, Integer.MAX_VALUE);


            String msg = "(test " + i + " - " + name + ", dimensions=" + Arrays.toString(reduceDims) + ")";
            log.info("*** Starting test: " + msg);

            sd.associateArrayWithVariable(inArr, in);
            sd.associateArrayWithVariable(in2Arr, in2);

            sd.execAndEndResult();

            // FIXME: we can't swallow exceptions here now, but once release out and stuff stabilized - we can
            //try {
                boolean ok = GradCheckUtil.checkGradients(sd, 1e-5, 1e-5, 1e-4, true, false);
                if (!ok) {
                    allFailed.add(msg);
                }
            /*
            } catch (Exception e) {
                e.printStackTrace();
                allFailed.add(msg + " - EXCEPTION");
            }
            */
        }
    }

    assertEquals("Failed: " + allFailed, 0, allFailed.size());
}
 
Example 13
Source File: GradCheckLoss.java    From nd4j with Apache License 2.0
@Test
public void testLossWeights2d() {

    String[] weightTypes = new String[]{"none", "per-example", "per-output", "per-example-output"};

    Nd4j.getRandom().setSeed(12345);

    int nOut = 4;
    int minibatch = 10;

    for (String weightType : weightTypes) {

        for (boolean binary : new boolean[]{true, false}) {  //Binary mask (like DL4J) or arbitrary weights?

            int[] weightShape;
            switch (weightType) {
                case "none":
                    weightShape = null;
                    break;
                case "per-example":
                    weightShape = new int[]{minibatch, 1};
                    break;
                case "per-output":
                    weightShape = new int[]{1, nOut};
                    break;
                case "per-example-output":
                    weightShape = new int[]{minibatch, nOut};
                    break;
                default:
                    throw new RuntimeException("Unknown type: " + weightType);
            }

            INDArray weightArr = null;
            if (!"none".equals(weightType)) {
                if (binary) {
                    weightArr = Nd4j.getExecutioner().exec(new BernoulliDistribution(Nd4j.createUninitialized(weightShape), 0.5));
                } else {
                    weightArr = Nd4j.rand(weightShape).muli(2.0);
                }
            }

            for (LossFunctions.Reduction reduction : new LossFunctions.Reduction[]{
                    LossFunctions.Reduction.MEAN_BY_COUNT, LossFunctions.Reduction.MEAN_BY_WEIGHT, LossFunctions.Reduction.SUM}) {

                for (String fn : new String[]{"mse", "l1", "l2", "mcxent"}) {

                    SameDiff sd = SameDiff.create();


                    SDVariable input = sd.var("in", new int[]{-1, nOut});
                    SDVariable labels = sd.var("labels", new int[]{-1, nOut});
                    SDVariable weight = null;
                    if (!"none".equals(weightType)) {
                        weight = sd.var("weights", weightArr);
                    }

                    INDArray inputArr = Nd4j.randn(minibatch, nOut).muli(100);
                    INDArray labelsArr = Nd4j.randn(minibatch, nOut).muli(100);

                    LossInfo lossInfo;
                    switch (fn) {
                        case "mse":
                            lossInfo = LossFunctions.mse("out", input, labels, weight, reduction, 1);
                            break;
                        case "l1":
                            lossInfo = LossFunctions.l1("out", input, labels, weight, reduction, 1);
                            //L1 = sum abs error
                            break;
                        case "l2":
                            lossInfo = LossFunctions.l2("out", input, labels, weight, reduction, 1);
                            //L2 = sum squared error
                            break;
                        case "mcxent":
                            lossInfo = LossFunctions.mcxent("out", input, labels, weight, reduction, 1);
                            //mcxent = sum label * log(prob)
                            break;
                        default:
                            throw new RuntimeException();
                    }


                    String msg = "lossFn=" + fn + ", reduction=" + reduction + ", weightType=" + weightType + ", binaryWeight=" + binary;
                    log.info("*** Starting test: " + msg);

                    sd.associateArrayWithVariable(inputArr, input);
                    sd.associateArrayWithVariable(labelsArr, labels);
                    if (weight != null) {
                        sd.associateArrayWithVariable(weightArr, weight);
                    }

                    INDArray out = sd.execAndEndResult();
                    assertEquals(1, out.length());

                    boolean ok = GradCheckUtil.checkGradients(sd);

                    assertTrue(msg, ok);
                }
            }
        }
    }
}
 
Example 14
Source File: GradCheckTransforms.java    From nd4j with Apache License 2.0
@Test
public void testBatchToSpace() {
    Nd4j.getRandom().setSeed(1337);

    int miniBatch = 4;
    int[] inputShape = new int[]{miniBatch, 1, 1, 1};

    int M = 2;
    int[] blockShape = new int[]{M, 1};
    int[] cropShape = new int[]{M, 2};

    INDArray input = Nd4j.randn(inputShape);
    INDArray blocks = Nd4j.create(new float[]{2, 2}, blockShape);
    INDArray crops = Nd4j.create(new float[]{0, 0, 0, 0}, cropShape);

    SameDiff sd = SameDiff.create();

    SDVariable sdInput = sd.var("in", inputShape);

    INDArray expOut = Nd4j.create(1, 2, 2, 1);
    DynamicCustomOp op = DynamicCustomOp.builder("batch_to_space")
            .addInputs(input, blocks, crops)
            .addOutputs(expOut).build();
    Nd4j.getExecutioner().exec(op);

    sd.associateArrayWithVariable(input, sdInput);

    SDVariable t = sd.batchToSpace(sdInput, new int[]{2, 2}, new int[][]{{0, 0}, {0, 0}});
    SDVariable loss = sd.mean("loss", t);
    sd.exec();
    INDArray out = t.getArr();

    if (!expOut.equals(out)) {
        log.info("batch to space failed on forward");
    }

    try {
        GradCheckUtil.checkGradients(sd);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
 
Example 15
Source File: GradCheckMisc.java    From nd4j with Apache License 2.0
@Test
public void testGradientAutoBroadcast2() {

    Nd4j.getRandom().setSeed(12345);

    List<String> allFailed = new ArrayList<>();

    for (int[] dim_sz1s : new int[][]{{0, 1}, {0, 2}, {1, 2}, {0, 1, 2}}) {

        int[] otherShape = {3, 4, 5};
        otherShape[dim_sz1s[0]] = 1;
        otherShape[dim_sz1s[1]] = 1;
        if (dim_sz1s.length == 3) {
            otherShape[dim_sz1s[2]] = 1;
        }

        for (int i = 0; i < 6; i++) {

            SameDiff sd = SameDiff.create();

            SDVariable in3 = sd.var("in3", new int[]{3, 4, 5});
            SDVariable in2 = sd.var("inToBc", otherShape);

            String name;
            SDVariable bcOp;
            switch (i) {
                case 0:
                    bcOp = in3.add(in2);
                    name = "add";
                    break;
                case 1:
                    bcOp = in3.sub(in2);
                    name = "sub";
                    break;
                case 2:
                    bcOp = in3.mul(in2);
                    name = "mul";
                    break;
                case 3:
                    bcOp = in3.div(in2);
                    name = "div";
                    break;
                case 4:
                    bcOp = in3.rsub(in2);
                    name = "rsub";
                    break;
                case 5:
                    bcOp = in3.rdiv(in2);
                    name = "rdiv";
                    break;
                case 6:
                    bcOp = sd.f().floorDiv(in3, in2);
                    name = "floordiv";
                    break;
                case 7:
                    bcOp = sd.f().floorMod(in3, in2);
                    name = "floormod";
                    break;
                default:
                    throw new RuntimeException();
            }

            SDVariable outVar = sd.sum(bcOp);

            String msg = "(test " + i + ": " + name + ", dimensions=" + Arrays.toString(dim_sz1s) + ")";
            log.info("*** Starting test: " + msg);

            INDArray in3Arr = Nd4j.randn(new int[]{3, 4, 5}).muli(100);
            INDArray in2Arr = Nd4j.randn(otherShape).muli(100);

            sd.associateArrayWithVariable(in3Arr, in3);
            sd.associateArrayWithVariable(in2Arr, in2);

            try {
                INDArray out = sd.execAndEndResult();
                assertNotNull(out);
                assertArrayEquals(new long[]{1, 1}, out.shape());

                // System.out.println(sd.asFlatPrint());

                boolean ok = GradCheckUtil.checkGradients(sd);
                if (!ok) {
                    allFailed.add(msg);
                }
            } catch (Exception e) {
                e.printStackTrace();
                allFailed.add(msg + " - EXCEPTION");
            }
        }
    }

    assertEquals("Failed: " + allFailed, 0, allFailed.size());
}
 
Example 16
Source File: GradCheckMisc.java    From nd4j with Apache License 2.0
@Test
public void testGradientAutoBroadcast1() {

    Nd4j.getRandom().setSeed(12345);

    List<String> allFailed = new ArrayList<>();

    for (int dim_sz1 : new int[]{0, 1, 2}) {

        int[] in2Shape = {3, 4, 5};
        in2Shape[dim_sz1] = 1;

        for (int i = 2; i < 3; i++) {

            SameDiff sd = SameDiff.create();

            SDVariable in3 = sd.var("in3", Nd4j.rand(new int[]{3, 4, 5}));
            SDVariable in2 = sd.var("in2", in2Shape);

            SDVariable bcOp;
            String name;
            switch (i) {
                case 0:
                    bcOp = in3.add(in2);
                    name = "add";
                    break;
                case 1:
                    bcOp = in3.sub(in2);
                    name = "sub";
                    break;
                case 2:
                    bcOp = in3.mul(in2);
                    name = "mul";
                    break;
                case 3:
                    bcOp = in3.div(in2);
                    name = "div";
                    break;
                case 4:
                    bcOp = in3.rsub(in2);
                    name = "rsub";
                    break;
                case 5:
                    bcOp = in3.rdiv(in2);
                    name = "rdiv";
                    break;
                case 6:
                    bcOp = sd.f().floorDiv(in3, in2);
                    name = "floordiv";
                    break;
                case 7:
                    bcOp = sd.f().floorMod(in3, in2);
                    name = "floormod";
                    break;
                default:
                    throw new RuntimeException();
            }

            SDVariable outVar = sd.sum(bcOp);

            String msg = "(test " + i + ": " + name + ", dimension=" + dim_sz1 + ")";
            log.info("*** Starting test: " + msg);

            INDArray in3Arr = Nd4j.randn(new int[]{3, 4, 5}).muli(100);
            INDArray in2Arr = Nd4j.randn(in2Shape).muli(100);

            sd.associateArrayWithVariable(in3Arr, in3);
            sd.associateArrayWithVariable(in2Arr, in2);

            try {
                INDArray out = sd.execAndEndResult();
                assertNotNull(out);
                assertArrayEquals(new long[]{1, 1}, out.shape());

                // System.out.println(sd.asFlatPrint());

                boolean ok = GradCheckUtil.checkGradients(sd);
                if (!ok) {
                    allFailed.add(msg);
                }
            } catch (Exception e) {
                e.printStackTrace();
                allFailed.add(msg + " - EXCEPTION");
            }
        }
    }

    assertEquals("Failed: " + allFailed, 0, allFailed.size());
}
 
Example 17
Source File: TensorFlowImportTest.java    From deeplearning4j with Apache License 2.0
@Test
@Ignore
public void importGraph4() throws Exception {
    SameDiff graph = TFGraphMapper.importGraph(new ClassPathResource("tf_graphs/max_multiply.pb.txt").getInputStream());

    assertNotNull(graph);

    val p0 = Nd4j.create(10, 10).assign(2.0);
    val p1 = Nd4j.create(10, 10).assign(3.0);

    graph.associateArrayWithVariable(p0, graph.variableMap().get("Placeholder"));
    graph.associateArrayWithVariable(p1, graph.variableMap().get("Placeholder_1"));

    graph.var("Placeholder", p0);
    graph.var("Placeholder_1", p1);

    val res = graph.outputAll(null).get(graph.outputs().get(0));

    assertEquals(6.0, res.meanNumber().doubleValue(), 1e-5);
}
 
Example 18
Source File: GradCheckTransforms.java    From nd4j with Apache License 2.0
@Test
public void testConditions() {

    SameDiff sd = SameDiff.create();

    INDArray ia = Nd4j.create(new float[]{4, 2});
    SDVariable in = sd.var("in", new int[]{1, 2});
    sd.associateArrayWithVariable(ia, in);


    INDArray expFinite = Nd4j.create(new float[]{1, 1});
    SDVariable finite = sd.isFinite(in);

    INDArray expInfinite = Nd4j.create(new float[]{0, 0});
    SDVariable infinite = sd.isInfinite(in);

    INDArray expNaN = Nd4j.create(new float[]{0, 0});
    SDVariable isnan = sd.isNaN(in);

    sd.exec();
    // JUnit assertions (bare 'assert' statements are silently skipped unless the JVM runs with -ea)
    assertEquals(expFinite, finite.getArr());
    assertEquals(expInfinite, infinite.getArr());
    assertEquals(expNaN, isnan.getArr());

}
 
Example 19
Source File: MiscOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testGradientAutoBroadcast1() {

    Nd4j.getRandom().setSeed(12345);

    List<String> failed = new ArrayList<>();

    for (int dim_sz1 : new int[]{0, 1, 2}) {

        int[] in2Shape = {3, 4, 5};
        in2Shape[dim_sz1] = 1;

        for (int i = 0; i < 8; i++) {

            SameDiff sd = SameDiff.create();

            SDVariable in3 = sd.var("in3", Nd4j.rand(new int[]{3, 4, 5}));
            SDVariable in2 = sd.var("in2", in2Shape);

            SDVariable bcOp;
            String name;
            switch (i) {
                case 0:
                    bcOp = in3.add(in2);
                    name = "add";
                    break;
                case 1:
                    bcOp = in3.sub(in2);
                    name = "sub";
                    break;
                case 2:
                    bcOp = in3.mul(in2);
                    name = "mul";
                    break;
                case 3:
                    bcOp = in3.div(in2);
                    name = "div";
                    break;
                case 4:
                    bcOp = in3.rsub(in2);
                    name = "rsub";
                    break;
                case 5:
                    bcOp = in3.rdiv(in2);
                    name = "rdiv";
                    break;
                case 6:
                    //bcOp = sd.scalarFloorDiv(in3, in2);
                    bcOp = new FloorDivOp(sd, in3, in2).outputVariable();
                    name = "floordiv";
                    break;
                case 7:
                    //bcOp = sd.scalarFloorMod(in3, in2);
                    bcOp = new FloorModOp(sd, in3, in2).outputVariable();
                    name = "floormod";
                    if(OpValidationSuite.IGNORE_FAILING){
                        //https://github.com/deeplearning4j/deeplearning4j/issues/5976
                        continue;
                    }
                    break;
                default:
                    throw new RuntimeException();
            }

            SDVariable outVar = sd.sum(bcOp);

            String msg = "(test " + i + ": " + name + ", dimension=" + dim_sz1 + ")";
            log.info("*** Starting test: " + msg);

            INDArray in3Arr = Nd4j.randn(new int[]{3, 4, 5}).muli(100);
            INDArray in2Arr = Nd4j.randn(in2Shape).muli(100);

            sd.associateArrayWithVariable(in3Arr, in3);
            sd.associateArrayWithVariable(in2Arr, in2);

            TestCase tc = new TestCase(sd);

            String error = OpValidation.validate(tc);
            if(error != null){
                failed.add(name);
            }
        }
    }

    assertEquals("Failed: " + failed, 0, failed.size());
}
 
Example 20
Source File: NativeGraphExecutioner.java    From nd4j with Apache License 2.0
/**
 * This method executes the given graph and returns the results.
 *
 * @param sd            the SameDiff graph to execute
 * @param configuration the execution configuration for the native graph executioner
 * @return the output arrays produced by the graph, one per result variable
 */
  @Override
  public INDArray[] executeGraph(SameDiff sd, ExecutorConfiguration configuration) {

      Map<Integer, Node> intermediate = new HashMap<>();

      ByteBuffer buffer = convertToFlatBuffers(sd, configuration, intermediate);

      BytePointer bPtr = new BytePointer(buffer);

      log.info("Buffer length: {}", buffer.limit());

      Pointer res = NativeOpsHolder.getInstance().getDeviceNativeOps().executeFlatGraphFloat(null, bPtr);
      if (res == null)
          throw new ND4JIllegalStateException("Graph execution failed");

      // FIXME: this is BAD
      PagedPointer pagedPointer = new PagedPointer(res, 1024 * 1024L);
      FlatResult fr = FlatResult.getRootAsFlatResult(pagedPointer.asBytePointer().asByteBuffer());


      log.info("VarMap: {}", sd.variableMap());

      INDArray[] results = new INDArray[fr.variablesLength()];

      for (int e = 0; e < fr.variablesLength(); e++) {
          FlatVariable var = fr.variables(e);
          log.info("Var received: id: [{}:{}/<{}>];", var.id().first(), var.id().second(), var.name());
          FlatArray ndarray = var.ndarray();


          INDArray val = Nd4j.createFromFlatArray(ndarray);
          results[e] = val;

          if (var.name() != null && sd.variableMap().containsKey(var.name())) {
              //log.info("VarName: {}; Exists: {}; NDArrayInfo: {};", var.opName(), sd.variableMap().containsKey(var.opName()), sd.getVertexToArray().containsKey(var.opName()));
//              log.info("storing: {}; array: {}", var.name(), val);
              sd.associateArrayWithVariable(val, sd.variableMap().get(var.name()));

          } else {
              //log.info("Original id: {}; out: {}; out2: {}", original, sd.getVertexIdxToInfo().get(original), graph.getVariableForVertex(original));
              if (sd.variableMap().get(var.name()) != null) {
                  sd.associateArrayWithVariable(val, sd.getVariable(var.name()));
              } else {
  //                log.info("BAD");
                  //sd.var("",val);

                  throw new ND4JIllegalStateException("Unknown variable received as result: ["+ var.name() +"]");
              }
          }
      }


      return results;
  }