Java Code Examples for org.nd4j.autodiff.samediff.SameDiff#var()

The following examples show how to use org.nd4j.autodiff.samediff.SameDiff#var(). You can vote up the examples you find useful or vote down those you don't, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example 1
Source File: ShapeOpValidation.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testMergeMaxIndex() {
    Nd4j.getRandom().setSeed(12345);

    // Three one-hot inputs: MergeMaxIndex should report which input holds the max at each position.
    SameDiff sd = SameDiff.create();
    SDVariable firstInput = sd.var(Nd4j.createFromArray(new float[] {1, 0, 0}));
    SDVariable secondInput = sd.var(Nd4j.createFromArray(new float[] {0, 1, 0}));
    SDVariable thirdInput = sd.var(Nd4j.createFromArray(new float[] {0, 0, 1}));

    SDVariable out = new MergeMaxIndex(sd, new SDVariable[]{firstInput, secondInput, thirdInput}, DataType.INT32).outputVariable();

    // Position 0 is largest in input 0, position 1 in input 1, position 2 in input 2.
    INDArray expected = Nd4j.createFromArray(0, 1, 2);
    TestCase tc = new TestCase(sd)
            .expectedOutput("mergemaxindex", expected)
            .gradientCheck(false);
    assertNull(OpValidation.validate(tc));
}
 
Example 2
Source File: TransformOpValidation.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testDiag() {
    SameDiff sd = SameDiff.create();

    // Vector [1, 2] should map to the diagonal matrix [[1, 0], [0, 2]].
    INDArray vector = Nd4j.create(new double[]{1, 2}, new int[]{2});
    SDVariable in = sd.var("in", DataType.DOUBLE, new long[]{2});
    INDArray expOut = Nd4j.create(new double[][]{{1, 0}, {0, 2}});

    // Sanity-check the raw custom op against the expected matrix first.
    INDArray opOut = Nd4j.create(DataType.DOUBLE, 2, 2);
    Nd4j.getExecutioner().exec(
            DynamicCustomOp.builder("diag").addInputs(vector).addOutputs(opOut).build());
    assertEquals(expOut, opOut);

    // Then validate the SameDiff graph version, including the gradient check.
    SDVariable diagOut = sd.math().diag("diag", in);
    SDVariable loss = sd.standardDeviation("loss", diagOut, false, 0, 1);
    sd.associateArrayWithVariable(vector, in);

    assertNull(OpValidation.validate(new TestCase(sd)
            .expectedOutput("diag", expOut)
            .gradientCheck(true)));
}
 
Example 3
Source File: MiscOpValidation.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testTrace(){
    //TODO need to work out how to handle shape_op for scalars...
    //OpValidationSuite.ignoreFailing();
    Nd4j.getRandom().setSeed(12345);

    for (int[] shape : new int[][]{{3, 3}}) {
        INDArray input = Nd4j.rand(shape);

        SameDiff sd = SameDiff.create();
        SDVariable in = sd.var("in", input);
        SDVariable traceOut = sd.math().trace(in);

        // Reference value: sum of the main diagonal.
        double expTrace = Nd4j.diag(input).sumNumber().doubleValue();

        String err = OpValidation.validate(new TestCase(sd)
                .expected(traceOut, Nd4j.scalar(expTrace))
                .testName(Arrays.toString(shape)));
        assertNull(err);
    }
}
 
Example 4
Source File: ReductionOpValidation.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testNorm2() {
    SameDiff sd = SameDiff.create();

    // 3x4 input; norm2 over dimension 0 yields one value per column.
    INDArray inArr = Nd4j.linspace(1, 12, 12).reshape(3, 4);
    SDVariable in = sd.var(inArr);

    SDVariable norm2 = new Norm2(sd, in, false, new int[]{0}).outputVariable();

    // Precomputed column-wise L2 norms of the linspace matrix.
    INDArray expected = Nd4j.createFromArray(new double[]{
            10.3441, 11.8322, 13.3791, 14.9666
    });

    String err = OpValidation.validate(new TestCase(sd)
            .gradientCheck(true)
            .expectedOutput(norm2.name(), expected));
    assertNull(err);
}
 
Example 5
Source File: TransformOpValidation.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testClipByAvgNorm() {
    Nd4j.getRandom().setSeed(12345);

    INDArray arr = Nd4j.rand(DataType.DOUBLE, 2, 2, 2);
    SameDiff sd = SameDiff.create();
    SDVariable in = sd.var(arr);

    SDVariable clipped = new ClipByAvgNorm(sd, in, 1e-2, 0, 1, 2).outputVariable();
    // Expected result reconstructed from clipByNorm scaled by the element count.
    SDVariable expected = sd.math.clipByNorm(in, 1e-2, 0, 1, 2).mul(arr.length());

    sd.standardDeviation("loss", clipped, true).markAsLoss();

    String err = OpValidation.validate(new TestCase(sd)
            .expectedOutput("clipbyavgnorm", expected.eval())
            .gradientCheck(false));
    assertNull(err);
}
 
Example 6
Source File: TransformOpValidation.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testMatMulTensor() {
    // Batched matmul over leading dims: [1,2,3,4,5] x [1,2,3,5,6] -> [1,2,3,4,6]
    final INDArray a = Nd4j.rand(new int[]{1, 2, 3, 4, 5});
    final INDArray b = Nd4j.rand(new int[]{1, 2, 3, 5, 6});

    final INDArray z = Nd4j.matmul(a, b);

    // Fixed argument order: JUnit convention is (expected, actual), so a failure
    // message reports the hard-coded shape as the expectation, not the other way round.
    assertArrayEquals(new long[]{1, 2, 3, 4, 6}, z.shape());

    // Validate the SameDiff mmul gradient for the same tensors.
    SameDiff sd = SameDiff.create();
    SDVariable sdA = sd.var("a", a);
    SDVariable sdB = sd.var("b", b);
    SDVariable t = sd.mmul(sdA, sdB);
    t.norm1("out");

    String err = OpValidation.validate(new TestCase(sd)
            .gradientCheck(true));
    assertNull(err, err);
}
 
Example 7
Source File: ShapeOpValidation.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testConstant(){
    //OpValidationSuite.ignoreFailing();

    // Case 0: variable created without an explicit shape.
    SameDiff sd = SameDiff.create();
    INDArray arr = Nd4j.create(new double[]{1,2,3});
    SDVariable in = sd.var(arr);
    SDVariable loss = in.std(true);

    assertNull(OpValidation.validate(new TestCase(sd).expected(in, arr)));

    // Case 1: constant with an explicit shape, alongside a scalar variable.
    sd = SameDiff.create();
    arr = Nd4j.scalar(3.0);
    in = sd.var(arr);
    SDVariable constant = sd.constant(Nd4j.create(DataType.FLOAT, 3,4,5));
    INDArray exp = Nd4j.valueArrayOf(new long[]{3,4,5}, 3.0);
    loss = constant.std(true);

    TestCase tc = new TestCase(sd)
            .gradientCheck(false)
            .expected(constant, Nd4j.create(DataType.FLOAT, 3,4,5));
    assertNull(OpValidation.validate(tc));
}
 
Example 8
Source File: ShapeOpValidation.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testPermute3(){
    INDArray input = Nd4j.linspace(DataType.FLOAT, 1, 6, 1).reshape(3, 2);
    INDArray permuteDims = Nd4j.createFromArray(1, 0);

    SameDiff sd = SameDiff.create();
    SDVariable in = sd.var(input);
    SDVariable dims = sd.constant(permuteDims);

    // Permuting a rank-2 array with dimensions [1, 0] is exactly a transpose.
    INDArray expected = input.transpose();
    INDArray actual = in.permute(dims).eval();
    assertEquals(expected, actual);
}
 
Example 9
Source File: ShapeOpValidation.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testSlice2d() {
    INDArray arr = Nd4j.linspace(1, 12, 12).reshape('c', 3, 4);

    SameDiff sd = SameDiff.create();
    SDVariable in = sd.var("in", arr);
    // Full slice covers the whole 3x4 array; subPart covers rows 1..2, cols 2..3.
    SDVariable fullSlice = sd.slice(in, new int[]{0, 0}, new int[]{3, 4});
    SDVariable subPart = sd.slice(in, new int[]{1, 2}, new int[]{2, 2});

    Map<String, INDArray> outputs = sd.outputAll(Collections.emptyMap());

    assertEquals(arr, outputs.get(fullSlice.name()));
    assertEquals(arr.get(interval(1, 3), interval(2, 4)), outputs.get(subPart.name()));
}
 
Example 10
Source File: ReductionOpValidation.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testZeroFraction() {
    List<String> allFailed = new ArrayList<>();
    for (int i = 0; i < 2; i++) {
        SameDiff sd = SameDiff.create();

        // i == 0: fixed 0/1 values (not gradient checkable); i == 1: random values.
        INDArray ia = (i == 0)
                ? Nd4j.create(new int[]{2, 2}, new float[]{0, 1, 0, 1})
                : Nd4j.rand(DataType.FLOAT, 2, 2);

        SDVariable input = sd.var("in", 2, 2);
        sd.associateArrayWithVariable(ia, input);

        SDVariable zeroFraction = sd.math().zeroFraction(input);

        // Fixed case has exactly half zeros; random doubles are ~never exactly zero.
        String error = OpValidation.validate(new TestCase(sd)
                .expectedOutput(zeroFraction.name(), Nd4j.scalar(i == 0 ? 0.5f : 0.0f))
                .gradientCheck(i != 0));
        if (error != null) {
            allFailed.add(error);
        }
    }

    assertEquals(allFailed.toString(), 0, allFailed.size());
}
 
Example 11
Source File: NDLossTest.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testMeanSquaredError() {
    SameDiff sd = SameDiff.create();

    int nOut = 4;
    int minibatch = 10;
    // Labels use -1 for the batch dim so the graph accepts any minibatch size.
    SDVariable predictions = sd.var("in", DataType.DOUBLE, minibatch, nOut);
    SDVariable labels = sd.var("labels", DataType.DOUBLE, -1, nOut);

    INDArray wArr = Nd4j.create(new double[][]{
            {0, 0, 0, 0}, {0, 0, 1, 1}, {1, 1, 0, 0}, {1, 1, 1, 1}, {1, 1, 1, 1},
            {2, 2, 2, 2}, {2, 2, 2, 2}, {2, 2, 2, 2}, {2, 2, 2, 2}, {2, 2, 2, 2}});
    SDVariable weights = sd.var("weights", wArr);

    LossReduce reduction = LossReduce.MEAN_BY_NONZERO_WEIGHT_COUNT;

    INDArray predictionsArr = Nd4j.randn(DataType.DOUBLE, minibatch, nOut);
    INDArray labelsArr = Nd4j.randn(DataType.DOUBLE, minibatch, nOut);

    // One loss with explicit weights, one with null (default) weights.
    SDVariable weightedLoss = sd.loss().meanSquaredError("loss", labels, predictions, weights, reduction);
    SDVariable unweightedLoss = sd.loss().meanSquaredError("loss2", labels, predictions, null, reduction);
    sd.associateArrayWithVariable(predictionsArr, predictions);
    sd.associateArrayWithVariable(labelsArr, labels);

    INDArray expWeighted = weightedLoss.eval();
    INDArray expUnweighted = unweightedLoss.eval();

    // The ND one-shot API must agree with the SameDiff graph results.
    INDArray actWeighted = Nd4j.loss().meanSquaredError(labelsArr, predictionsArr, wArr, reduction);
    INDArray actUnweighted = Nd4j.loss().meanSquaredError(labelsArr, predictionsArr, null, reduction);
    assertEquals(expWeighted, actWeighted);
    assertEquals(expUnweighted, actUnweighted);
}
 
Example 12
Source File: TransformOpValidation.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testEmbeddingLookup() {
    Nd4j.getRandom().setSeed(12345);

    SameDiff sd = SameDiff.create();
    // 1024 embeddings of width 10; gather rows 0, 5, 17 and 33.
    SDVariable weights = sd.var("in", Nd4j.rand(1024, 10));
    SDVariable indices = sd.constant("indices", Nd4j.createFromArray(new long[]{0, 5, 17, 33}));
    SDVariable result = new EmbeddingLookup(sd, weights, indices, PartitionMode.MOD).outputVariable();

    // Looking up 4 indices yields a matrix of shape [4, 10].
    assertArrayEquals(new long[]{4, 10}, result.eval().shape());
}
 
Example 13
Source File: LayerOpValidation.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test(expected = IllegalArgumentException.class)
public void exceptionThrown_WhenConf3DInvalid() {
    // Verifies that building a conv3d with an invalid configuration throws
    // IllegalArgumentException. The negative strides (.sW(-1).dW(-1)) below are
    // the deliberately invalid part of the config.
    Nd4j.getRandom().setSeed(12345);

    //NCDHW format
    int[] inSizeNCDHW = {2, 3, 4, 5, 5};

    // NOTE(review): 'failed' is never read in this method — looks like leftover
    // scaffolding from a sibling validation test.
    List<String> failed = new ArrayList<>();

    // Exercise both data layouts; either iteration should hit the exception.
    for (boolean ncdhw : new boolean[]{true, false}) {
        int nIn = inSizeNCDHW[1];
        // ncdhwToNdhwc is a test-class helper; presumably permutes NCDHW -> NDHWC. TODO confirm.
        int[] shape = (ncdhw ? inSizeNCDHW : ncdhwToNdhwc(inSizeNCDHW));

        SameDiff sd = SameDiff.create();
        SDVariable in = sd.var("in", shape);

        SDVariable out;
        String msg = "0 - conv3d+bias+same, ncdhw=" + ncdhw + " - input " + Arrays.toString(shape);

        SDVariable w0 = sd.var("w0", Nd4j.rand(new int[]{2, 2, 2, nIn, 3}).muli(10));  //[kD, kH, kW, iC, oC]
        SDVariable b0 = sd.var("b0", Nd4j.rand(new long[]{3}).muli(10));
        // Invalid: negative stride/dilation values should be rejected by the config/op.
        out = sd.cnn().conv3d(in, w0, b0, Conv3DConfig.builder()
                .dataFormat(ncdhw ? Conv3DConfig.NCDHW : Conv3DConfig.NDHWC)
                .isSameMode(true)
                .kH(2).kW(2).kD(2)
                .sD(1).sH(1).sW(-1).dW(-1)
                .build());
    }
}
 
Example 14
Source File: LayerOpValidation.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testLrn2d() {
    Nd4j.getRandom().setSeed(12345);

    int[][] inputSizes = new int[][]{{1, 3, 8, 8}, {3, 6, 12, 12}};

    List<String> failed = new ArrayList<>();

    // Gradient-check local response normalization for each NCHW input size.
    for (int[] inSizeNCHW : inputSizes) {
        SameDiff sd = SameDiff.create();

        String msg = "LRN with NCHW - input" + Arrays.toString(inSizeNCHW);
        int[] inSize = inSizeNCHW;
        SDVariable in = sd.var("in", inSize);

        SDVariable lrn = sd.cnn().localResponseNormalization(in, LocalResponseNormalizationConfig.builder()
                .depth(3)
                .bias(1)
                .alpha(1)
                .beta(0.5)
                .build());

        INDArray inArr = Nd4j.rand(inSize).muli(10);
        in.setArray(inArr);
        SDVariable loss = sd.mean("loss", lrn);

        log.info("Starting test: " + msg);
        String error = OpValidation.validate(new TestCase(sd).gradientCheck(true));
        if (error != null) {
            failed.add(msg);
        }
    }

    assertEquals(failed.toString(), 0, failed.size());
}
 
Example 15
Source File: TransformOpValidation.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testLogSumExp() {
    Nd4j.getRandom().setSeed(12345);

    INDArray arr = Nd4j.rand(DataType.FLOAT, 1, 4);
    SameDiff sd = SameDiff.create();
    SDVariable in = sd.var(arr);
    INDArray actual = sd.math().logSumExp(in).eval();

    // Reference: log(sum(exp(x))) computed directly with Transforms.
    INDArray expected = Transforms.log(Transforms.exp(arr, true).sum());
    assertEquals(expected, actual);
}
 
Example 16
Source File: ShapeOpValidation.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testMatrixDeterminant3(){
    OpValidationSuite.ignoreFailing();  //Gradient checks failing
    Nd4j.getRandom().setSeed(12345);
    INDArray in = Nd4j.rand(3,3);

    SameDiff sd = SameDiff.create();
    SDVariable var = sd.var("in", in);
    SDVariable md = sd.math().matrixDeterminant(var);

    // Reference determinant via Apache Commons LU decomposition.
    double apacheDet = new LUDecomposition(CheckUtil.convertToApacheMatrix(in)).getDeterminant();

    // Cross-check with the explicit 3x3 cofactor expansion
    // (https://en.wikipedia.org/wiki/Determinant).
    double[][] m = in.toDoubleMatrix();
    double manualDet =
              m[0][0] * m[1][1] * m[2][2]
            + m[0][1] * m[1][2] * m[2][0]
            + m[0][2] * m[1][0] * m[2][1]
            - m[0][2] * m[1][1] * m[2][0]
            - m[0][1] * m[1][0] * m[2][2]
            - m[0][0] * m[1][2] * m[2][1];
    assertEquals(apacheDet, manualDet, 1e-6);

    // The SameDiff op should match the reference value.
    INDArray outExp = Nd4j.scalar(apacheDet);
    String err = OpValidation.validate(new TestCase(sd)
            .expected(md.name(), outExp));
    assertNull(err);
}
 
Example 17
Source File: MiscOpValidation.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Test
public void testMmulGradients(){
    int[] aShape = new int[]{2,3};
    int[] bShape = new int[]{3,4};
    List<String> failed = new ArrayList<>();

    // Exhaustively cover memory order x transpose flags for mmul.
    for (char aOrder : new char[]{'c', 'f'}) {
        for (char bOrder : new char[]{'c', 'f'}) {
            for (boolean tA : new boolean[]{false, true}) {
                for (boolean tB : new boolean[]{false, true}) {
                    for (boolean tRes : new boolean[]{false, true}) {    //https://github.com/deeplearning4j/deeplearning4j/issues/5648
                        Nd4j.getRandom().setSeed(12345);

                        // t(...) pre-swaps the shape so the op's transpose flag undoes it.
                        INDArray aArr = Nd4j.rand(DataType.DOUBLE, t(tA, aShape)).dup(aOrder);
                        INDArray bArr = Nd4j.rand(DataType.DOUBLE, t(tB, bShape)).dup(bOrder);

                        SameDiff sd = SameDiff.create();
                        SDVariable a = sd.var("a", aArr);
                        SDVariable b = sd.var("b", bArr);

                        SDVariable mmul = sd.mmul(a, b, tA, tB, tRes);

                        // Expected result computed directly on the arrays.
                        INDArray exp = (tA ? aArr.transpose() : aArr);
                        exp = exp.mmul(tB ? bArr.transpose() : bArr);
                        exp = (tRes ? exp.transpose() : exp);

                        SDVariable loss = mmul.std(true);

                        String name = aOrder + "," + bOrder + ",tA=" + tA + ",tB=" + tB +
                                ",tRes=" + tRes;
                        String err = OpValidation.validate(
                                new TestCase(sd).testName(name).expected(mmul, exp), true);
                        if (err != null) {
                            failed.add(err);
                        }
                    }
                }
            }
        }
    }

    assertEquals(failed.toString(), 0, failed.size());
}
 
Example 18
Source File: ShapeOpValidation.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Test
public void testSqueezeGradient() {
    val origShape = new long[]{3, 4, 5};

    List<String> failed = new ArrayList<>();

    // Set each dimension to 1 in turn and squeeze it out.
    for (int i = 0; i < 3; i++) {

        val shape = origShape.clone();
        shape[i] = 1;

        for (Pair<INDArray, String> p : NDArrayCreationUtil.getAll3dTestArraysWithShape(12345, shape, DataType.DOUBLE)) {
            INDArray inArr = p.getFirst().muli(100);

            SameDiff sd = SameDiff.create();
            SDVariable in = sd.var("in", inArr);
            SDVariable squeeze = sd.squeeze(in, i);
            //Using stdev here: mean/sum would backprop the same gradient for each input...
            SDVariable stdev = sd.standardDeviation("out", squeeze, true);

            // Expected post-squeeze shape: the remaining two dimensions.
            long[] expShapePostSqueeze;
            switch (i) {
                case 0:
                    expShapePostSqueeze = new long[]{4, 5};
                    break;
                case 1:
                    expShapePostSqueeze = new long[]{3, 5};
                    break;
                case 2:
                    expShapePostSqueeze = new long[]{3, 4};
                    break;
                default:
                    throw new RuntimeException();
            }

            INDArray exp = inArr.dup('c').reshape('c', expShapePostSqueeze);

            Map<String, INDArray> m = sd.outputAll(null);

            INDArray squeezed = m.get(squeeze.name());
//                assertArrayEquals(expShapePostSqueeze, squeezed.shape());

            // Squeeze must not change the overall stdev of the values.
            INDArray out = m.get(stdev.name());
            INDArray expOut = in.getArr().std(true, Integer.MAX_VALUE);
            assertEquals(expOut, out);

            String msg = "squeezeDim=" + i + ", source=" + p.getSecond();
            TestCase tc = new TestCase(sd)
                    .testName(msg)
                    .expected(squeeze.name(), exp)
                    .expectedOutput("out", expOut);

            String error = OpValidation.validate(tc, true);
            if (error != null) {
                // BUGFIX: previously added the undefined symbol "name" here;
                // record the failing case plus the validation error instead.
                failed.add(msg + " - " + error);
            }
        }
    }

    assertEquals(failed.toString(), 0, failed.size());
}
 
Example 19
Source File: ShapeOpValidation.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Test
public void testExpandDimsGradient() {
    val origShape = new long[]{3, 4};

    List<String> failed = new ArrayList<>();

    boolean first = true;
    // Insert a size-1 axis at each position of the 3x4 matrix.
    for (int axis = 0; axis < 3; axis++) {

        long[] expExpandShape;
        switch (axis) {
            case 0:
                expExpandShape = new long[]{1, 3, 4};
                break;
            case 1:
                expExpandShape = new long[]{3, 1, 4};
                break;
            case 2:
                expExpandShape = new long[]{3, 4, 1};
                break;
            default:
                throw new RuntimeException();
        }

        for (Pair<INDArray, String> pair : NDArrayCreationUtil.getAllTestMatricesWithShape(origShape[0], origShape[1], 12345, DataType.DOUBLE)) {
            INDArray inArr = pair.getFirst().muli(100);

            SameDiff sd = SameDiff.create();
            SDVariable in = sd.var("in", inArr);
            SDVariable expand = sd.expandDims(in, axis);
            //Using stdev here: mean/sum would backprop the same gradient for each input...
            SDVariable stdev = sd.standardDeviation("out", expand, true);

            Map<String, INDArray> outputs = sd.outputAll(null);
            INDArray expOut = in.getArr().std(true);

            // expandDims must not change the data, only insert the axis.
            assertArrayEquals(expExpandShape, outputs.get(expand.name()).shape());
            INDArray expExpand = inArr.dup('c').reshape(expExpandShape);

            String msg = "expandDim=" + axis + ", source=" + pair.getSecond();
            log.info("Starting: " + msg);

            TestCase tc = new TestCase(sd);
            tc.testName(msg)
                    .expectedOutput("out", expOut)
                    .expectedOutput(expand.name(), expExpand);

            String error = OpValidation.validate(tc);
            if (error != null) {
                failed.add(error);
            }
        }
    }
    assertEquals(failed.toString(), 0, failed.size());
}
 
Example 20
Source File: MiscOpValidation.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Test
public void testGradientAutoBroadcast3() {
    //These tests: output size > input sizes
    // Gradient-checks elementwise broadcast ops where BOTH inputs are broadcast
    // up to a larger output shape. Each (shape pair) x (op index 0..7) combination
    // is validated independently and failures are collected, so one failing op
    // does not mask the others.

    Nd4j.getRandom().setSeed(12345);

    List<String> failed = new ArrayList<>();

    //Test cases: in1Shape, in2Shape, shapeOf(op(in1,in2))
    List<Triple<long[], long[], long[]>> testCases = new ArrayList<>();
    testCases.add(new Triple<>(new long[]{3, 1}, new long[]{1, 4}, new long[]{3, 4}));
    testCases.add(new Triple<>(new long[]{3, 1}, new long[]{3, 4}, new long[]{3, 4}));
    testCases.add(new Triple<>(new long[]{3, 4}, new long[]{1, 4}, new long[]{3, 4}));
    testCases.add(new Triple<>(new long[]{3, 4, 1}, new long[]{1, 1, 5}, new long[]{3, 4, 5}));
    testCases.add(new Triple<>(new long[]{3, 4, 1}, new long[]{3, 1, 5}, new long[]{3, 4, 5}));
    testCases.add(new Triple<>(new long[]{3, 1, 5}, new long[]{1, 4, 1}, new long[]{3, 4, 5}));
    testCases.add(new Triple<>(new long[]{3, 1, 5}, new long[]{1, 4, 5}, new long[]{3, 4, 5}));
    testCases.add(new Triple<>(new long[]{3, 1, 5}, new long[]{3, 4, 5}, new long[]{3, 4, 5}));
    testCases.add(new Triple<>(new long[]{3, 1, 1, 1}, new long[]{1, 4, 5, 6}, new long[]{3, 4, 5, 6}));
    testCases.add(new Triple<>(new long[]{1, 1, 1, 6}, new long[]{3, 4, 5, 6}, new long[]{3, 4, 5, 6}));
    testCases.add(new Triple<>(new long[]{1, 4, 5, 1}, new long[]{3, 1, 1, 6}, new long[]{3, 4, 5, 6}));
    if(!OpValidationSuite.IGNORE_FAILING) {
        // Rank-mismatched broadcast case, only run when failing tests are enabled.
        testCases.add(new Triple<>(new long[]{1, 6}, new long[]{3, 4, 5, 1}, new long[]{3, 4, 5, 6}));
    }

    for (val p : testCases) {

        // i selects the broadcast op under test (see switch below).
        for (int i = 0; i < 8; i++) {

            SameDiff sd = SameDiff.create();

            SDVariable in3 = sd.var("in1", DataType.DOUBLE, p.getFirst());
            SDVariable in2 = sd.var("in2", DataType.DOUBLE, p.getSecond());

            String name;
            SDVariable bcOp;
            switch (i) {
                case 0:
                    bcOp = in3.add(in2);
                    name = "add";
                    break;
                case 1:
                    bcOp = in3.sub(in2);
                    name = "sub";
                    break;
                case 2:
                    bcOp = in3.mul(in2);
                    name = "mul";
                    break;
                case 3:
                    bcOp = in3.div(in2);
                    name = "div";
                    break;
                case 4:
                    bcOp = in3.rsub(in2);
                    name = "rsub";
                    break;
                case 5:
                    bcOp = in3.rdiv(in2);
                    name = "rdiv";
                    break;
                case 6:
                    //bcOp = sd.scalarFloorDiv(in3, in2);
                    bcOp = new FloorDivOp(sd, in3, in2).outputVariable();
                    name = "floordiv";
                    break;
                case 7:
                    //bcOp = sd.scalarFloorMod(in3, in2);
                    bcOp = new FloorModOp(sd, in3, in2).outputVariable();
                    name = "floormod";
                    if(OpValidationSuite.IGNORE_FAILING){
                        //https://github.com/deeplearning4j/deeplearning4j/issues/5976
                        continue;
                    }
                    break;
                default:
                    throw new RuntimeException();
            }

            // Scalar loss so the gradient check has a single output to differentiate.
            SDVariable outVar = sd.sum(bcOp);

            String msg = "(test " + i + ": " + name + ", array 1 size =" + Arrays.toString(p.getFirst())
                    + ", array 2 size = " + Arrays.toString(p.getSecond()) + ")";
            log.info("*** Starting test: " + msg);

            // muli(100) keeps values well away from zero (important for div/rdiv/floormod).
            INDArray in3Arr = Nd4j.rand(DataType.DOUBLE, p.getFirst()).muli(100);
            INDArray in2Arr = Nd4j.rand(DataType.DOUBLE, p.getSecond()).muli(100);

            sd.associateArrayWithVariable(in3Arr, in3);
            sd.associateArrayWithVariable(in2Arr, in2);

            TestCase tc = new TestCase(sd);
            String error = OpValidation.validate(tc);
            if(error != null){
                failed.add(name + " " + i +  " - " + error);
            }
        }
    }

    assertEquals("Failed: " + failed, 0, failed.size());
}