Java Code Examples for org.nd4j.autodiff.samediff.SDVariable#norm1()

The following examples show how to use org.nd4j.autodiff.samediff.SDVariable#norm1() . You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: LayerOpValidation.java — from the deeplearning4j project (Apache License 2.0)
@Test
public void testLayerNorm() {
    // Standardize a random input along dimension 1, then apply gain and bias
    // (i.e. a layer norm computed by hand) and verify SameDiff's layerNorm
    // matches, including its gradients.
    final INDArray random = Nd4j.rand(DataType.DOUBLE, 10, 4);
    final INDArray standardized = random.ulike();
    Nd4j.getExecutioner().exec(new Standardize(random, standardized, 1));

    final INDArray gain = Nd4j.rand(DataType.DOUBLE, 4);
    final INDArray bias = Nd4j.rand(DataType.DOUBLE, 4);
    final INDArray res = standardized.mulRowVector(gain).addRowVector(bias);
    // norm1 reduces the output to a scalar suitable for gradient checking
    final INDArray expOut = res.norm1();

    final int[] axis = new int[]{1};
    SameDiff sd = SameDiff.create();
    SDVariable sdInput = sd.var("input", standardized);
    SDVariable sdGain = sd.var("gain", gain);
    SDVariable sdBias = sd.var("bias", bias);
    SDVariable out = sd.nn.layerNorm(sdInput, sdGain, sdBias, true, axis);
    out.norm1("out");

    String err = OpValidation.validate(new TestCase(sd)
            .expectedOutput("out", expOut)
            .gradientCheck(true));
    // Pass the validation error as the failure message, consistent with the
    // other tests in this file
    assertNull(err, err);
}
 
Example 2
Source File: LayerOpValidation.java — from the deeplearning4j project (Apache License 2.0)
@Test
public void testLayerNormNoBias() {
    // Layer norm without a bias term: standardize along dimension 1, scale
    // by a gain vector, and compare against SameDiff's layerNorm output
    // (reduced to a scalar via norm1 for the gradient check).
    final INDArray input = Nd4j.rand(DataType.DOUBLE, 10, 4);
    final INDArray normalized = input.ulike();
    Nd4j.getExecutioner().exec(new Standardize(input, normalized, 1));

    final INDArray gain = Nd4j.rand(DataType.DOUBLE, 4);
    final INDArray expected = normalized.mulRowVector(gain).norm1();

    SameDiff sd = SameDiff.create();
    SDVariable in = sd.var("input", normalized);
    SDVariable g = sd.var("gain", gain);
    SDVariable layerNorm = sd.nn.layerNorm(in, g, true, new int[]{1});
    layerNorm.norm1("out");

    TestCase tc = new TestCase(sd)
            .expectedOutput("out", expected)
            .gradientCheck(true);
    String err = OpValidation.validate(tc);
    assertNull(err, err);
}
 
Example 3
Source File: ReductionOpValidation.java — from the deeplearning4j project (Apache License 2.0)
@Test
public void testDotProductAttention(){
    // Scaled dot-product attention computed by hand:
    // softmax(K^T q / sqrt(dk)) applied to V, then compared against
    // sd.nn.dotProductAttention (reduced via norm1 for gradient checking).
    final INDArray keys = Nd4j.rand(new int[]{10, 4, 3});
    final INDArray values = Nd4j.rand(new int[]{10, 4, 3});
    final INDArray query = Nd4j.rand(new int[]{10, 4, 1});

    final INDArray exec = Nd4j.matmul(keys, query, true, false, false)
            .divi(Math.sqrt(keys.size(1)));
    Nd4j.exec((CustomOp) new SoftMax(exec, exec, 1));
    final INDArray finalOut = Nd4j.matmul(values, exec).norm1();

    SameDiff sd = SameDiff.create();
    SDVariable sdQ = sd.var("q", query);
    SDVariable sdK = sd.var("k", keys);
    SDVariable sdV = sd.var("v", values);

    SDVariable t = sd.nn.dotProductAttention(sdQ, sdK, sdV, null, true);
    t.norm1("out");

    String err = OpValidation.validate(new TestCase(sd)
                .expectedOutput("out", finalOut)
                .gradientCheck(true));
    // Pass the validation error as the failure message, consistent with the
    // other tests in this file
    assertNull(err, err);
}
 
Example 4
Source File: ReductionOpValidation.java — from the deeplearning4j project (Apache License 2.0)
@Test
public void testDotProductAttentionMultiHeadInput(){
    // Multi-head variant of the dot-product attention test: rank-4 inputs
    // [batch, heads, features, timesteps]; the attention dimensions are the
    // trailing two, hence size(-2) / softmax over axis -2.
    final INDArray keys = Nd4j.rand(new int[]{2, 5, 4, 3});
    final INDArray values = Nd4j.rand(new int[]{2, 5, 4, 3});
    final INDArray query = Nd4j.rand(new int[]{2, 5, 4, 1});

    final INDArray exec = Nd4j.matmul(keys, query, true, false, false)
            .divi(Math.sqrt(keys.size(-2)));
    Nd4j.exec((CustomOp) new SoftMax(exec, exec, -2));
    // norm1 reduces the attention output to a scalar for gradient checking
    final INDArray finalOut = Nd4j.matmul(values, exec).norm1();

    SameDiff sd = SameDiff.create();
    SDVariable sdQ = sd.var("q", query);
    SDVariable sdK = sd.var("k", keys);
    SDVariable sdV = sd.var("v", values);

    SDVariable t = sd.nn.dotProductAttention(sdQ, sdK, sdV, null, true);
    t.norm1("out");

    String err = OpValidation.validate(new TestCase(sd)
            .expectedOutput("out", finalOut)
            .gradientCheck(true));
    // Pass the validation error as the failure message, consistent with the
    // other tests in this file
    assertNull(err, err);
}
 
Example 5
Source File: TransformOpValidation.java — from the deeplearning4j project (Apache License 2.0)
@Test
public void testStandardize() {
    // Manually standardize along dimension 1 (subtract the per-row mean,
    // divide by the per-row std) and check that sd.math.standardize matches,
    // with the output reduced via norm1 for the gradient check.
    final INDArray input = Nd4j.rand(new int[]{10, 4});

    final int[] dims = new int[]{1};
    final INDArray mean = input.mean(dims);
    final INDArray stdDev = input.std(false, dims);
    final INDArray expected = input.subColumnVector(mean)
            .divColumnVector(stdDev)
            .norm1();

    SameDiff sd = SameDiff.create();
    SDVariable in = sd.var("a", input);
    SDVariable standardized = sd.math.standardize(in, dims);
    standardized.norm1("out");

    TestCase tc = new TestCase(sd)
            .expectedOutput("out", expected)
            .gradientCheck(true);
    String err = OpValidation.validate(tc);
    assertNull(err, err);
}
 
Example 6
Source File: TransformOpValidation.java — from the deeplearning4j project (Apache License 2.0)
@Test
public void testStandardizeNoDeviation() {
    // Force one row to be constant so its standard deviation is zero, and
    // verify the standardize op handles the zero-deviation case. The
    // expected value substitutes 1 wherever std == 0 to avoid dividing
    // by zero.
    final INDArray input = Nd4j.rand(new int[]{10, 4});
    for (int col = 0; col < 4; col++) {
        input.putScalar(1, col, 7);
    }

    final int[] dims = new int[]{1};
    final INDArray mean = input.mean(dims);
    final INDArray stdDev = input.std(false, dims);
    // (std == 0) casts to 1.0 where true, so zero deviations become 1
    stdDev.addi(stdDev.eq(0).castTo(DataType.DOUBLE));

    final INDArray expected = input.subColumnVector(mean)
            .divColumnVector(stdDev)
            .norm1();

    SameDiff sd = SameDiff.create();
    SDVariable in = sd.var("a", input);
    SDVariable standardized = sd.math.standardize(in, dims);
    standardized.norm1("out");

    String err = OpValidation.validate(new TestCase(sd)
            .expectedOutput("out", expected)
            .gradientCheck(true));
    assertNull(err, err);
}
 
Example 7
Source File: TransformOpValidation.java — from the deeplearning4j project (Apache License 2.0)
@Test
public void testMatMulTensor() {
    // Batched matmul over rank-5 inputs: the leading [1, 2, 3] dimensions
    // are batch dims; the trailing [4, 5] x [5, 6] dims multiply to [4, 6].
    final INDArray a = Nd4j.rand(new int[]{1, 2, 3, 4, 5});
    final INDArray b = Nd4j.rand(new int[]{1, 2, 3, 5, 6});

    final INDArray z = Nd4j.matmul(a, b);

    // JUnit convention: expected value first, actual second (the original
    // had them swapped, which produces misleading failure messages)
    assertArrayEquals(new long[]{1, 2, 3, 4, 6}, z.shape());

    SameDiff sd = SameDiff.create();
    SDVariable sdA = sd.var("a", a);
    SDVariable sdB = sd.var("b", b);
    SDVariable t = sd.mmul(sdA, sdB);
    // Reduce to a scalar so the gradient check has a scalar loss
    t.norm1("out");

    String err = OpValidation.validate(new TestCase(sd)
            .gradientCheck(true));
    assertNull(err, err);
}
 
Example 8
Source File: LayerOpValidation.java — from the deeplearning4j project (Apache License 2.0)
@Test
public void testLayerNormNoDeviation() {
    // Make one row constant (standard deviation zero along dimension 1) and
    // verify layerNorm still matches the manually standardized expectation.
    // The constant entries are masked out of the gradient check.
    final INDArray input = Nd4j.rand(DataType.DOUBLE, 10, 4);
    for (int col = 0; col < 4; col++) {
        input.putScalar(1, col, 7);
    }

    final INDArray normalized = input.ulike();
    Nd4j.getExecutioner().exec(new Standardize(input, normalized, 1));

    final INDArray gain = Nd4j.rand(DataType.DOUBLE, 4);
    final INDArray bias = Nd4j.rand(DataType.DOUBLE, 4);
    final INDArray expected = normalized.mulRowVector(gain)
            .addRowVector(bias)
            .norm1();

    SameDiff sd = SameDiff.create();
    SDVariable in = sd.var("input", normalized);
    SDVariable g = sd.var("gain", gain);
    SDVariable b = sd.var("bias", bias);
    SDVariable layerNorm = sd.nn.layerNorm(in, g, b, true, new int[]{1});
    layerNorm.norm1("out");

    TestCase tc = new TestCase(sd)
            .expectedOutput("out", expected)
            .gradCheckMask(Collections.singletonMap("input", input.neq(7)))
            .gradientCheck(true);
    String err = OpValidation.validate(tc);
    assertNull(err, err);
}
 
Example 9
Source File: ReductionOpValidation.java — from the deeplearning4j project (Apache License 2.0)
@Test
public void testDotProductAttentionWithMask(){
    // Dot-product attention with a binary timestep mask: masked positions
    // get a large negative bias before the softmax so they contribute ~0
    // attention weight. The mask is excluded from the gradient check.
    final INDArray keys = Nd4j.rand(new int[]{10, 4, 3});
    final INDArray values = Nd4j.rand(new int[]{10, 4, 3});
    final INDArray query = Nd4j.rand(new int[]{10, 4, 1});
    final INDArray mask = Nd4j.rand(10, 3).gte(0.2).castTo(DataType.DOUBLE);


    final INDArray exec = Nd4j.matmul(keys, query, true, false, false)
            .divi(Math.sqrt(keys.size(1)));
    // (mask - 1) * 1e9 is 0 for kept positions and -1e9 for masked ones
    exec.addi(mask.reshape(10, 3, 1).sub(1).muli(1e9));
    Nd4j.exec((CustomOp) new SoftMax(exec, exec, 1));
    final INDArray finalOut = Nd4j.matmul(values, exec).norm1();

    SameDiff sd = SameDiff.create();
    SDVariable sdQ = sd.var("q", query);
    SDVariable sdK = sd.var("k", keys);
    SDVariable sdV = sd.var("v", values);
    SDVariable sdMask = sd.constant("mask", mask);

    SDVariable t = sd.nn.dotProductAttention(sdQ, sdK, sdV, sdMask, true);
    t.norm1("out");

    String err = OpValidation.validate(new TestCase(sd)
            .expectedOutput("out", finalOut)
            .gradCheckSkipVariables("mask")
            .gradientCheck(true));
    // Pass the validation error as the failure message, consistent with the
    // other tests in this file
    assertNull(err, err);
}
 
Example 10
Source File: ReductionOpValidation.java — from the deeplearning4j project (Apache License 2.0)
@Test
public void testDotProductAttentionMultiHeadInputWithMask(){
    // Multi-head attention with a per-batch timestep mask: the rank-2 mask
    // is reshaped and tiled to the attention-weight shape, then applied as
    // a large negative pre-softmax bias. Mask is skipped in the grad check.
    final INDArray keys = Nd4j.rand(new int[]{2, 5, 4, 3});
    final INDArray values = Nd4j.rand(new int[]{2, 5, 4, 3});
    final INDArray query = Nd4j.rand(new int[]{2, 5, 4, 2});
    final INDArray mask = Nd4j.rand(2, 3).gte(0.2).castTo(DataType.DOUBLE);


    final INDArray exec = Nd4j.matmul(keys, query, true, false, false)
            .divi(Math.sqrt(keys.size(-2)));
    // (mask - 1) * 1e9 is 0 for kept positions and -1e9 for masked ones
    exec.addi(Nd4j.tile(mask.reshape(2, 1, 3, 1), 1, 5, 1, 2).sub(1).muli(1e9));
    Nd4j.exec((CustomOp) new SoftMax(exec, exec, -2));
    final INDArray finalOut = Nd4j.matmul(values, exec).norm1();

    SameDiff sd = SameDiff.create();
    SDVariable sdQ = sd.var("q", query);
    SDVariable sdK = sd.var("k", keys);
    SDVariable sdV = sd.var("v", values);
    SDVariable sdMask = sd.constant("mask", mask);


    SDVariable t = sd.nn.dotProductAttention(sdQ, sdK, sdV, sdMask, true);
    t.norm1("out");

    String err = OpValidation.validate(new TestCase(sd)
            .expectedOutput("out", finalOut)
            .gradCheckSkipVariables("mask")
            .gradientCheck(true));
    // Pass the validation error as the failure message, consistent with the
    // other tests in this file
    assertNull(err, err);
}
 
Example 11
Source File: TransformOpValidation.java — from the deeplearning4j project (Apache License 2.0)
@Test
public void testMatMulTensorTranspose() {
    // Batched matmul over rank-5 inputs for every combination of the
    // transposeA / transposeB / transposeResult flags.
    for (boolean transposeA : new boolean[]{false, true}) {
        for (boolean transposeB : new boolean[]{false, true}) {
            for (boolean transposeResult : new boolean[]{false, true}) {
                log.info("Testing with transposeA={}; transposeB={}; transposeResult={};", transposeA, transposeB, transposeResult);
                // Trailing-dim shapes: a is [m, n], b is [k, l], the result
                // is [i, j]. The untransposed base case is
                // [4, 5] x [5, 6] -> [4, 6]; each transpose flag simply
                // swaps its corresponding pair of dimensions. This replaces
                // the original 8-branch if-chain, which enumerated exactly
                // these values.
                final int m = transposeA ? 5 : 4;
                final int n = transposeA ? 4 : 5;
                final int k = transposeB ? 6 : 5;
                final int l = transposeB ? 5 : 6;
                final int i = transposeResult ? 6 : 4;
                final int j = transposeResult ? 4 : 6;

                final INDArray a = Nd4j.rand(new int[]{1, 2, 3, m, n});
                final INDArray b = Nd4j.rand(new int[]{1, 2, 3, k, l});

                final INDArray z = Nd4j.matmul(a, b, transposeA, transposeB, transposeResult);

                // JUnit convention: expected value first, actual second
                assertArrayEquals(new long[]{1, 2, 3, i, j}, z.shape());

                SameDiff sd = SameDiff.create();
                SDVariable sdA = sd.var("a", a);
                SDVariable sdB = sd.var("b", b);
                SDVariable t = sd.mmul(sdA, sdB, transposeA, transposeB, transposeResult);
                // Reduce to a scalar so the gradient check has a scalar loss
                t.norm1("out");

                String err = OpValidation.validate(new TestCase(sd)
                        .gradientCheck(true));
                assertNull(err, err);
            }
        }
    }
}