Java Code Examples for org.nd4j.linalg.api.ndarray.INDArray#norm1()

The following examples show how to use org.nd4j.linalg.api.ndarray.INDArray#norm1() . You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: LayerOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testLayerNorm() {
    // Standardize random input along dimension 1 (per-row mean 0, std 1).
    final INDArray random = Nd4j.rand(DataType.DOUBLE, 10, 4);
    final INDArray standardized = random.ulike();
    Nd4j.getExecutioner().exec(new Standardize(random, standardized, 1));

    // Reference result: layerNorm on standardized input reduces to
    // gain * x + bias, aggregated via norm1 to a single scalar.
    final INDArray gain = Nd4j.rand(DataType.DOUBLE, 4);
    final INDArray bias = Nd4j.rand(DataType.DOUBLE, 4);
    final INDArray res = standardized.mulRowVector(gain).addRowVector(bias);
    final INDArray expOut = res.norm1();

    // Build the equivalent SameDiff graph.
    final int[] axis = new int[]{1};
    SameDiff sd = SameDiff.create();
    SDVariable sdInput = sd.var("input", standardized);
    SDVariable sdGain = sd.var("gain", gain);
    SDVariable sdBias = sd.var("bias", bias);
    SDVariable out = sd.nn.layerNorm(sdInput, sdGain, sdBias, true, axis);
    out.norm1("out");

    // Validate output and gradients; pass err as the failure message so the
    // validation details are shown, consistent with the sibling tests.
    String err = OpValidation.validate(new TestCase(sd)
            .expectedOutput("out", expOut)
            .gradientCheck(true));
    assertNull(err, err);
}
 
Example 2
Source File: LayerOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testLayerNormNoBias() {
    // Build a row-standardized input (dimension 1) from random data.
    final INDArray input = Nd4j.rand(DataType.DOUBLE, 10, 4);
    final INDArray normed = input.ulike();
    Nd4j.getExecutioner().exec(new Standardize(input, normed, 1));

    // Expected value: gain-scaled rows collapsed with an L1 norm.
    final INDArray scale = Nd4j.rand(DataType.DOUBLE, 4);
    final INDArray expOut = normed.mulRowVector(scale).norm1();

    // Same computation expressed as a SameDiff graph (no bias variant).
    final int[] dims = new int[]{1};
    final SameDiff sd = SameDiff.create();
    final SDVariable in = sd.var("input", normed);
    final SDVariable g = sd.var("gain", scale);
    sd.nn.layerNorm(in, g, true, dims).norm1("out");

    final String err = OpValidation.validate(new TestCase(sd)
            .expectedOutput("out", expOut)
            .gradientCheck(true));
    assertNull(err, err);
}
 
Example 3
Source File: TransformOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testStandardize() {
    final INDArray data = Nd4j.rand(new int[]{10, 4});

    // Manual standardization along dimension 1: (x - mean) / std,
    // then reduce to a scalar via the L1 norm.
    final int[] dims = new int[]{1};
    final INDArray mu = data.mean(dims);
    final INDArray sigma = data.std(false, dims);
    final INDArray expOut = data.subColumnVector(mu).divColumnVector(sigma).norm1();

    // Same computation through the SameDiff standardize op.
    final SameDiff sd = SameDiff.create();
    final SDVariable a = sd.var("a", data);
    sd.math.standardize(a, dims).norm1("out");

    final String err = OpValidation.validate(new TestCase(sd)
            .expectedOutput("out", expOut)
            .gradientCheck(true));
    assertNull(err, err);
}
 
Example 4
Source File: TransformOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testStandardizeNoDeviation() {
    final INDArray data = Nd4j.rand(new int[]{10, 4});
    // Force row 1 to a constant so its standard deviation is exactly 0.
    for (int col = 0; col < 4; col++) {
        data.putScalar(1, col, 7);
    }

    final int[] dims = new int[]{1};
    final INDArray mu = data.mean(dims);
    final INDArray sigma = data.std(false, dims);
    // Where std == 0, bump it to 1 to avoid dividing by zero in the reference.
    sigma.addi(sigma.eq(0).castTo(DataType.DOUBLE));

    final INDArray expOut = data.subColumnVector(mu).divColumnVector(sigma).norm1();

    // Same computation through the SameDiff standardize op.
    final SameDiff sd = SameDiff.create();
    final SDVariable a = sd.var("a", data);
    sd.math.standardize(a, dims).norm1("out");

    final String err = OpValidation.validate(new TestCase(sd)
            .expectedOutput("out", expOut)
            .gradientCheck(true));
    assertNull(err, err);
}
 
Example 5
Source File: LayerOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testLayerNormNoDeviation() {
    final INDArray data = Nd4j.rand(DataType.DOUBLE, 10, 4);
    // Make row 1 constant so its standard deviation is zero.
    for (int col = 0; col < 4; col++) {
        data.putScalar(1, col, 7);
    }

    // Standardize along dimension 1 before feeding layerNorm.
    final INDArray normed = data.ulike();
    Nd4j.getExecutioner().exec(new Standardize(data, normed, 1));

    // Reference: gain * x + bias on the standardized rows, L1-reduced.
    final INDArray g = Nd4j.rand(DataType.DOUBLE, 4);
    final INDArray b = Nd4j.rand(DataType.DOUBLE, 4);
    final INDArray expOut = normed.mulRowVector(g).addRowVector(b).norm1();

    final int[] dims = new int[]{1};
    final SameDiff sd = SameDiff.create();
    final SDVariable in = sd.var("input", normed);
    final SDVariable gain = sd.var("gain", g);
    final SDVariable bias = sd.var("bias", b);
    sd.nn.layerNorm(in, gain, bias, true, dims).norm1("out");

    // Exclude the constant (value 7) entries from the gradient check, since
    // the zero-deviation row makes their gradients ill-defined.
    final String err = OpValidation.validate(new TestCase(sd)
            .expectedOutput("out", expOut)
            .gradCheckMask(Collections.singletonMap("input", data.neq(7)))
            .gradientCheck(true));
    assertNull(err, err);
}