org.nd4j.autodiff.samediff.SDVariable Java Examples

The following examples show how to use org.nd4j.autodiff.samediff.SDVariable. You can vote up the examples you find useful or vote down those you don't, and follow the links above each example to view the original project or source file. You may also check out the related API usage on the sidebar.
Example #1
Source File: LayerOpValidation.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testLayerNormNoBias() {
    // Standardize the random input along dimension 1 so layer norm's own
    // normalization step is a no-op and only the gain scaling remains.
    final INDArray source = Nd4j.rand(DataType.DOUBLE, 10, 4);
    final INDArray normalized = source.ulike();
    Nd4j.getExecutioner().exec(new Standardize(source, normalized, 1));

    // Expected value: per-row scaling by the gain vector, reduced to a scalar via L1 norm.
    final INDArray gainArr = Nd4j.rand(DataType.DOUBLE, 4);
    final INDArray expOut = normalized.mulRowVector(gainArr).norm1();

    SameDiff sd = SameDiff.create();
    SDVariable sdInput = sd.var("input", normalized);
    SDVariable sdGain = sd.var("gain", gainArr);
    final int[] axis = {1};
    sd.nn.layerNorm(sdInput, sdGain, true, axis).norm1("out");

    String err = OpValidation.validate(new TestCase(sd)
            .expectedOutput("out", expOut)
            .gradientCheck(true));
    assertNull(err, err);
}
 
Example #2
Source File: TensorFlowImportTest.java    From nd4j with Apache License 2.0 6 votes vote down vote up
@Test
@Ignore
public void importGraph1() throws Exception {
    // Import a two-variable TF graph: a "zeros" tensor and a "ones" tensor.
    SameDiff graph = TFGraphMapper.getInstance().importGraph(
            new ClassPathResource("tf_graphs/max_add_2.pb.txt").getInputStream());
    assertNotNull(graph);
    assertEquals(2, graph.variableMap().size());

    SDVariable zeros = graph.variableMap().get("zeros");
    SDVariable ones = graph.variableMap().get("ones");
    assertNotNull(zeros);
    assertNotNull(ones);
    assertNotNull(zeros.getArr());
    assertNotNull(ones.getArr());

    // "zeros" sums to 0; "ones" holds twelve 1s, so it sums to 12.
    assertEquals(0.0, zeros.getArr().sumNumber().doubleValue(), 1e-5);
    assertEquals(12.0, ones.getArr().sumNumber().doubleValue(), 1e-5);
}
 
Example #3
Source File: ReductionOpValidation.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testDotProductAttentionMultiHeadInput(){
    // Multi-head input: [batch=2, heads=5, features=4, timesteps].
    final INDArray keys = Nd4j.rand(new int[]{2, 5, 4, 3});
    final INDArray values = Nd4j.rand(new int[]{2, 5, 4, 3});
    final INDArray query = Nd4j.rand(new int[]{2, 5, 4, 1});

    // Reference computation: scaled dot-product attention done manually.
    // scores = K^T * Q / sqrt(d_k); note divi/SoftMax mutate `exec` in place,
    // so statement order here matters.
    final INDArray exec = Nd4j.matmul(keys, query, true, false, false)
            .divi(Math.sqrt(keys.size(-2)));
    Nd4j.exec((CustomOp) new SoftMax(exec, exec, -2));
    // Attention output reduced to a scalar via L1 norm for comparison.
    final INDArray finalOut = Nd4j.matmul(values, exec).norm1();

    SameDiff sd = SameDiff.create();
    SDVariable sdQ = sd.var("q", query);
    SDVariable sdK = sd.var("k", keys);
    SDVariable sdV = sd.var("v", values);

    // `true` enables scaling by 1/sqrt(d_k), matching the manual reference above.
    SDVariable t = sd.nn.dotProductAttention(sdQ, sdK, sdV, null, true);
    t.norm1("out");

    String err = OpValidation.validate(new TestCase(sd)
            .expectedOutput("out", finalOut)
            .gradientCheck(true));
    assertNull(err);
}
 
Example #4
Source File: ClipByNorm.java    From nd4j with Apache License 2.0 6 votes vote down vote up
@Override
    public List<SDVariable> doDiff(List<SDVariable> grad) {
        // Gradient of clip-by-norm: identity where norm2(x) < clipValue,
        // otherwise the derivative of clipValue * x / norm2(x).
        int inputRank = Shape.rankFromShape(arg().getShape());
        SDVariable norm = f().norm2(arg(), dimensions);
        SDVariable normBC = f().reductionBroadcastableWithOrigShape(inputRank, dimensions, norm);
        SDVariable clippedMask = f().gte(normBC, clipValue);   // 1 where clipping was applied
        SDVariable passThroughMask = clippedMask.rsub(1.0);    // 1 where input passed unchanged

        // d(clipValue * x / norm2(x))/dx = clipValue * (1/norm2(x) - x^2 / norm2(x)^3)
        SDVariable clippedGrad = f().neg(f().square(arg()).div(f().cube(normBC)))
                .add(normBC.rdiv(1.0))
                .mul(clipValue)
                .mul(clippedMask);

        SDVariable dOutdIn = passThroughMask.add(clippedGrad);
        return Arrays.asList(dOutdIn.mul(grad.get(0)));
    }
 
Example #5
Source File: TransformOpValidation.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testUpsampling3dBp() {
    Nd4j.getRandom().setSeed(12345);

    // Run the gradient check for both data layouts (channels-first and channels-last input shapes).
    for (boolean channelsFirst : new boolean[]{true, false}) {
        SameDiff sd = SameDiff.create();

        INDArray inArr = channelsFirst
                ? Nd4j.rand(DataType.DOUBLE, 2, 1, 5, 5, 5)
                : Nd4j.rand(DataType.DOUBLE, 2, 5, 5, 5, 1);
        SDVariable input = sd.var(inArr);

        final int scale = 2;  // same upsampling factor along depth, height and width
        SDVariable out = new Upsampling3d(sd, input, true, scale, scale, scale)
                .outputVariable().std(true);
        out.markAsLoss();

        String err = OpValidation.validate(new TestCase(sd)
                .gradientCheck(true));
        assertNull(err);
    }
}
 
Example #6
Source File: Eye.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
public Eye(SameDiff sameDiff,  int numRows, int numCols, DataType dataType, int[] batchDimension) {
    // Identity-matrix op: numRows x numCols, optionally batched, with an explicit output dtype.
    super(null, sameDiff, new SDVariable[] {}, false);
    this.dataType = dataType;
    this.batchDimension = batchDimension;
    this.numCols = numCols;
    this.numRows = numRows;
    addArgs();  // registers rows/cols/batch dims as op arguments
}
 
Example #7
Source File: LSTMBlock.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
public LSTMBlock(@NonNull SameDiff sameDiff, SDVariable maxTSLength, SDVariable x, SDVariable cLast, SDVariable yLast, LSTMWeights weights, LSTMConfiguration configuration) {
    // Inputs ordered as the native op expects: x, max timestep length,
    // previous cell state, previous output, then the weight arrays.
    super(null, sameDiff, weights.argsWithInputs(x, maxTSLength, cLast, yLast));
    this.weights = weights;
    this.configuration = configuration;
    addIArgument(configuration.iArgs(true));  // integer configuration args
    addTArgument(configuration.tArgs());      // floating-point configuration args
}
 
Example #8
Source File: ClipByAvgNorm.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
public ClipByAvgNorm(SameDiff sameDiff, SDVariable x, double clipValue, int... dimensions) {
    // Clips x so its average norm over the given dimensions does not exceed clipValue.
    super("clipbyavgnorm", sameDiff, new SDVariable[]{x});
    this.dimensions = dimensions;
    this.clipValue = clipValue;
    addTArgument(clipValue);   // threshold goes to the floating-point arg list
    addIArgument(dimensions);  // reduction dimensions go to the integer arg list
}
 
Example #9
Source File: ShapeOpValidation.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testReshape() {
    SameDiff sameDiff = SameDiff.create();
    INDArray arr = Transforms.sigmoid(Nd4j.linspace(-5, 6, 12)).reshape(3, 4);
    SDVariable x = sameDiff.var("x", arr);

    // Reshape 3x4 -> 4x3; with c-ordering the flat element sequence is unchanged.
    SDVariable reshaped = sameDiff.reshape(x, 4, 3);
    SDVariable loss = sameDiff.standardDeviation(reshaped, true);

    INDArray expected = arr.dup('c').reshape('c', 4, 3);

    String err = OpValidation.validate(new TestCase(sameDiff)
            .expectedOutput(reshaped.name(), expected));
    assertNull(err);
}
 
Example #10
Source File: SDLinalgTest.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testLu() {
    // The input is already upper triangular, so its packed LU decomposition
    // (L\U with unit-diagonal L) equals the input matrix itself.
    double[] data = {1., 2., 3., 0., 2., 3., 0., 0., 7.};
    SDVariable sdInput = sameDiff.var(Nd4j.createFromArray(data).reshape(3, 3));
    INDArray expected = Nd4j.createFromArray(data).reshape(3, 3);

    SDVariable out = sameDiff.linalg().lu("lu", sdInput);
    assertEquals(expected, out.eval());
}
 
Example #11
Source File: Eye.java    From nd4j with Apache License 2.0 5 votes vote down vote up
public Eye(SameDiff sameDiff,  int numRows, int numCols, int[] batchDimension) {
    // Identity-matrix op: numRows x numCols, optionally repeated over batch dimensions.
    super(null, sameDiff, new SDVariable[] {}, false);
    this.batchDimension = batchDimension;
    this.numCols = numCols;
    this.numRows = numRows;
    addArgs();  // registers rows/cols/batch dims as op arguments
}
 
Example #12
Source File: ShapeOpValidation.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testSlice2d() {
    INDArray inArr = Nd4j.linspace(1, 12, 12).reshape('c', 3, 4);

    SameDiff sd = SameDiff.create();
    SDVariable in = sd.var("in", inArr);
    // Full-extent slice returns the whole array; the second slice takes a 2x2
    // sub-block starting at row 1, column 2.
    SDVariable slice_full = sd.slice(in, new int[]{0, 0}, new int[]{3, 4});
    SDVariable subPart = sd.slice(in, new int[]{1, 2}, new int[]{2, 2});

    Map<String, INDArray> outputs = sd.outputAll(Collections.emptyMap());

    assertEquals(inArr, outputs.get(slice_full.name()));
    assertEquals(inArr.get(interval(1, 3), interval(2, 4)), outputs.get(subPart.name()));
}
 
Example #13
Source File: ATanh.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Override
public List<SDVariable> doDiff(List<SDVariable> i_v) {
    // d(atanh(x))/dx = 1 / (1 - x^2); the chain rule multiplies by the incoming gradient.
    SDVariable x = arg();
    SDVariable oneMinusXSquared = sameDiff.math().square(x).rsub(1.0);
    SDVariable localGrad = oneMinusXSquared.rdiv(1.0);
    return Arrays.asList(localGrad.mul(i_v.get(0)));
}
 
Example #14
Source File: DifferentialFunctionFactory.java    From nd4j with Apache License 2.0 4 votes vote down vote up
public SDVariable add(SDVariable differentialFunction, SDVariable i_v) {
    // Element-wise addition; first checks the input belongs to this factory's SameDiff instance.
    validateDifferentialFunctionsameDiff(differentialFunction);
    SDVariable[] inputs = new SDVariable[]{differentialFunction, i_v};
    AddOp op = new AddOp(sameDiff(), inputs, false);
    return op.outputVariables()[0];
}
 
Example #15
Source File: ThresholdReluBp.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
public ThresholdReluBp(SameDiff sd, SDVariable input, SDVariable gradient, double cutoff){
    // Backprop op for thresholded ReLU; takes the forward input and the incoming gradient.
    super(sd, new SDVariable[]{input, gradient});
    this.cutoff = cutoff;
    addTArgument(this.cutoff);  // threshold passed to the native op as a floating-point arg
}
 
Example #16
Source File: LessThanOrEqual.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Override
public List<SDVariable> doDiff(List<SDVariable> f1) {
    // The comparison's output is piecewise constant (not continuously differentiable,
    // but zero almost everywhere), so both input gradients are zeros.
    SDVariable gradX = sameDiff.zerosLike(args()[0]);
    SDVariable gradY = sameDiff.zerosLike(args()[1]);
    return Arrays.asList(gradX, gradY);
}
 
Example #17
Source File: RemainderOp.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Override
public List<SDVariable> doDiff(List<SDVariable> f1) {
    // Gradient is not implemented for this op; callers receive null.
    // NOTE(review): sibling ops signal this by throwing UnsupportedOperationException —
    // confirm no caller depends on the null return before aligning with that convention.
    return null;
}
 
Example #18
Source File: ScalarNotEquals.java    From nd4j with Apache License 2.0 4 votes vote down vote up
/**
 * Element-wise "not equals" comparison of {@code i_v} against a scalar.
 *
 * @param sameDiff SameDiff instance this op belongs to
 * @param i_v      input variable whose elements are compared
 * @param scalar   scalar value compared against each element
 * @param inPlace  whether the op may write its result into the input buffer
 */
public ScalarNotEquals(SameDiff sameDiff, SDVariable i_v, Number scalar, boolean inPlace) {
    super(sameDiff, i_v, scalar, inPlace);
}
 
Example #19
Source File: ELU.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
public ELU(SameDiff sameDiff, SDVariable i_v) {
    // ELU activation using the default alpha coefficient.
    super(sameDiff, new SDVariable[]{i_v});
    alpha = DEFAULT_ALPHA;
    addTArgument(DEFAULT_ALPHA);  // alpha is passed to the native op as a floating-point arg
}
 
Example #20
Source File: FirstIndex.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Override
public List<SDVariable> doDiff(List<SDVariable> f1) {
    // An index-returning reduction is piecewise constant, so its gradient is zero.
    SDVariable zeroGrad = sameDiff.zerosLike(arg());
    return Collections.singletonList(zeroGrad);
}
 
Example #21
Source File: Max.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
/**
 * Pairwise max op over two inputs along the given dimensions.
 *
 * @param sameDiff   SameDiff instance this op belongs to
 * @param i_v        first input variable
 * @param i_v2       second input variable
 * @param dimensions dimensions the op applies over
 */
public Max(SameDiff sameDiff, SDVariable i_v, SDVariable i_v2, int[] dimensions) {
    super(sameDiff, i_v, i_v2, dimensions);
}
 
Example #22
Source File: ScalarGreaterThan.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
/**
 * Element-wise "greater than" comparison of {@code i_v} against a scalar.
 *
 * @param sameDiff  SameDiff instance this op belongs to
 * @param i_v       input variable whose elements are compared
 * @param scalar    scalar value compared against each element
 * @param inPlace   whether the op may write its result into the input buffer
 * @param extraArgs additional op arguments forwarded to the base class
 */
public ScalarGreaterThan(SameDiff sameDiff, SDVariable i_v, Number scalar, boolean inPlace, Object[] extraArgs) {
    super(sameDiff, i_v, scalar, inPlace, extraArgs);
}
 
Example #23
Source File: RSubBpOp.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
public RSubBpOp(SameDiff sameDiff, SDVariable x, SDVariable y, SDVariable eps) {
    // Backprop op for reverse subtraction; eps carries the incoming gradient.
    super(sameDiff, x, y, eps);
}
 
Example #24
Source File: Conv3DDerivative.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
/**
 * Backprop (derivative) op for 3D convolution.
 *
 * @param sameDiff       SameDiff instance this op belongs to
 * @param inputFunctions input variables (forward inputs plus incoming gradient)
 * @param conv3DConfig   convolution configuration (kernel, stride, padding, etc.)
 */
@Builder(builderMethodName = "derivativeBuilder")
public Conv3DDerivative(SameDiff sameDiff, SDVariable[] inputFunctions, Conv3DConfig conv3DConfig) {
    super(sameDiff, inputFunctions, conv3DConfig);
}
 
Example #25
Source File: BroadcastMin.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
/**
 * Broadcast element-wise min of two inputs along the given dimension(s).
 *
 * @param sameDiff  SameDiff instance this op belongs to
 * @param i_v1      first input variable
 * @param i_v2      second input variable (broadcast against the first)
 * @param dimension dimension(s) along which the broadcast is applied
 * @param extraArgs additional op arguments forwarded to the base class
 */
public BroadcastMin(SameDiff sameDiff, SDVariable i_v1, SDVariable i_v2, int[] dimension, Object[] extraArgs) {
    super(sameDiff, i_v1, i_v2, dimension, extraArgs);
}
 
Example #26
Source File: BitwiseAnd.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Override
public List<SDVariable> doDiff(List<SDVariable> i_v) {
    // No gradient is defined for this bitwise op; differentiation is unsupported.
    String message = "Not yet implemented: " + opName();
    throw new UnsupportedOperationException(message);
}
 
Example #27
Source File: Linear.java    From nd4j with Apache License 2.0 4 votes vote down vote up
@Override
public List<SDVariable> doDiff(List<SDVariable> f1) {
    // Execute this op's SameDiff graph first, then delegate differentiation
    // to the wrapped forward function. Order matters: `forward` must have been
    // run before its doDiff is called.
    execSameDiff();
    return forward.doDiff(f1);
}
 
Example #28
Source File: ScalarSetValue.java    From nd4j with Apache License 2.0 4 votes vote down vote up
/**
 * Scalar "set value" op applied element-wise to {@code i_v}.
 *
 * @param sameDiff  SameDiff instance this op belongs to
 * @param i_v       input variable the op is applied to
 * @param scalar    scalar value used by the op
 * @param extraArgs additional op arguments forwarded to the base class
 */
public ScalarSetValue(SameDiff sameDiff, SDVariable i_v, Number scalar, Object[] extraArgs) {
    super(sameDiff, i_v, scalar, extraArgs);
}
 
Example #29
Source File: OldMin.java    From nd4j with Apache License 2.0 4 votes vote down vote up
/**
 * Pairwise element-wise min of two inputs (legacy op).
 *
 * @param sameDiff SameDiff instance this op belongs to
 * @param i_v1     first input variable
 * @param i_v2     second input variable
 * @param inPlace  whether the op may write its result into the input buffer
 */
public OldMin(SameDiff sameDiff, SDVariable i_v1, SDVariable i_v2, boolean inPlace) {
    super(sameDiff, i_v1, i_v2, inPlace);
}
 
Example #30
Source File: BroadcastMin.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
public BroadcastMin(SameDiff sameDiff, SDVariable i_v, long[] shape, boolean inPlace, int[] dimension, Object[] extraArgs) {
    super(sameDiff, i_v, shape, inPlace, dimension, extraArgs);
}