Java Code Examples for org.nd4j.linalg.api.ndarray.INDArray#rdiv()

The following examples show how to use org.nd4j.linalg.api.ndarray.INDArray#rdiv(). Each example is drawn from an open source project; the source file and project are noted above it.
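rdiv is "reverse division": where arr.div(x) computes arr / x elementwise, arr.rdiv(x) computes x / arr, with the operands swapped. A minimal sketch of the difference, assuming only that ND4J is on the classpath (the RdivDemo class name is invented for illustration):

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class RdivDemo {
    public static void main(String[] args) {
        INDArray arr = Nd4j.create(new double[] {1, 2, 4, 8});
        INDArray div = arr.div(8);   // arr / 8 -> [0.125, 0.25, 0.5, 1.0]
        INDArray rdiv = arr.rdiv(8); // 8 / arr -> [8.0, 4.0, 2.0, 1.0]
        System.out.println(div);
        System.out.println(rdiv);
        // The -i suffixed variants (divi, rdivi) perform the same ops in place.
    }
}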
Example 1
Source File: SameDiffTests.java    From nd4j with Apache License 2.0
@Test
public void validateProdDiff() {
    Nd4j.getRandom().setSeed(12345);

    INDArray arr = Nd4j.rand(3, 4);

    SameDiff sd = SameDiff.create();
    SDVariable v = sd.var("in", arr);
    SDVariable prod = sd.prod("prod", v);

    double p = arr.prodNumber().doubleValue();
    INDArray out = sd.execAndEndResult();
    assertEquals(arr.prod(Integer.MAX_VALUE), out);

    sd.execBackwards();
    INDArray dLdIn = sd.grad("in").getArr();

    //If L = prod(in)
    //then dL/dIn = prod(in) / in       i.e., the product of the inputs *excluding* in_i, since d/dx(xyzabc) = yzabc

    INDArray exp = arr.rdiv(p);
    assertEquals(exp, dLdIn);
}
 
Example 2
Source File: SameDiffTests.java    From deeplearning4j with Apache License 2.0
@Test
public void validateProdDiff() {
    Nd4j.getRandom().setSeed(12345);

    INDArray arr = Nd4j.rand(3, 4);

    SameDiff sd = SameDiff.create();
    SDVariable v = sd.var("in", arr);
    SDVariable prod = sd.prod("prod", v);

    double p = arr.prodNumber().doubleValue();
    INDArray out = prod.eval();
    assertEquals(arr.prod(Integer.MAX_VALUE), out);

    Map<String,INDArray> g = sd.calculateGradients(Collections.emptyMap(), sd.getVariables().keySet());
    INDArray dLdIn = sd.grad("in").getArr();

    //If L = prod(in)
    //then dL/dIn = prod(in) / in       i.e., the product of the inputs *excluding* in_i, since d/dx(xyzabc) = yzabc

    INDArray exp = arr.rdiv(p);
    assertEquals(exp, dLdIn);
}
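The rdiv(p) call above is the entire gradient computation: for L = prod(in), dL/dIn_i = prod(in) / in_i, so dividing the scalar product by the input array elementwise yields every partial derivative at once. A quick hand check, with values invented for illustration:

INDArray in = Nd4j.create(new double[] {2, 3, 4});
double p = in.prodNumber().doubleValue(); // 24.0
INDArray grad = in.rdiv(p);               // p / in = [12.0, 8.0, 6.0]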
 
Example 3
Source File: MixedDataTypesTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testSimple() {
    Nd4j.create(1); //Trigger ND4J backend initialization
    for (DataType dt : new DataType[] {DataType.DOUBLE, DataType.FLOAT, DataType.HALF, DataType.INT, DataType.LONG}) {
        INDArray arr = Nd4j.ones(dt, 1, 5);
        arr.assign(1.0);
        //None of these ops are in-place; the results are discarded.
        //The test only verifies that each op executes for every data type.
        arr.div(8);
        arr.mul(8);
        arr.sub(8);
        arr.add(8);
        arr.rdiv(8);
        arr.rsub(8);
    }
}
 
Example 4
Source File: NDArrayTestsFortran.java    From nd4j with Apache License 2.0
@Test
public void testRdivScalar() {
    INDArray div = Nd4j.valueArrayOf(new long[] {1, 4}, 4);
    INDArray rdiv = div.rdiv(1);
    INDArray answer = Nd4j.valueArrayOf(new long[] {1, 4}, 0.25);
    assertEquals(answer, rdiv);
}
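As the test shows, rdiv(1) amounts to an elementwise reciprocal. ND4J also has an in-place variant, rdivi (see Example 7 below), which mutates the receiver instead of allocating a new array; a minimal sketch:

INDArray a = Nd4j.valueArrayOf(new long[] {1, 4}, 4.0);
a.rdivi(1.0); // a is now [0.25, 0.25, 0.25, 0.25]; no copy is allocated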
 
Example 5
Source File: LossMSLE.java    From nd4j with Apache License 2.0
@Override
public INDArray computeGradient(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
    if (labels.size(1) != preOutput.size(1)) {
        throw new IllegalArgumentException("Labels array numColumns (size(1) = " + labels.size(1)
                        + ") does not match output layer number of outputs (nOut = " + preOutput.size(1) + ")");
    }
    INDArray output = activationFn.getActivation(preOutput.dup(), true);

    INDArray p1 = output.add(1.0);
    INDArray dlda = p1.rdiv(2.0 / labels.size(1));
    INDArray logRatio = Transforms.log(p1.divi(labels.add(1.0)), false);
    dlda.muli(logRatio);

    if (weights != null) {
        dlda.muliRowVector(weights);
    }

    if (mask != null && LossUtil.isPerOutputMasking(dlda, mask)) {
        //For *most* activation functions: we don't actually need to mask dL/da in addition to masking dL/dz later
        //but: some, like softmax, require both (due to dL/dz_i being a function of dL/da_j, for i != j)
        //We could add a special case for softmax (activationFn instanceof ActivationSoftmax) but that would be
        // error prone - though buy us a tiny bit of performance
        LossUtil.applyMask(dlda, mask);
    }

    //dL/dz
    INDArray gradients = activationFn.backprop(preOutput, dlda).getFirst(); //TODO activation functions with weights

    if (mask != null) {
        LossUtil.applyMask(gradients, mask);
    }

    return gradients;
}
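The rdiv here builds the scalar-over-array factor of the MSLE gradient: p1.rdiv(2.0 / n) evaluates (2/n) / (output + 1) elementwise, and dlda is then scaled by log((output + 1) / (labels + 1)). A standalone sketch of just that factor, with shapes invented for illustration:

INDArray output = Nd4j.rand(2, 3);
long n = output.size(1);                         // number of outputs per example
INDArray factor = output.add(1.0).rdiv(2.0 / n); // (2/n) / (output + 1), elementwise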
 
Example 6
Source File: NDArrayTestsFortran.java    From deeplearning4j with Apache License 2.0
@Test
public void testRdivScalar() {
    INDArray div = Nd4j.valueArrayOf(new long[] {1, 4}, 4.0);
    INDArray rdiv = div.rdiv(1);
    INDArray answer = Nd4j.valueArrayOf(new long[] {1, 4}, 0.25);
    assertEquals(answer, rdiv);
}
 
Example 7
Source File: InvertMatrix.java    From deeplearning4j with Apache License 2.0
/**
 * Inverts a matrix
 * @param arr the array to invert
 * @param inPlace Whether to store the result in {@code arr}
 * @return the inverted matrix
 */
public static INDArray invert(INDArray arr, boolean inPlace) {
    if(arr.rank() == 2 && arr.length() == 1){
        //[1,1] edge case. Matrix inversion: [x] * [1/x] = [1]
        if(inPlace){
            return arr.rdivi(1.0);
        } else {
            return arr.rdiv(1.0);
        }
    }
    if (!arr.isSquare()) {
        throw new IllegalArgumentException("invalid array: must be square matrix");
    }

    //FIX ME: Please (unfinished direct LAPACK getrf/getri path, left commented out in the original):
    /* int[] IPIV = new int[arr.length() + 1];
    int LWORK = arr.length() * arr.length();
    INDArray WORK = Nd4j.create(new double[LWORK]);
    INDArray inverse = inPlace ? arr : arr.dup();
    Nd4j.getBlasWrapper().lapack().getrf(arr);
    Nd4j.getBlasWrapper().lapack().getri(arr.size(0), inverse, arr.size(0), IPIV, WORK, LWORK, 0); */

    RealMatrix rm = CheckUtil.convertToApacheMatrix(arr);
    RealMatrix rmInverse = new LUDecomposition(rm).getSolver().getInverse();

    INDArray inverse = CheckUtil.convertFromApacheMatrix(rmInverse, arr.dataType());
    if (inPlace)
        arr.assign(inverse);
    return inverse;
}
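A short usage sketch of the [1,1] edge case, where inversion degenerates to a scalar reciprocal and rdiv(1.0)/rdivi(1.0) does all the work (values invented for illustration):

INDArray m = Nd4j.create(new double[][] {{4.0}});
INDArray inv = InvertMatrix.invert(m, false); // [[0.25]], computed as m.rdiv(1.0)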
 
Example 8
Source File: LossMSLE.java    From deeplearning4j with Apache License 2.0
@Override
public INDArray computeGradient(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
    if(!labels.equalShapes(preOutput)){
        Preconditions.throwEx("Labels and preOutput must have equal shapes: got shapes %s vs %s", labels.shape(), preOutput.shape());
    }
    labels = labels.castTo(preOutput.dataType());   //No-op if already correct dtype
    INDArray output = activationFn.getActivation(preOutput.dup(), true);

    INDArray p1 = output.add(1.0);
    INDArray dlda = p1.rdiv(2.0 / labels.size(1));
    INDArray logRatio = Transforms.log(p1.divi(labels.add(1.0)), false);
    dlda.muli(logRatio);

    if (weights != null) {
        dlda.muliRowVector(weights.castTo(dlda.dataType()));
    }

    if (mask != null && LossUtil.isPerOutputMasking(dlda, mask)) {
        //For *most* activation functions: we don't actually need to mask dL/da in addition to masking dL/dz later
        //but: some, like softmax, require both (due to dL/dz_i being a function of dL/da_j, for i != j)
        //We could add a special case for softmax (activationFn instanceof ActivationSoftmax) but that would be
        // error prone - though buy us a tiny bit of performance
        LossUtil.applyMask(dlda, mask);
    }

    //dL/dz
    INDArray gradients = activationFn.backprop(preOutput, dlda).getFirst(); //TODO activation functions with weights

    if (mask != null) {
        LossUtil.applyMask(gradients, mask);
    }

    return gradients;
}
 
Example 9
Source File: L2Vertex.java    From deeplearning4j with Apache License 2.0
@Override
public Pair<Gradient, INDArray[]> doBackward(boolean tbptt, LayerWorkspaceMgr workspaceMgr) {
    if (!canDoBackward())
        throw new IllegalStateException("Cannot do backward pass: error not set");

    INDArray a = inputs[0];
    INDArray b = inputs[1];
    INDArray out = doForward(tbptt, workspaceMgr);
    Transforms.max(out, eps, false); //Floor the distance at eps so the rdiv below never divides by zero

    INDArray dLdlambda = epsilon; //dL/dlambda aka 'epsilon' - from layer above

    INDArray sNegHalf = out.rdiv(1.0); //s^(-1/2) = 1.0 / s^(1/2) = 1.0 / out

    INDArray diff;
    try(MemoryWorkspace ws = workspaceMgr.notifyScopeBorrowed(ArrayType.ACTIVATION_GRAD)){
        diff = a.sub(b);
    }

    INDArray first = dLdlambda.mul(sNegHalf); //Column vector for all cases

    INDArray dLda;
    INDArray dLdb;
    if (a.rank() == 2) {
        //2d case (MLPs etc)
        dLda = diff.muliColumnVector(first);
        try(MemoryWorkspace ws = workspaceMgr.notifyScopeBorrowed(ArrayType.ACTIVATION_GRAD)) {
            dLdb = dLda.neg();
        }
    } else {
        //RNN and CNN case - Broadcast along dimension 0
        dLda = Nd4j.getExecutioner().exec(new BroadcastMulOp(diff, first, diff, 0));
        try(MemoryWorkspace ws = workspaceMgr.notifyScopeBorrowed(ArrayType.ACTIVATION_GRAD)) {
            dLdb = dLda.neg();
        }
    }

    return new Pair<>(null, new INDArray[] {dLda, dLdb});
}
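The rdiv(1.0) above supplies the 1/out factor of the chain rule: with s = sum_j (a_j - b_j)^2 and out = sqrt(s), d(out)/da_j = (a_j - b_j) / out, so dL/da = epsilon * (a - b) / out and dL/db is its negation. A condensed sketch of the 2d branch with the workspace plumbing stripped out (shapes invented for illustration); Example 10 below performs essentially this check against doBackward:

INDArray a = Nd4j.rand(5, 2);
INDArray b = Nd4j.rand(5, 2);
INDArray epsilon = Nd4j.rand(5, 1);                                  // dL/dlambda from the layer above
INDArray diff = a.sub(b);
INDArray out = Transforms.sqrt(diff.mul(diff).sum(1)).reshape(5, 1); // [5,1] L2 distances
INDArray dLda = diff.mulColumnVector(epsilon.mul(out.rdiv(1.0)));    // epsilon * (a - b) / out
INDArray dLdb = dLda.neg();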
 
Example 10
Source File: TestGraphNodes.java    From deeplearning4j with Apache License 2.0
@Test
public void testL2Node() {
    Nd4j.getRandom().setSeed(12345);
    GraphVertex l2 = new L2Vertex(null, "", -1, 1e-8, Nd4j.dataType());

    INDArray in1 = Nd4j.rand(5, 2);
    INDArray in2 = Nd4j.rand(5, 2);

    l2.setInputs(in1, in2);
    INDArray out = l2.doForward(false, LayerWorkspaceMgr.noWorkspaces());

    INDArray expOut = Nd4j.create(5, 1);
    for (int i = 0; i < 5; i++) {
        double d2 = 0.0;
        for (int j = 0; j < in1.size(1); j++) {
            double temp = (in1.getDouble(i, j) - in2.getDouble(i, j));
            d2 += temp * temp;
        }
        d2 = Math.sqrt(d2);
        expOut.putScalar(i, 0, d2);
    }

    assertEquals(expOut, out);

    INDArray epsilon = Nd4j.rand(5, 1); //dL/dlambda
    INDArray diff = in1.sub(in2);
    //Out == sqrt(s) = s^1/2. Therefore: s^(-1/2) = 1/out
    INDArray sNegHalf = out.rdiv(1.0);

    INDArray dLda = diff.mulColumnVector(epsilon.mul(sNegHalf));
    INDArray dLdb = dLda.neg();

    l2.setEpsilon(epsilon);
    Pair<Gradient, INDArray[]> p = l2.doBackward(false, LayerWorkspaceMgr.noWorkspaces());
    assertEquals(dLda, p.getSecond()[0]);
    assertEquals(dLdb, p.getSecond()[1]);
}