Java Code Examples for org.nd4j.linalg.api.ndarray.INDArray#mmuli()

The following examples show how to use org.nd4j.linalg.api.ndarray.INDArray#mmuli(). You can vote up the examples you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example 1
Source File: LapackTest.java    From nd4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testQRRect() {
    // Rectangular 4x3 input in column-major ('f') order
    INDArray input = Nd4j.create(new double[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12});
    input = input.reshape('f', 4, 3);
    INDArray expected = Nd4j.create(input.shape());
    Nd4j.copy(input, expected);

    // geqrf overwrites input with Q and fills r with the upper-triangular factor
    INDArray r = Nd4j.create(input.columns(), input.columns());
    Nd4j.getBlasWrapper().lapack().geqrf(input, r);

    // Q * R must reconstruct the original matrix (to float tolerance)
    input.mmuli(r);
    expected.subi(input);
    DataBuffer residual = expected.data();
    for (int i = 0; i < residual.length(); i++) {
        assertEquals(0, residual.getFloat(i), 1e-5);
    }
}
 
Example 2
Source File: MMulTranspose.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
/**
 * Executes the matrix multiplication A x B.
 * If transposeA/transposeB are set, the corresponding operand is transposed internally
 * before multiplying; if transposeResult is set, the product is transposed before being
 * returned — so the caller-supplied result array (if any) must NOT be pre-transposed.
 *
 * @param a      left operand
 * @param b      right operand
 * @param result destination array (pre result-transpose, if required); may be null
 * @return the (possibly transposed) product
 */
public INDArray exec(INDArray a, INDArray b, INDArray result) {
    INDArray left = transposeIfReq(transposeA, a);
    INDArray right = transposeIfReq(transposeB, b);

    if (result == null) {
        // No destination given: allocate via mmul, then apply the result transpose if needed
        return transposeIfReq(transposeResult, left.mmul(right));
    }

    // Destination given: multiply into it, transposing the view afterwards if requested
    INDArray out = left.mmuli(right, result);
    return transposeResult ? out.transpose() : out;
}
 
Example 3
Source File: LapackTest.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testCholeskyL() {
    // Symmetric positive-definite 3x3 matrix
    INDArray matrix = Nd4j.create(new double[] {2, -1, 1, -1, 2, -1, 1, -1, 2,});
    matrix = matrix.reshape('c', 3, 3);
    INDArray expected = Nd4j.create(matrix.dataType(), matrix.shape());
    Nd4j.copy(matrix, expected);

    // In-place Cholesky factorization: matrix now holds the lower-triangular factor L
    Nd4j.getBlasWrapper().lapack().potrf(matrix, true);

    // L * L^T should reconstruct the original matrix (to float tolerance)
    matrix.mmuli(matrix.transpose());
    expected.subi(matrix);
    DataBuffer residual = expected.data();
    for (int i = 0; i < residual.length(); i++) {
        assertEquals(0, residual.getFloat(i), 1e-5);
    }
}
 
Example 4
Source File: LapackTest.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testQRRect() {
    // Rectangular 4x3 input in column-major ('f') order
    INDArray input = Nd4j.create(new double[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12});
    input = input.reshape('f', 4, 3);
    INDArray expected = Nd4j.create(input.dataType(), input.shape());
    Nd4j.copy(input, expected);

    // geqrf overwrites input with Q and fills r with the upper-triangular factor
    INDArray r = Nd4j.create(input.dataType(), input.columns(), input.columns());
    Nd4j.getBlasWrapper().lapack().geqrf(input, r);

    // Q * R must reconstruct the original matrix (to float tolerance)
    input.mmuli(r);
    expected.subi(input);
    DataBuffer residual = expected.data();
    for (int i = 0; i < residual.length(); i++) {
        assertEquals(0, residual.getFloat(i), 1e-5);
    }
}
 
Example 5
Source File: LapackTest.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testQRSquare() {
    // Square 3x3 input in row-major ('c') order
    INDArray input = Nd4j.create(new double[] {1, 2, 3, 4, 5, 6, 7, 8, 9});
    input = input.reshape('c', 3, 3);
    INDArray expected = Nd4j.create(input.dataType(), input.shape());
    Nd4j.copy(input, expected);
    INDArray r = Nd4j.create(input.dataType(), input.columns(), input.columns());

    // geqrf overwrites input with Q and fills r with the upper-triangular factor
    Nd4j.getBlasWrapper().lapack().geqrf(input, r);

    // Q * R must reconstruct the original matrix (to float tolerance)
    input.mmuli(r);
    expected.subi(input);
    DataBuffer residual = expected.data();
    for (int i = 0; i < residual.length(); i++) {
        assertEquals(0, residual.getFloat(i), 1e-5);
    }
}
 
Example 6
Source File: LapackTest.java    From nd4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testCholeskyL() {
    // Symmetric positive-definite 3x3 matrix
    INDArray matrix = Nd4j.create(new double[] {2, -1, 1, -1, 2, -1, 1, -1, 2,});
    matrix = matrix.reshape('c', 3, 3);
    INDArray expected = Nd4j.create(matrix.shape());
    Nd4j.copy(matrix, expected);

    // In-place Cholesky factorization: matrix now holds the lower-triangular factor L
    Nd4j.getBlasWrapper().lapack().potrf(matrix, true);

    // L * L^T should reconstruct the original matrix (to float tolerance)
    matrix.mmuli(matrix.transpose());
    expected.subi(matrix);
    DataBuffer residual = expected.data();
    for (int i = 0; i < residual.length(); i++) {
        assertEquals(0, residual.getFloat(i), 1e-5);
    }
}
 
Example 7
Source File: LapackTest.java    From nd4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testQRSquare() {
    // Square 3x3 input in row-major ('c') order
    INDArray input = Nd4j.create(new double[] {1, 2, 3, 4, 5, 6, 7, 8, 9});
    input = input.reshape('c', 3, 3);
    INDArray expected = Nd4j.create(input.shape());
    Nd4j.copy(input, expected);
    INDArray r = Nd4j.create(input.columns(), input.columns());

    // geqrf overwrites input with Q and fills r with the upper-triangular factor
    Nd4j.getBlasWrapper().lapack().geqrf(input, r);

    // Q * R must reconstruct the original matrix (to float tolerance)
    input.mmuli(r);
    expected.subi(input);
    DataBuffer residual = expected.data();
    for (int i = 0; i < residual.length(); i++) {
        assertEquals(0, residual.getFloat(i), 1e-5);
    }
}
 
Example 8
Source File: MmulBug.java    From nd4j with Apache License 2.0 5 votes vote down vote up
@Test
public void simpleTest() {
    // 4x1 column vector reshaped to 2x2 ('c' order)
    INDArray m1 = Nd4j.create(new double[][] {{1.0}, {2.0}, {3.0}, {4.0}});
    m1 = m1.reshape(2, 2);

    // 1x4 row vector reshaped to 2x2, forced to 'f' order to exercise mixed-order mmul
    INDArray m2 = Nd4j.create(new double[][] {{1.0, 2.0, 3.0, 4.0},});
    m2 = m2.reshape(2, 2);
    m2.setOrder('f');

    //mmul gives the correct result
    INDArray correctResult = m1.mmul(m2);
    System.out.println("================");
    System.out.println(m1);
    System.out.println(m2);
    System.out.println(correctResult);
    System.out.println("================");

    // mmul into an explicit 'c'-order destination must match
    INDArray newResult = Nd4j.zeros(correctResult.shape(), 'c');
    m1.mmul(m2, newResult);
    assertEquals(correctResult, newResult);

    //But not so mmuli (which is somewhat mixed): in-place multiply writing into m1 itself
    INDArray target = m1.mmuli(m2, m1);
    // assertEquals(expected, actual) reports both values on failure,
    // unlike assertEquals(true, a.equals(b)) which only reports the boolean
    assertEquals(correctResult, target);
    assertEquals(correctResult, m1);
}
 
Example 9
Source File: MmulBug.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void simpleTest() {
    // 4x1 column vector reshaped to 2x2 ('c' order)
    INDArray m1 = Nd4j.create(new double[][] {{1.0}, {2.0}, {3.0}, {4.0}});
    m1 = m1.reshape(2, 2);

    // 1x4 row vector reshaped to 2x2, forced to 'f' order to exercise mixed-order mmul
    INDArray m2 = Nd4j.create(new double[][] {{1.0, 2.0, 3.0, 4.0},});
    m2 = m2.reshape(2, 2);
    m2.setOrder('f');

    //mmul gives the correct result
    INDArray correctResult = m1.mmul(m2);
    System.out.println("================");
    System.out.println(m1);
    System.out.println(m2);
    System.out.println(correctResult);
    System.out.println("================");

    // mmul into an explicit 'c'-order destination must match
    INDArray newResult = Nd4j.create(DataType.DOUBLE, correctResult.shape(), 'c');
    m1.mmul(m2, newResult);
    assertEquals(correctResult, newResult);

    //But not so mmuli (which is somewhat mixed): in-place multiply writing into m1 itself
    INDArray target = m1.mmuli(m2, m1);
    // assertEquals(expected, actual) reports both values on failure,
    // unlike assertEquals(true, a.equals(b)) which only reports the boolean
    assertEquals(correctResult, target);
    assertEquals(correctResult, m1);
}
 
Example 10
Source File: BlasTests.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void simpleTest() {
    // 4x1 column vector reshaped to 2x2 ('c' order)
    INDArray m1 = Nd4j.create(new double[][]{{1.0}, {2.0}, {3.0}, {4.0}});
    m1 = m1.reshape(2, 2);

    // 1x4 row vector reshaped to 2x2, forced to 'f' order to exercise mixed-order mmul
    INDArray m2 = Nd4j.create(new double[][]{{1.0, 2.0, 3.0, 4.0},});
    m2 = m2.reshape(2, 2);
    m2.setOrder('f');

    //mmul gives the correct result
    INDArray correctResult = m1.mmul(m2);
    System.out.println("================");
    System.out.println(m1);
    System.out.println(m2);
    System.out.println(correctResult);
    System.out.println("================");

    // mmul into an explicit 'c'-order destination must match
    INDArray newResult = Nd4j.create(DataType.DOUBLE, correctResult.shape(), 'c');
    m1.mmul(m2, newResult);
    assertEquals(correctResult, newResult);

    //But not so mmuli (which is somewhat mixed): in-place multiply writing into m1 itself
    INDArray target = m1.mmuli(m2, m1);
    // assertEquals(expected, actual) reports both values on failure,
    // unlike assertEquals(true, a.equals(b)) which only reports the boolean
    assertEquals(correctResult, target);
    assertEquals(correctResult, m1);
}
 
Example 11
Source File: Transforms.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
/**
 * Raises a square matrix to a power <i>n</i>, which can be positive, negative, or zero.
 * The behavior is similar to the numpy matrix_power() function.  The algorithm uses
 * repeated squarings to minimize the number of mmul() operations needed
 * <p>If <i>n</i> is zero, the identity matrix is returned.</p>
 * <p>If <i>n</i> is negative, the matrix is inverted and raised to the abs(n) power.</p>
 *
 * @param in  A square matrix to raise to an integer power, which will be changed if dup is false.
 * @param n   The integer power to raise the matrix to.
 * @param dup If dup is true, the original input is unchanged.
 * @return The result of raising <i>in</i> to the <i>n</i>th power.
 */
public static INDArray mpow(INDArray in, int n, boolean dup) {
    Preconditions.checkState(in.isMatrix() && in.isSquare(), "Input must be a square matrix: got input with shape %s", in.shape());
    if (n == 0) {
        // A^0 == I by convention
        if (dup)
            return Nd4j.eye(in.rows());
        else
            return in.assign(Nd4j.eye(in.rows()));
    }
    INDArray temp;
    if (n < 0) {
        // Negative power: raise the inverse to the absolute exponent
        temp = InvertMatrix.invert(in, !dup);
        n = -n;
    } else
        temp = in.dup();
    INDArray result = temp.dup();
    if (n < 4) {
        // Small exponents: plain repeated multiplication is cheapest
        for (int i = 1; i < n; i++) {
            result.mmuli(temp);
        }
        if (dup)
            return result;
        else
            return in.assign(result);
    } else {
        // Square repeatedly to reach temp^(2^squares), then multiply in the remainder
        int squares = (int) (Math.log(n) / Math.log(2.0));
        for (int i = 0; i < squares; i++)
            result = result.mmul(result);
        // Exact integer remainder: avoids the Math.pow/Math.round floating-point
        // round-trip of the previous version (identical for all squares <= 30)
        int diff = n - (1 << squares);
        for (int i = 0; i < diff; i++)
            result.mmuli(temp);
        if (dup)
            return result;
        else
            return in.assign(result);
    }
}
 
Example 12
Source File: OCNNOutputLayer.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
// Forward pass for this output layer: score = activation(input * V) * w.
// Returns one value per input row; also stores the output into this.labels
// as a side effect (NOTE(review): presumably consumed by the OCNN loss — verify).
private INDArray doOutput(boolean training,LayerWorkspaceMgr workspaceMgr) {
    assertInputSet(false);
    // Parameter arrays with any configured weight noise applied for this pass
    INDArray w = getParamWithNoise(W_KEY,training,workspaceMgr);
    INDArray v = getParamWithNoise(V_KEY,training,workspaceMgr);
    applyDropOutIfNecessary(training, workspaceMgr);

    // first = input * v, shape [input.size(0), v.size(1)]
    INDArray first = Nd4j.createUninitialized(input.dataType(), input.size(0), v.size(1));
    input.mmuli(v, first);
    // Elementwise activation of the hidden projection
    INDArray act2d = layerConf().getActivationFn().getActivation(first, training);
    // Collapse the hidden dimension against w (flattened to a vector): one score per example
    INDArray output = workspaceMgr.createUninitialized(ArrayType.ACTIVATIONS, input.dataType(), input.size(0));
    act2d.mmuli(w.reshape(w.length()), output);
    this.labels = output;
    return output;
}
 
Example 13
Source File: VariationalAutoencoder.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Override
public Pair<Gradient, INDArray> backpropGradient(INDArray epsilon, LayerWorkspaceMgr workspaceMgr) {
    assertInputSet(true);
    // Pretrain-only parameters share the flattened gradient view but are not updated
    // by supervised backprop — zero their views once so stale values don't leak into updates.
    if (!zeroedPretrainParamGradients) {
        for (Map.Entry<String, INDArray> entry : gradientViews.entrySet()) {
            if (isPretrainParam(entry.getKey())) {
                entry.getValue().assign(0);
            }
        }
        zeroedPretrainParamGradients = true;
    }

    INDArray input = this.input.castTo(dataType);

    Gradient gradient = new DefaultGradient();

    // Forward pass (training mode) to obtain encoder pre-outputs/activations for gradient calc
    VAEFwdHelper fwd = doForward(true, true, workspaceMgr);
    // Backprop epsilon through the p(z|x) mean activation function
    INDArray currentDelta = pzxActivationFn.backprop(fwd.pzxMeanPreOut, epsilon).getFirst();

    //Finally, calculate mean value:
    INDArray meanW = getParamWithNoise(VariationalAutoencoderParamInitializer.PZX_MEAN_W, true, workspaceMgr);
    INDArray dLdMeanW = gradientViews.get(VariationalAutoencoderParamInitializer.PZX_MEAN_W); //f order
    INDArray lastEncoderActivation = fwd.encoderActivations[fwd.encoderActivations.length - 1];
    // dL/dW = lastEncoderActivation^T * delta, written directly into the gradient view
    Nd4j.gemm(lastEncoderActivation, currentDelta, dLdMeanW, true, false, 1.0, 0.0);
    INDArray dLdMeanB = gradientViews.get(VariationalAutoencoderParamInitializer.PZX_MEAN_B);
    currentDelta.sum(dLdMeanB, 0); //dLdMeanB is initialized/zeroed first in sum op

    gradient.gradientForVariable().put(VariationalAutoencoderParamInitializer.PZX_MEAN_W, dLdMeanW);
    gradient.gradientForVariable().put(VariationalAutoencoderParamInitializer.PZX_MEAN_B, dLdMeanB);

    // Epsilon for the topmost encoder layer: (W * delta^T)^T
    epsilon = meanW.mmul(currentDelta.transpose()).transpose();

    int nEncoderLayers = encoderLayerSizes.length;

    // Walk the encoder layers backwards, accumulating weight/bias gradients
    IActivation afn = layerConf().getActivationFn();
    for (int i = nEncoderLayers - 1; i >= 0; i--) {
        String wKey = "e" + i + WEIGHT_KEY_SUFFIX;
        String bKey = "e" + i + BIAS_KEY_SUFFIX;

        INDArray weights = getParamWithNoise(wKey, true, workspaceMgr);

        INDArray dLdW = gradientViews.get(wKey);
        INDArray dLdB = gradientViews.get(bKey);

        INDArray preOut = fwd.encoderPreOuts[i];

        // Backprop through this layer's activation function
        currentDelta = afn.backprop(preOut, epsilon).getFirst();

        // Input to this layer: the network input for layer 0, else the previous layer's activation
        INDArray actInput;
        if (i == 0) {
            actInput = input;
        } else {
            actInput = fwd.encoderActivations[i - 1];
        }
        Nd4j.gemm(actInput, currentDelta, dLdW, true, false, 1.0, 0.0);
        currentDelta.sum(dLdB, 0); //dLdB is initialized/zeroed first in sum op

        gradient.gradientForVariable().put(wKey, dLdW);
        gradient.gradientForVariable().put(bKey, dLdB);

        if(i == 0) {
            // Final epsilon is returned to the caller, so allocate it in the
            // ACTIVATION_GRAD workspace ('f' order) rather than as a temporary
            epsilon = workspaceMgr.createUninitialized(ArrayType.ACTIVATION_GRAD, currentDelta.dataType(), new long[]{weights.size(0), currentDelta.size(0)}, 'f');
            weights.mmuli(currentDelta.transpose(), epsilon);
            epsilon = epsilon.transpose();
        } else {
            epsilon = weights.mmul(currentDelta.transpose()).transpose();
        }
    }

    return new Pair<>(gradient, epsilon);
}
 
Example 14
Source File: RandomProjection.java    From nd4j with Apache License 2.0 2 votes vote down vote up
/**
 * Projects {@code data} in place by multiplying it with a random projection matrix.
 * @param data the matrix to project (modified in place)
 * @return the projected matrix
 */
public INDArray projecti(INDArray data){
    long[] shape = targetShape(data.shape(), eps, components, autoMode);
    INDArray projection = getProjectionMatrix(shape, this.rng);
    return data.mmuli(projection);
}
 
Example 15
Source File: RandomProjection.java    From nd4j with Apache License 2.0 2 votes vote down vote up
/**
 * Projects {@code data} by multiplying it with a random projection matrix,
 * writing the product into the supplied result array.
 *
 * @param data   the matrix to project
 * @param result placeholder receiving the projected matrix
 * @return the result array
 */
public INDArray projecti(INDArray data, INDArray result){
    long[] shape = targetShape(data.shape(), eps, components, autoMode);
    INDArray projection = getProjectionMatrix(shape, this.rng);
    return data.mmuli(projection, result);
}
 
Example 16
Source File: RandomProjection.java    From nd4j with Apache License 2.0 2 votes vote down vote up
/**
 * Computes a random projection of {@code data}, writing the product into the
 * supplied result array (the input itself is left untouched).
 *
 * @param data   the matrix to project
 * @param result placeholder receiving the projected matrix
 * @return the result array
 */
public INDArray project(INDArray data, INDArray result){
    long[] shape = targetShape(data.shape(), eps, components, autoMode);
    INDArray projection = getProjectionMatrix(shape, this.rng);
    return data.mmuli(projection, result);
}
 
Example 17
Source File: RandomProjection.java    From deeplearning4j with Apache License 2.0 2 votes vote down vote up
/**
 * Computes a random projection of {@code data}, writing the product into the
 * supplied result array (the input itself is left untouched).
 *
 * @param data   the matrix to project
 * @param result placeholder receiving the projected matrix
 * @return the result array
 */
public INDArray project(INDArray data, INDArray result){
    long[] shape = targetShape(data.shape(), eps, components, autoMode);
    INDArray projection = getProjectionMatrix(shape, this.rng);
    return data.mmuli(projection, result);
}
 
Example 18
Source File: RandomProjection.java    From deeplearning4j with Apache License 2.0 2 votes vote down vote up
/**
 * Projects {@code data} in place by multiplying it with a random projection matrix.
 * @param data the matrix to project (modified in place)
 * @return the projected matrix
 */
public INDArray projecti(INDArray data){
    long[] shape = targetShape(data.shape(), eps, components, autoMode);
    INDArray projection = getProjectionMatrix(shape, this.rng);
    return data.mmuli(projection);
}
 
Example 19
Source File: RandomProjection.java    From deeplearning4j with Apache License 2.0 2 votes vote down vote up
/**
 * Projects {@code data} by multiplying it with a random projection matrix,
 * writing the product into the supplied result array.
 *
 * @param data   the matrix to project
 * @param result placeholder receiving the projected matrix
 * @return the result array
 */
public INDArray projecti(INDArray data, INDArray result){
    long[] shape = targetShape(data.shape(), eps, components, autoMode);
    INDArray projection = getProjectionMatrix(shape, this.rng);
    return data.mmuli(projection, result);
}