Java Code Examples for org.nd4j.linalg.api.ndarray.INDArray#mmul()

The following examples show how to use org.nd4j.linalg.api.ndarray.INDArray#mmul(). Each example is taken from an open-source project; you can go to the original project or source file by following the link above each example.
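Before the project examples, here is a minimal, self-contained sketch of what mmul() does: a true matrix multiplication (not an element-wise product), so the inner dimensions of the two operands must match. The class name, shapes and values below are chosen purely for illustration, and the two-argument overload at the end assumes an ND4J version where Nd4j.create(DataType, long...) is available.

import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class MmulSketch {
    public static void main(String[] args) {
        // 2x3 matrix
        INDArray a = Nd4j.create(new double[][]{{1, 2, 3}, {4, 5, 6}});
        // 3x2 matrix
        INDArray b = Nd4j.create(new double[][]{{7, 8}, {9, 10}, {11, 12}});

        // (2x3) mmul (3x2) -> 2x2 result
        INDArray c = a.mmul(b);
        System.out.println(c);      // values: [[58, 64], [139, 154]]

        // Two-argument overload: write the product into a preallocated result array
        INDArray out = Nd4j.create(DataType.DOUBLE, 2, 2);
        a.mmul(b, out);
        System.out.println(out);    // same values as c
    }
}
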
Example 1
Source File: DerivativeTests.java    From nd4j with Apache License 2.0
public static INDArray correctSoftmax(INDArray X) {
    // This is only valid for X a row vector.
    // It should return a rank-2 matrix whose diagonal elements are p_i*(1-p_i)
    // and whose off-diagonal elements are -p_i*p_j.
    INDArray p = Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform("softmax", X.dup()));
    INDArray pCol = p.dup().transpose();
    INDArray pipj = pCol.mmul(p);
    pipj.muli(-1);

    // pipj is now correct except for the diagonal elements,
    // which are exactly what the current softmax derivative op gives us
    INDArray diagp = Nd4j.getExecutioner()
                    .execAndReturn(new SoftMaxDerivative(X.dup()));


    //ugly for loop to correct diag elements
    for (int i = 0; i < X.length(); i++) {
        pipj.put(i, i, diagp.getDouble(0, i));
    }

    return pipj;
}
 
Example 2
Source File: TestEigen.java    From deeplearning4j with Apache License 2.0
@Test
public void testSyev() {
    for(DataType dt : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
        //log.info("Datatype: {}", dt);
        Nd4j.setDefaultDataTypes(dt, dt);

        INDArray A = Nd4j.create(new float[][]{{1.96f, -6.49f, -0.47f, -7.20f, -0.65f},
                {-6.49f, 3.80f, -6.39f, 1.50f, -6.34f}, {-0.47f, -6.39f, 4.17f, -1.51f, 2.67f},
                {-7.20f, 1.50f, -1.51f, 5.70f, 1.80f}, {-0.65f, -6.34f, 2.67f, 1.80f, -7.10f}});

        INDArray B = A.dup();
        INDArray e = Eigen.symmetricGeneralizedEigenvalues(A);

        for (int i = 0; i < A.rows(); i++) {
            INDArray LHS = B.mmul(A.slice(i, 1).reshape(-1, 1));
            INDArray RHS = A.slice(i, 1).mul(e.getFloat(i));

            for (int j = 0; j < LHS.length(); j++) {
                assertEquals(LHS.getFloat(j), RHS.getFloat(j), 0.001f);
            }
        }
    }
}
 
Example 3
Source File: TestPCA.java    From nd4j with Apache License 2.0
@Test
public void testFactorVariance() {
    int m = 13;
    int n = 4;

    double[] f = new double[] {7, 1, 11, 11, 7, 11, 3, 1, 2, 21, 1, 11, 10, 26, 29, 56, 31, 52, 55, 71, 31, 54, 47,
                    40, 66, 68, 6, 15, 8, 8, 6, 9, 17, 22, 18, 4, 23, 9, 8, 60, 52, 20, 47, 33, 22, 6, 44, 22, 26,
                    34, 12, 12};

    INDArray A = Nd4j.create(f, new int[] {m, n}, 'f');

    INDArray A1 = A.dup('f');
    INDArray Factor1 = org.nd4j.linalg.dimensionalityreduction.PCA.pca_factor(A1, 0.95, true);
    A1 = A.subiRowVector(A.mean(0));
    INDArray Reduced1 = A1.mmul(Factor1);
    INDArray Reconstructed1 = Reduced1.mmul(Factor1.transpose());
    INDArray Diff1 = Reconstructed1.sub(A1);
    for (int i = 0; i < m * n; i++) {
        assertEquals("Reconstructed matrix is very different from the original.", 0.0, Diff1.getDouble(i), 0.1);
    }
    INDArray A2 = A.dup('f');
    INDArray Factor2 = org.nd4j.linalg.dimensionalityreduction.PCA.pca_factor(A2, 0.50, true);
    assertTrue("Variance differences should change factor sizes.", Factor1.columns() > Factor2.columns());
}
 
Example 4
Source File: Level3Test.java    From nd4j with Apache License 2.0
@Test
public void testGemm4() throws Exception {
    INDArray array1 = Nd4j.linspace(1, 1000, 1000).reshape(10, 100);
    INDArray array2 = Nd4j.linspace(1, 1000, 1000).reshape('f', 100, 10);

    INDArray array3 = array1.mmul(array2);

    //System.out.println("Array3: " + Arrays.toString(array3.data().asFloat()));

    assertEquals(338350f, array3.data().getFloat(0), 0.001f);
    assertEquals(843350f, array3.data().getFloat(1), 0.001f);
    assertEquals(843350f, array3.data().getFloat(10), 0.001f);
    assertEquals(2348350f, array3.data().getFloat(11), 0.001f);
    assertEquals(1348350f, array3.data().getFloat(20), 0.001f);
    assertEquals(3853350f, array3.data().getFloat(21), 0.001f);
}
 
Example 5
Source File: Level3Test.java    From deeplearning4j with Apache License 2.0
@Test
public void testGemm6() {
    INDArray array1 = Nd4j.linspace(1, 1000, 1000).reshape('f', 10, 100);
    INDArray array2 = Nd4j.linspace(1, 1000, 1000).reshape('f', 100, 10);

    INDArray array3 = array1.mmul(array2);

    //System.out.println("Array3: " + Arrays.toString(array3.data().asFloat()));

    assertEquals(3338050.0f, array3.data().getFloat(0), 0.001f);
    assertEquals(3343100f, array3.data().getFloat(1), 0.001f);
    assertEquals(8298050f, array3.data().getFloat(10), 0.001f);
    assertEquals(8313100.0f, array3.data().getFloat(11), 0.001f);
    assertEquals(1.325805E7f, array3.data().getFloat(20), 5f);
    assertEquals(1.32831E7f, array3.data().getFloat(21), 5f);
}
 
Example 6
Source File: TestInvertMatrices.java    From nd4j with Apache License 2.0
/**
 * Example from: <a href="https://www.wolframalpha.com/input/?i=invert+matrix+((1,2),(3,4),(5,6))">here</a>
 */
@Test
public void testLeftPseudoInvert() {
    INDArray X = Nd4j.create(new double[][]{{1, 2}, {3, 4}, {5, 6}});
    INDArray expectedLeftInverse = Nd4j.create(new double[][]{{-16, -4, 8}, {13, 4, -5}}).mul(1 / 12d);
    INDArray leftInverse = InvertMatrix.pLeftInvert(X, false);
    assertEquals(expectedLeftInverse, leftInverse);

    final INDArray identity3x3 = Nd4j.create(new double[][]{{1, 0, 0}, {0, 1, 0}, {0, 0, 1}});
    final INDArray identity2x2 = Nd4j.create(new double[][]{{1, 0}, {0, 1}});
    final double precision = 1e-5;

    // right inverse
    final INDArray rightInverseCheck = X.mmul(leftInverse);
    // the right inverse must not hold since the rows of X are not linearly independent (x_3 + x_1 = 2*x_2)
    assertFalse(rightInverseCheck.equalsWithEps(identity3x3, precision));

    // the left inverse must hold since the columns of X are linearly independent
    final INDArray leftInverseCheck = leftInverse.mmul(X);
    assertTrue(leftInverseCheck.equalsWithEps(identity2x2, precision));

    // general condition X = X * X^-1 * X
    final INDArray generalCond = X.mmul(leftInverse).mmul(X);
    assertTrue(X.equalsWithEps(generalCond, precision));
    checkMoorePenroseConditions(X, leftInverse, precision);
}
 
Example 7
Source File: Nd4jMatrix.java    From jstarcraft-ai with Apache License 2.0
@Override
@Deprecated
// TODO: to be consolidated with dotProduct
public MathMatrix accumulateProduct(MathVector rowVector, MathVector columnVector, MathCalculator mode) {
    if (rowVector instanceof Nd4jVector && columnVector instanceof Nd4jVector) {
        Nd4jEnvironmentThread thread = EnvironmentThread.getThread(Nd4jEnvironmentThread.class);
        try (MemoryWorkspace workspace = thread.getSpace()) {
            INDArray leftArray = Nd4jVector.class.cast(rowVector).getArray();
            // TODO: this needs a better approach, otherwise there is a performance problem.
            if (leftArray.isView()) {
                // Copy here because gemm does not support view vectors.
                leftArray = leftArray.dup();
            }
            if (leftArray.rows() == 1) {
                leftArray = leftArray.transpose();
            }
            INDArray rightArray = Nd4jVector.class.cast(columnVector).getArray();
            if (rightArray.isView()) {
                // Copy here because gemm does not support view vectors.
                rightArray = rightArray.dup();
            }
            if (rightArray.columns() == 1) {
                rightArray = rightArray.transpose();
            }
            INDArray dataArray = this.getArray();
            INDArray cacheArray = Nd4j.zeros(dataArray.shape(), dataArray.ordering());
            leftArray.mmul(rightArray, cacheArray);
            dataArray.addi(cacheArray);
            return this;
        }
    } else {
        return MathMatrix.super.accumulateProduct(rowVector, columnVector, mode);
    }
}
 
Example 8
Source File: IndexingTestsC.java    From deeplearning4j with Apache License 2.0
@Test
public void testIndexingWithMmul() {
    INDArray a = Nd4j.linspace(1, 9, 9, DataType.DOUBLE).reshape(3, 3);
    INDArray b = Nd4j.linspace(1, 5, 5, DataType.DOUBLE).reshape(1, -1);
    //System.out.println(b);
    INDArray view = a.get(all(), NDArrayIndex.interval(0, 1));
    INDArray c = view.mmul(b);
    INDArray assertion = a.get(all(), NDArrayIndex.interval(0, 1)).dup().mmul(b);
    assertEquals(assertion, c);
}
 
Example 9
Source File: INDArrayUnitTest.java    From tutorials with MIT License
@Test
void givenTwoMatrices_whenMultiply_thenMultiplicatedMatrix() {
    INDArray firstMatrix = Nd4j.create(
      new double[][]{
        new double[]{1d, 5d},
        new double[]{2d, 3d},
        new double[]{1d, 7d}
      }
    );

    INDArray secondMatrix = Nd4j.create(
      new double[][] {
        new double[] {1d, 2d, 3d, 7d},
        new double[] {5d, 2d, 8d, 1d}
      }
    );

    INDArray expected = Nd4j.create(
      new double[][] {
        new double[] {26d, 12d, 43d, 12d},
        new double[] {17d, 10d, 30d, 17d},
        new double[] {36d, 16d, 59d, 14d}
      }
    );

    INDArray actual = firstMatrix.mmul(secondMatrix);

    assertThat(actual).isEqualTo(expected);
}
 
Example 10
Source File: BlasTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testMmuli3(){
    final INDArray activations = Nd4j.createUninitialized(new long[]{1, 3, 2}, 'f');
    final INDArray z = activations.tensorAlongDimension(0, 1, 2);

    final INDArray a = Nd4j.rand(3, 4);
    final INDArray b = Nd4j.rand(4, 2);

    INDArray ab = a.mmul(b);
    a.mmul(b, z);
    assertEquals(ab, z);
}
 
Example 11
Source File: Level3Test.java    From nd4j with Apache License 2.0
@Test
public void testGemm1() throws Exception {
    INDArray array1 = Nd4j.linspace(1, 100, 100).reshape(1, 100);
    INDArray array2 = Nd4j.linspace(1, 100, 100).reshape(100, 1);

    INDArray array3 = array1.mmul(array2);

    assertEquals(338350f, array3.getFloat(0), 0.001f);
}
 
Example 12
Source File: CublasTests.java    From nd4j with Apache License 2.0
@Test
public void testGemm2() throws Exception {
    INDArray array1 = Nd4j.linspace(1, 100, 100).reshape('f', 1, 100);
    INDArray array2 = Nd4j.linspace(1, 100, 100).reshape('f', 100, 1);

    INDArray array3 = array1.mmul(array2);

    assertEquals(338350f, array3.getFloat(0), 0.001f);
}
 
Example 13
Source File: Level2Test.java    From nd4j with Apache License 2.0
@Test
public void testGemv6() throws Exception {
    INDArray array1 = Nd4j.linspace(1, 1000, 1000).reshape('f', 10, 100);
    INDArray array2 = Nd4j.linspace(1, 100, 100).reshape(100, 1);

    INDArray array3 = Nd4j.create(10);

    array1.mmul(array2, array3);

    assertEquals(10, array3.length());
    assertEquals(3338050f, array3.getFloat(0), 0.001f);
    assertEquals(3343100f, array3.getFloat(1), 0.001f);
    assertEquals(3348150f, array3.getFloat(2), 0.001f);
    assertEquals(3353200f, array3.getFloat(3), 0.001f);
}
 
Example 14
Source File: LapackTest.java    From nd4j with Apache License 2.0
void testEv(int N, char matrixOrder) {
    INDArray A = Nd4j.rand(N, N, matrixOrder);
    for (int r = 1; r < N; r++) {
        for (int c = 0; c < r; c++) {
            double v = A.getDouble(r, c);
            A.putScalar(c, r, v);
        }
    }

    INDArray Aorig = A.dup();
    INDArray V = Nd4j.create(N);

    Nd4j.getBlasWrapper().lapack().syev('V', 'U', A, V);

    INDArray VV = Nd4j.create(N, N);
    for (int i = 0; i < N; i++) {
        VV.put(i, i, V.getDouble(i));
    }

    INDArray L = Aorig.mmul(A);
    INDArray R = A.mmul(VV);

    NdIndexIterator iter = new NdIndexIterator(L.shape());
    while(iter.hasNext()){
        int[] pos = iter.next();
        assertEquals("SVD did not factorize properly", L.getDouble(pos), R.getDouble(pos), 1e-5);
    }
}
 
Example 15
Source File: Level3Test.java    From nd4j with Apache License 2.0
@Test
public void testGemm5() throws Exception {
    INDArray array1 = Nd4j.linspace(1, 1000, 1000).reshape('f', 10, 100);
    INDArray array2 = Nd4j.linspace(1, 1000, 1000).reshape(100, 10);

    INDArray array3 = array1.mmul(array2);

    //System.out.println("Array3: " + Arrays.toString(array3.data().asFloat()));

    //assertEquals(3.29341E7f, array3.data().getFloat(0),10f);
    assertEquals(3.29837E7f, array3.data().getFloat(1), 10f);
    assertEquals(3.3835E7f, array3.data().getFloat(99), 10f);
}
 
Example 16
Source File: NDArrayTestsFortran.java    From nd4j with Apache License 2.0
@Test
public void testRowVectorGemm() {
    INDArray linspace = Nd4j.linspace(1, 4, 4);
    INDArray other = Nd4j.linspace(1, 16, 16).reshape(4, 4);
    INDArray result = linspace.mmul(other);
    INDArray assertion = Nd4j.create(new double[] {30., 70., 110., 150.});
    assertEquals(assertion, result);
}
 
Example 17
Source File: EndlessTests.java    From nd4j with Apache License 2.0
@Test
public void testMmulForever(){
    INDArray first = Nd4j.zeros(10,10);
    INDArray second = Nd4j.zeros(10,10);

    for (int i = 0; i < RUN_LIMIT; i++ ) {
        first.mmul(second);
    }
}
 
Example 18
Source File: MiscOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testMmulGradients(){
    int[] aShape = new int[]{2,3};
    int[] bShape = new int[]{3,4};
    List<String> failed = new ArrayList<>();

    for( char aOrder : new char[]{'c', 'f'}) {
        for (char bOrder : new char[]{'c', 'f'}) {
            for (boolean transposeA : new boolean[]{false, true}) {
                for (boolean transposeB : new boolean[]{false, true}) {
                    for (boolean transposeResult : new boolean[]{false, true}) {    //https://github.com/deeplearning4j/deeplearning4j/issues/5648
                        Nd4j.getRandom().setSeed(12345);

                        INDArray aArr = Nd4j.rand(DataType.DOUBLE, t(transposeA, aShape)).dup(aOrder);
                        INDArray bArr = Nd4j.rand(DataType.DOUBLE, t(transposeB, bShape)).dup(bOrder);

                        SameDiff sd = SameDiff.create();
                        SDVariable a = sd.var("a", aArr);
                        SDVariable b = sd.var("b", bArr);

                        SDVariable mmul = sd.mmul(a, b, transposeA, transposeB, transposeResult);

                        INDArray exp = (transposeA ? aArr.transpose() : aArr);
                        exp = exp.mmul(transposeB ? bArr.transpose() : bArr);
                        exp = (transposeResult ? exp.transpose() : exp);

                        SDVariable loss = mmul.std(true);

                        String name = aOrder + "," + bOrder + ",tA=" + transposeA + ",tB=" + transposeB +
                                ",tRes=" + transposeResult;
                        TestCase tc = new TestCase(sd).testName(name)
                                .expected(mmul, exp);

                        String err = OpValidation.validate(tc, true);
                        if(err != null)
                            failed.add(err);
                    }
                }
            }
        }
    }

    assertEquals(failed.toString(), 0, failed.size());
}
 
Example 19
Source File: TestSimpleRnn.java    From deeplearning4j with Apache License 2.0
@Test
public void testSimpleRnn(){
    Nd4j.getRandom().setSeed(12345);

    int m = 3;
    int nIn = 5;
    int layerSize = 6;
    int tsLength = 7;
    INDArray in;
    if (rnnDataFormat == RNNFormat.NCW){
        in = Nd4j.rand(DataType.FLOAT, m, nIn, tsLength);
    }
    else{
        in = Nd4j.rand(DataType.FLOAT, m, tsLength, nIn);
    }


    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .updater(new NoOp())
            .weightInit(WeightInit.XAVIER)
            .activation(Activation.TANH)
            .list()
            .layer(new SimpleRnn.Builder().nIn(nIn).nOut(layerSize).dataFormat(rnnDataFormat).build())
            .build();

    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();

    INDArray out = net.output(in);

    INDArray w = net.getParam("0_W");
    INDArray rw = net.getParam("0_RW");
    INDArray b = net.getParam("0_b");

    INDArray outLast = null;
    for( int i=0; i<tsLength; i++ ){
        INDArray inCurrent;
        if (rnnDataFormat == RNNFormat.NCW){
            inCurrent = in.get(all(), all(), point(i));
        }
        else{
            inCurrent = in.get(all(), point(i), all());
        }

        INDArray outExpCurrent = inCurrent.mmul(w);
        if(outLast != null){
            outExpCurrent.addi(outLast.mmul(rw));
        }

        outExpCurrent.addiRowVector(b);

        Transforms.tanh(outExpCurrent, false);

        INDArray outActCurrent;
        if (rnnDataFormat == RNNFormat.NCW){
            outActCurrent = out.get(all(), all(), point(i));
        }
        else{
            outActCurrent = out.get(all(), point(i), all());
        }
        assertEquals(String.valueOf(i), outExpCurrent, outActCurrent);

        outLast = outExpCurrent;
    }


    TestUtils.testModelSerialization(net);
}
 
Example 20
Source File: NDArrayTestsFortran.java    From nd4j with Apache License 2.0
@Test
public void testMmulF() {

    DataBuffer data = Nd4j.linspace(1, 10, 10).data();
    INDArray n = Nd4j.create(data, new long[] {1, 10});
    INDArray transposed = n.transpose();
    assertEquals(true, n.isRowVector());
    assertEquals(true, transposed.isColumnVector());

    INDArray innerProduct = n.mmul(transposed);

    INDArray scalar = Nd4j.scalar(385);
    assertEquals(getFailureMessage(), scalar, innerProduct);
}