Java Code Examples for org.nd4j.linalg.api.ndarray.INDArray#getRow()

The following examples show how to use org.nd4j.linalg.api.ndarray.INDArray#getRow(). They are drawn from open source projects; the source file, project, and license are noted above each example.
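A quick orientation before the examples: getRow(i) returns a view backed by the original array, so in-place operations on the returned row write through to the matrix (Example 11 below relies on this), while dup() produces an independent copy. The sketch below illustrates this; the class name GetRowDemo is illustrative, and the exact shape of the returned row ([n] versus [1, n]) varies between nd4j versions.

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class GetRowDemo {
    public static void main(String[] args) {
        INDArray matrix = Nd4j.create(new double[][] {{1, 2}, {3, 4}});

        // getRow(i) returns a view: in-place ops on the row modify the matrix
        INDArray row = matrix.getRow(0);
        row.addi(10);
        System.out.println(matrix); // [[11, 12], [3, 4]]

        // dup() detaches the row into an independent copy
        INDArray copy = matrix.getRow(1).dup();
        copy.addi(100); // matrix is unchanged
    }
}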
Example 1
Source File: ShapeTest.java    From nd4j with Apache License 2.0
@Test
public void testToOffsetZero() {
    INDArray matrix = Nd4j.rand(3, 5);
    INDArray rowOne = matrix.getRow(1);
    INDArray row1Copy = Shape.toOffsetZero(rowOne);
    assertEquals(rowOne, row1Copy);
    INDArray rows = matrix.getRows(1, 2);
    INDArray rowsOffsetZero = Shape.toOffsetZero(rows);
    assertEquals(rows, rowsOffsetZero);

    INDArray tensor = Nd4j.rand(new int[] {3, 3, 3});
    INDArray getTensor = tensor.slice(1).slice(1);
    INDArray getTensorZero = Shape.toOffsetZero(getTensor);
    assertEquals(getTensor, getTensorZero);
}
 
Example 2
Source File: CopyTest.java    From nd4j with Apache License 2.0
@Test
public void testDup() {

    for (int x = 0; x < 100; x++) {
        INDArray orig = Nd4j.linspace(1, 4, 4);
        INDArray dup = orig.dup();
        assertEquals(orig, dup);

        INDArray matrix = Nd4j.create(new float[] {1, 2, 3, 4}, new int[] {2, 2});
        INDArray dup2 = matrix.dup();
        assertEquals(matrix, dup2);

        INDArray row1 = matrix.getRow(1);
        INDArray dupRow = row1.dup();
        assertEquals(row1, dupRow);


        INDArray columnSorted = Nd4j.create(new float[] {2, 1, 4, 3}, new int[] {2, 2});
        INDArray dup3 = columnSorted.dup();
        assertEquals(columnSorted, dup3);
    }
}
 
Example 3
Source File: DrawMnist.java    From DataVec with Apache License 2.0
public static void drawMnist(DataSet mnist, INDArray reconstruct) throws InterruptedException {
    for (int j = 0; j < mnist.numExamples(); j++) {
        INDArray draw1 = mnist.get(j).getFeatureMatrix().mul(255);
        INDArray reconstructed2 = reconstruct.getRow(j);
        INDArray draw2 = Nd4j.getDistributions().createBinomial(1, reconstructed2).sample(reconstructed2.shape())
                        .mul(255);

        DrawReconstruction d = new DrawReconstruction(draw1);
        d.title = "REAL";
        d.draw();
        DrawReconstruction d2 = new DrawReconstruction(draw2, 1000, 1000);
        d2.title = "TEST";

        d2.draw();
        Thread.sleep(1000);
        d.frame.dispose();
        d2.frame.dispose();

    }
}
 
Example 4
Source File: ShapeTestC.java    From deeplearning4j with Apache License 2.0
@Test
public void testToOffsetZero() {
    INDArray matrix = Nd4j.rand(3, 5);
    INDArray rowOne = matrix.getRow(1);
    INDArray row1Copy = Shape.toOffsetZero(rowOne);
    assertEquals(rowOne, row1Copy);
    INDArray rows = matrix.getRows(1, 2);
    INDArray rowsOffsetZero = Shape.toOffsetZero(rows);
    assertEquals(rows, rowsOffsetZero);

    INDArray tensor = Nd4j.rand(new int[] {3, 3, 3});
    INDArray getTensor = tensor.slice(1).slice(1);
    INDArray getTensorZero = Shape.toOffsetZero(getTensor);
    assertEquals(getTensor, getTensorZero);
}
 
Example 5
Source File: ShufflesTests.java    From deeplearning4j with Apache License 2.0
public boolean compareRow(INDArray newData) {
    float[] newMap = measureState(newData);

    if (newMap.length != map.length) {
        System.out.println("Different map lengths");
        return false;
    }

    if (Arrays.equals(map, newMap)) {
        // maps are equal, so nothing was shuffled
        return false;
    }

    for (int x = 0; x < newData.rows(); x++) {
        INDArray row = newData.getRow(x);
        for (int y = 0; y < row.length(); y++) {
            if (Math.abs(row.getFloat(y) - newMap[x]) > Nd4j.EPS_THRESHOLD) {
                System.out.println("Different data in a row");
                return false;
            }
        }
    }

    return true;
}
 
Example 6
Source File: INDArrayPropertyTranslator.java    From ml-models with Apache License 2.0
@Override
public Value toProperty(int propertyId, INDArray data, long nodeId) {

    INDArray row = data.getRow((int) nodeId);

    double[] rowAsDouble = new double[row.size(1)];
    for (int columnIndex = 0; columnIndex < row.size(1); columnIndex++) {
        rowAsDouble[columnIndex] = row.getDouble(columnIndex);
    }

    return new DoubleArray(rowAsDouble);
}
 
Example 7
Source File: SparseCOOLevel1Test.java    From nd4j with Apache License 2.0
@Test
public void shouldComputeDot() {
    // data, indexes and shape are fields defined elsewhere in the test class
    INDArray sparseVec = Nd4j.createSparseCOO(data, indexes, shape);
    INDArray matrix = Nd4j.linspace(1, 4, 4).reshape(1, 4);
    INDArray vec = matrix.getRow(0);
    assertEquals(21, Nd4j.getBlasWrapper().dot(sparseVec, vec), 1e-1);
}
 
Example 8
Source File: HierarchicSoftmaxTests.java    From nd4j with Apache License 2.0
@Test
public void testHSGradient1() throws Exception {
    INDArray syn0 = Nd4j.ones(10, 10).assign(0.01f);
    INDArray syn1 = Nd4j.ones(10, 10).assign(0.02f);
    INDArray expTable = Nd4j.create(10000).assign(0.5f);
    INDArray neu1e = Nd4j.create(10);

    INDArray expSyn0 = Nd4j.create(10).assign(0.01f);
    INDArray expSyn1 = Nd4j.create(10).assign(0.020005);
    INDArray expNeu1e = Nd4j.create(10).assign(0.00001f);

    int idxSyn0 = 1;
    int idxSyn1 = 1;
    int code = 0;

    double lr = 0.001;

    HierarchicSoftmax op =
                    new HierarchicSoftmax(syn0.getRow(idxSyn0), syn1.getRow(idxSyn1), expTable, neu1e, code, lr);

    Nd4j.getExecutioner().exec(op);

    INDArray syn0row = syn0.getRow(idxSyn0);
    INDArray syn1row = syn1.getRow(idxSyn1);

    // expected gradient is 0.0005
    // expected neu1 = 0.00001
    // expected syn1 = 0.020005

    assertEquals(expNeu1e, neu1e);

    assertEquals(expSyn1, syn1row);

    // syn0 should not have been modified at this point
    assertEquals(expSyn0, syn0row);
}
 
Example 9
Source File: ShapeTestsC.java    From nd4j with Apache License 2.0
@Test
public void testPutRow() {
    INDArray matrix = Nd4j.create(new double[][] {{1, 2}, {3, 4}});
    for (int i = 0; i < matrix.rows(); i++) {
        INDArray row = matrix.getRow(i);
        System.out.println(row);
    }
    matrix.putRow(1, Nd4j.create(new double[] {1, 2}));
    assertEquals(matrix.getRow(0), matrix.getRow(1));
}
 
Example 10
Source File: ShufflesTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testSymmetricShuffle1() {
    INDArray features = Nd4j.zeros(10, 10);
    INDArray labels = Nd4j.zeros(10, 3);
    for (int x = 0; x < 10; x++) {
        features.getRow(x).assign(x);
        labels.getRow(x).assign(x);
    }

    OrderScanner2D scanner = new OrderScanner2D(features);

    assertArrayEquals(new float[] {0f, 1f, 2f, 3f, 4f, 5f, 6f, 7f, 8f, 9f}, scanner.getMap(), 0.01f);

    List<INDArray> list = new ArrayList<>();
    list.add(features);
    list.add(labels);

    Nd4j.shuffle(list, 1);

    ArrayUtil.argMin(new int[] {});

    assertTrue(scanner.compareRow(features));

    for (int x = 0; x < 10; x++) {
        double val = features.getRow(x).getDouble(0);
        INDArray row = labels.getRow(x);

        for (int y = 0; y < row.length(); y++) {
            assertEquals(val, row.getDouble(y), 0.001);
        }
    }
}
 
Example 11
Source File: NDArrayTestsFortran.java    From deeplearning4j with Apache License 2.0
@Test
public void testMatrix() {
    INDArray arr = Nd4j.create(new double[] {1, 2, 3, 4}, new long[] {2, 2});
    INDArray brr = Nd4j.create(new double[] {5, 6}, new long[] {2});
    INDArray row = arr.getRow(0);
    row.subi(brr); // in-place subtraction on the view writes through to arr
    assertEquals(Nd4j.create(new double[] {-4, -3}), arr.getRow(0));
}
 
Example 12
Source File: IndexingTestsC.java    From deeplearning4j with Apache License 2.0
@Test
public void testGetRowEdgeCase() {
    INDArray rowVec = Nd4j.linspace(1, 5, 5, DataType.DOUBLE).reshape(1, -1);
    INDArray get = rowVec.getRow(0); // edge case: must return the full [1, 5] row, not shape [1, 1]

    assertArrayEquals(new long[] {1, 5}, get.shape());
    assertEquals(rowVec, get);
}
 
Example 13
Source File: HierarchicSoftmaxTests.java    From nd4j with Apache License 2.0
@Test
public void testCBOWGradient1() throws Exception {
    INDArray syn0 = Nd4j.create(10, 10).assign(0.01f);
    INDArray syn1 = Nd4j.create(10, 10).assign(0.02f);
    INDArray expTable = Nd4j.create(10000).assign(0.5f);

    double lr = 0.025;

    INDArray syn0row_before_0 = syn0.getRow(0).dup();
    INDArray syn0row_before_1 = syn0.getRow(1).dup();
    INDArray syn0row_before_2 = syn0.getRow(2).dup();

    AggregateCBOW op = new AggregateCBOW(syn0, syn1, null, expTable, null, 0, new int[] {0, 1, 2}, new int[] {4, 5},
                    new int[] {1, 1}, 0, 0, 10, lr, 2L, 10);

    Nd4j.getExecutioner().exec(op);

    INDArray syn0row_0 = syn0.getRow(0);
    INDArray syn0row_1 = syn0.getRow(1);
    INDArray syn0row_2 = syn0.getRow(2);

    INDArray syn1row_4 = syn1.getRow(4);
    INDArray syn1row_5 = syn1.getRow(5);
    INDArray syn1row_6 = syn1.getRow(6);

    INDArray expSyn0row_0 = Nd4j.create(10).assign(0.0095f);
    INDArray expSyn1row_4 = Nd4j.create(10).assign(0.019875f);
    INDArray expSyn1row_6 = Nd4j.create(10).assign(0.02f);

    assertNotEquals(syn0row_before_0, syn0row_0);
    assertNotEquals(syn0row_before_1, syn0row_1);
    assertNotEquals(syn0row_before_2, syn0row_2);

    // neu1 is expected to be 0.01
    // dot is expected to be 0.002
    // g is expected to be -0.0125 for both rounds: both codes are 1, so (1 - 1 - 0.5) * 0.025 = -0.0125
    // neu1e is expected to be -0.00025 after the first round (g * syn1 + neu1e) (-0.0125 * 0.02 + 0.000)
    // neu1e is expected to be -0.00050 after the second round (-0.0125 * 0.02 + -0.00025)
    // syn1 is expected to be 0.019875 after the first round (g * neu1 + syn1) (-0.0125 * 0.01 + 0.02)
    // syn1 is expected to be 0.019875 after the second round as well, since each round uses its own syn1 index

    // syn0 is expected to be 0.0095f after the op (syn0 += neu1e) (0.01 + -0.0005)

    log.info("syn1row4[0]: {}", syn1row_4.getFloat(0));

    assertEquals(expSyn0row_0, syn0row_0);
    assertEquals(expSyn0row_0, syn0row_1);
    assertEquals(expSyn0row_0, syn0row_2);

    assertEquals(expSyn1row_4, syn1row_4);
    assertEquals(expSyn1row_4, syn1row_5);
    assertEquals(expSyn1row_6, syn1row_6);

}
 
Example 14
Source File: GlobalPoolingMaskingTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testMaskingRnn() {

    int timeSeriesLength = 5;
    int nIn = 5;
    int layerSize = 4;
    int nOut = 2;
    int[] minibatchSizes = new int[] {1, 3};

    for (int miniBatchSize : minibatchSizes) {

        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                        .updater(new NoOp())
                        .dist(new NormalDistribution(0, 1.0)).seed(12345L).list()
                        .layer(0, new GravesLSTM.Builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH)
                                        .build())
                        .layer(1, new org.deeplearning4j.nn.conf.layers.GlobalPoolingLayer.Builder()
                                        .poolingType(PoolingType.AVG).build())
                        .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                        .activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut).build())
                        .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        Random r = new Random(12345L);
        INDArray input = Nd4j.rand(new int[] {miniBatchSize, nIn, timeSeriesLength}).subi(0.5);

        INDArray mask;
        if (miniBatchSize == 1) {
            mask = Nd4j.create(new double[] {1, 1, 1, 1, 0}).reshape(1,5);
        } else {
            mask = Nd4j.create(new double[][] {{1, 1, 1, 1, 1}, {1, 1, 1, 1, 0}, {1, 1, 1, 0, 0}});
        }

        INDArray labels = Nd4j.zeros(miniBatchSize, nOut);
        for (int i = 0; i < miniBatchSize; i++) {
            int idx = r.nextInt(nOut);
            labels.putScalar(i, idx, 1.0);
        }

        net.setLayerMaskArrays(mask, null);
        INDArray outputMasked = net.output(input);

        net.clearLayerMaskArrays();

        for (int i = 0; i < miniBatchSize; i++) {
            INDArray maskRow = mask.getRow(i);
            int tsLength = maskRow.sumNumber().intValue();
            INDArray inputSubset = input.get(NDArrayIndex.interval(i, i, true), NDArrayIndex.all(),
                            NDArrayIndex.interval(0, tsLength));

            INDArray outSubset = net.output(inputSubset);
            INDArray outputMaskedSubset = outputMasked.getRow(i,true);

            assertEquals(outSubset, outputMaskedSubset);
        }
    }
}
 
Example 15
Source File: GlobalPoolingMaskingTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testMaskingCnnDim23() {
    //Test masking, where mask is along dimension 2 AND 3
    //For example, input images of 2 different sizes

    int minibatch = 2;
    int depthIn = 2;
    int depthOut = 4;
    int nOut = 5;
    int height = 5;
    int width = 4;

    PoolingType[] poolingTypes =
            new PoolingType[] {PoolingType.SUM, PoolingType.AVG, PoolingType.MAX, PoolingType.PNORM};

    for (PoolingType pt : poolingTypes) {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().weightInit(WeightInit.XAVIER)
                .convolutionMode(ConvolutionMode.Same).seed(12345L).list()
                .layer(0, new ConvolutionLayer.Builder().nIn(depthIn).nOut(depthOut).kernelSize(2, 2)
                        .stride(1, 1).activation(Activation.TANH).build())
                .layer(1, new org.deeplearning4j.nn.conf.layers.GlobalPoolingLayer.Builder().poolingType(pt)
                        .build())
                .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                        .activation(Activation.SOFTMAX).nIn(depthOut).nOut(nOut).build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        INDArray inToBeMasked = Nd4j.rand(new int[] {minibatch, depthIn, height, width});

        //Second example in minibatch: size [3,2]
        inToBeMasked.get(point(1), NDArrayIndex.all(), NDArrayIndex.interval(3,height), NDArrayIndex.all()).assign(0);
        inToBeMasked.get(point(1), NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.interval(2,width)).assign(0);

        //Mask shape: [minibatch, 1, height, width] -> broadcast along depth
        INDArray maskArray = Nd4j.create(minibatch, 1, height, width);
        maskArray.get(point(0), all(), all(), all()).assign(1);
        maskArray.get(point(1), all(), interval(0,3), interval(0,2)).assign(1);

        net.setLayerMaskArrays(maskArray, null);

        INDArray outMasked = net.output(inToBeMasked);
        net.clearLayerMaskArrays();

        net.setLayerMaskArrays(maskArray, null);

        for (int i = 0; i < minibatch; i++) {
            INDArray subset;
            if(i == 0){
                subset = inToBeMasked.get(interval(i, i, true), all(), all(), all());
            } else {
                subset = inToBeMasked.get(interval(i, i, true), all(), interval(0,3), interval(0,2));
            }

            net.clear();
            net.clearLayerMaskArrays();
            INDArray outSubset = net.output(subset);
            INDArray outMaskedSubset = outMasked.getRow(i,true);

            assertEquals("minibatch: " + i + ", " + pt, outSubset, outMaskedSubset);
        }
    }
}
 
Example 16
Source File: LossMultiLabel.java    From deeplearning4j with Apache License 2.0
private void calculate(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask, INDArray scoreOutput, INDArray gradientOutput) {
    if (scoreOutput == null && gradientOutput == null) {
        throw new IllegalArgumentException("You have to provide at least one of scoreOutput or gradientOutput!");
    }
    if (labels.size(1) != preOutput.size(1)) {
        throw new IllegalArgumentException(
                "Labels array numColumns (size(1) = " + labels.size(1) + ") does not match output layer"
                        + " number of outputs (nOut = " + preOutput.size(1) + ") ");

    }
    labels = labels.castTo(preOutput.dataType());   //No-op if already correct dtype
    final INDArray postOutput = activationFn.getActivation(preOutput.dup(), true);

    final INDArray positive = labels;
    final INDArray negative = labels.eq(0.0).castTo(Nd4j.defaultFloatingPointType());
    final INDArray normFactor = negative.sum(true,1).castTo(Nd4j.defaultFloatingPointType()).muli(positive.sum(true,1));


    long examples = positive.size(0);
    for (int i = 0; i < examples; i++) {
        final INDArray locCfn = postOutput.getRow(i, true);
        final long[] shape = locCfn.shape();

        final INDArray locPositive = positive.getRow(i, true);
        final INDArray locNegative = negative.getRow(i, true);
        final Double locNormFactor = normFactor.getDouble(i);

        final int outSetSize = locNegative.sumNumber().intValue();
        if(outSetSize == 0 || outSetSize == locNegative.columns()){
            if (scoreOutput != null) {
                scoreOutput.getRow(i, true).assign(0);
            }

            if (gradientOutput != null) {
                gradientOutput.getRow(i, true).assign(0);
            }
        }else {
            final INDArray operandA = Nd4j.ones(shape[1], shape[0]).mmul(locCfn);
            final INDArray operandB = operandA.transpose();

            final INDArray pairwiseSub = Transforms.exp(operandA.sub(operandB));

            final INDArray selection = locPositive.transpose().mmul(locNegative);

            final INDArray classificationDifferences = pairwiseSub.muli(selection).divi(locNormFactor);

            if (scoreOutput != null) {
                if (mask != null) {
                    final INDArray perLabel = classificationDifferences.sum(0);
                    LossUtil.applyMask(perLabel, mask.getRow(i, true));
                    perLabel.sum(scoreOutput.getRow(i, true), 0);
                } else {
                    classificationDifferences.sum(scoreOutput.getRow(i, true), 0, 1);
                }
            }

            if (gradientOutput != null) {
                gradientOutput.getRow(i, true).assign(classificationDifferences.sum(true, 0).addi(classificationDifferences.sum(true,1).transposei().negi()));
            }
        }
    }

    if (gradientOutput != null) {
        gradientOutput.assign(activationFn.backprop(preOutput.dup(), gradientOutput).getFirst());
        //multiply with masks, always
        if (mask != null) {
            LossUtil.applyMask(gradientOutput, mask);
        }
    }
}
 
Example 17
Source File: LossMultiLabel.java    From nd4j with Apache License 2.0
private void calculate(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask, INDArray scoreOutput, INDArray gradientOutput) {
    if (scoreOutput == null && gradientOutput == null) {
        throw new IllegalArgumentException("You have to provide at least one of scoreOutput or gradientOutput!");
    }
    if (labels.size(1) != preOutput.size(1)) {
        throw new IllegalArgumentException(
                "Labels array numColumns (size(1) = " + labels.size(1) + ") does not match output layer"
                        + " number of outputs (nOut = " + preOutput.size(1) + ") ");

    }
    final INDArray postOutput = activationFn.getActivation(preOutput.dup(), true);

    final INDArray positive = labels;
    final INDArray negative = labels.eq(0.0);
    final INDArray normFactor = negative.sum(1).muli(positive.sum(1));


    long examples = positive.size(0);
    for (int i = 0; i < examples; i++) {
        final INDArray locCfn = postOutput.getRow(i);
        final long[] shape = locCfn.shape();

        final INDArray locPositive = positive.getRow(i);
        final INDArray locNegative = negative.getRow(i);
        final Double locNormFactor = normFactor.getDouble(i);

        final INDArray operandA = Nd4j.ones(shape[1], shape[0]).mmul(locCfn);
        final INDArray operandB = operandA.transpose();

        final INDArray pairwiseSub = Transforms.exp(operandA.sub(operandB));

        final INDArray selection = locPositive.transpose().mmul(locNegative);

        final INDArray classificationDifferences = pairwiseSub.muli(selection).divi(locNormFactor);

        if (scoreOutput != null) {
            if (mask != null) {
                final INDArray perLabel = classificationDifferences.sum(0);
                LossUtil.applyMask(perLabel, mask.getRow(i));
                perLabel.sum(scoreOutput.getRow(i), 0);
            } else {
                classificationDifferences.sum(scoreOutput.getRow(i), 0, 1);
            }
        }

        if (gradientOutput != null) {
            gradientOutput.getRow(i).assign(classificationDifferences.sum(0).addi(classificationDifferences.sum(1).transposei().negi()));
        }
    }

    if (gradientOutput != null) {
        gradientOutput.assign(activationFn.backprop(preOutput.dup(), gradientOutput).getFirst());
        //multiply with masks, always
        if (mask != null) {
            LossUtil.applyMask(gradientOutput, mask);
        }
    }
}
 
Example 18
Source File: TestComputationGraphNetwork.java    From deeplearning4j with Apache License 2.0
@Test
public void testScoreExamples() {
    Nd4j.getRandom().setSeed(12345);
    int nIn = 5;
    int nOut = 6;
    ComputationGraphConfiguration conf =
            new NeuralNetConfiguration.Builder().seed(12345).l1(0.01).l2(0.01)
                    .updater(new Sgd(0.1))
                    .activation(Activation.TANH).weightInit(WeightInit.XAVIER)
                    .graphBuilder().addInputs("in")
                    .addLayer("0", new DenseLayer.Builder().nIn(nIn).nOut(20).build(), "in")
                    .addLayer("1", new DenseLayer.Builder().nIn(20).nOut(30).build(), "0")
                    .addLayer("2", new OutputLayer.Builder()
                            .lossFunction(LossFunctions.LossFunction.MSE).nIn(30).nOut(nOut)
                            .build(), "1")
                    .setOutputs("2").build();

    ComputationGraphConfiguration confNoReg =
            new NeuralNetConfiguration.Builder().seed(12345).updater(new Sgd(0.1)).activation(Activation.TANH)
                    .weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
                    .addLayer("0", new DenseLayer.Builder().nIn(nIn).nOut(20).build(), "in")
                    .addLayer("1", new DenseLayer.Builder().nIn(20).nOut(30).build(), "0")
                    .addLayer("2", new OutputLayer.Builder()
                            .lossFunction(LossFunctions.LossFunction.MSE).nIn(30).nOut(nOut)
                            .build(), "1")
                    .setOutputs("2").build();


    ComputationGraph net = new ComputationGraph(conf);
    net.init();

    ComputationGraph netNoReg = new ComputationGraph(confNoReg);
    netNoReg.init();
    netNoReg.setParams(net.params().dup());

    //Score single example, and compare to scoreExamples:
    INDArray input = Nd4j.rand(3, nIn);
    INDArray output = Nd4j.rand(3, nOut);
    DataSet ds = new DataSet(input, output);

    INDArray scoresWithRegularization = net.scoreExamples(ds, true);
    INDArray scoresNoRegularization = net.scoreExamples(ds, false);

    assertArrayEquals(new long[]{3, 1}, scoresWithRegularization.shape());
    assertArrayEquals(new long[]{3, 1}, scoresNoRegularization.shape());

    for (int i = 0; i < 3; i++) {
        DataSet singleEx = new DataSet(input.getRow(i,true), output.getRow(i,true));
        double score = net.score(singleEx);
        double scoreNoReg = netNoReg.score(singleEx);

        double scoreUsingScoreExamples = scoresWithRegularization.getDouble(i);
        double scoreUsingScoreExamplesNoReg = scoresNoRegularization.getDouble(i);
        assertEquals(score, scoreUsingScoreExamples, 1e-4);
        assertEquals(scoreNoReg, scoreUsingScoreExamplesNoReg, 1e-4);
        assertTrue(scoreUsingScoreExamples > scoreUsingScoreExamplesNoReg); //Regularization term increases score

        //            System.out.println(score + "\t" + scoreUsingScoreExamples + "\t|\t" + scoreNoReg + "\t" + scoreUsingScoreExamplesNoReg);
    }
}
 
Example 19
Source File: ShufflesTests.java    From nd4j with Apache License 2.0
@Test
public void testSymmetricShuffle1() {
    INDArray features = Nd4j.zeros(10, 10);
    INDArray labels = Nd4j.zeros(10, 3);
    for (int x = 0; x < 10; x++) {
        features.getRow(x).assign(x);
        labels.getRow(x).assign(x);
    }

    System.out.println(features);

    OrderScanner2D scanner = new OrderScanner2D(features);

    assertArrayEquals(new float[] {0f, 1f, 2f, 3f, 4f, 5f, 6f, 7f, 8f, 9f}, scanner.getMap(), 0.01f);

    System.out.println();

    List<INDArray> list = new ArrayList<>();
    list.add(features);
    list.add(labels);

    Nd4j.shuffle(list, 1);

    System.out.println(features);

    System.out.println();

    System.out.println(labels);

    ArrayUtil.argMin(new int[] {});

    assertTrue(scanner.compareRow(features));

    for (int x = 0; x < 10; x++) {
        double val = features.getRow(x).getDouble(0);
        INDArray row = labels.getRow(x);

        for (int y = 0; y < row.length(); y++) {
            assertEquals(val, row.getDouble(y), 0.001);
        }
    }
}