Java Code Examples for org.nd4j.linalg.factory.Nd4j#vstack()

The following examples show how to use org.nd4j.linalg.factory.Nd4j#vstack(). Each example comes from an open-source project; the source file and license are noted above each snippet. vstack concatenates its inputs along dimension 0 (row-wise), so all inputs must agree in every remaining dimension.
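Before the project examples, here is a minimal standalone sketch (illustrative only, not taken from any of the projects below). Nd4j.vstack accepts either varargs or a Collection<INDArray>, and stacks along dimension 0:

import java.util.Arrays;
import java.util.List;

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class VstackDemo {
    public static void main(String[] args) {
        // Two 1x3 row vectors
        INDArray a = Nd4j.create(new double[] {1, 2, 3}, new int[] {1, 3});
        INDArray b = Nd4j.create(new double[] {4, 5, 6}, new int[] {1, 3});

        // Varargs form: stacked along dimension 0 into a 2x3 matrix
        INDArray stacked = Nd4j.vstack(a, b);
        System.out.println(Arrays.toString(stacked.shape()));            // [2, 3]

        // Collection form, as used by several of the examples below
        List<INDArray> rows = Arrays.asList(a, b, a);
        System.out.println(Arrays.toString(Nd4j.vstack(rows).shape()));  // [3, 3]
    }
}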
Example 1
Source File: SpecialTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testVstack1() {
    INDArray matrix = Nd4j.create(10000, 100);

    // Collect 5000 random row views (not copies) of the source matrix
    List<INDArray> views = new ArrayList<>();
    for (int i = 0; i < matrix.rows() / 2; i++) {
        views.add(matrix.getRow(RandomUtils.nextInt(0, matrix.rows())));
    }

    // vstack must handle a large list of non-contiguous row views
    INDArray result = Nd4j.vstack(views);
    System.gc();
}
 
Example 2
Source File: ConcatTests.java    From nd4j with Apache License 2.0
@Test
public void testConcatRowVectors() {
    INDArray rowVector1 = Nd4j.create(new double[] {1, 2, 3, 4, 5, 6}, new int[] {1, 6});
    INDArray rowVector2 = Nd4j.create(new double[] {7, 8, 9, 10, 11, 12}, new int[] {1, 6});

    // Expected: the two 1x6 row vectors stacked into a 2x6 matrix
    INDArray expected = Nd4j.create(new double[][] {{1, 2, 3, 4, 5, 6}, {7, 8, 9, 10, 11, 12}});

    INDArray stacked = Nd4j.vstack(rowVector1, rowVector2);
    assertEquals(expected, stacked);
}
 
Example 3
Source File: RegressionEvalTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testRegressionEval3d() {
    INDArray prediction = Nd4j.rand(DataType.FLOAT, 2, 5, 10);
    INDArray label = Nd4j.rand(DataType.FLOAT, 2, 5, 10);


    List<INDArray> rowsP = new ArrayList<>();
    List<INDArray> rowsL = new ArrayList<>();
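    // For each (example, timestep) pair, take the length-5 slice across dim 1; vstack rebuilds these as rows of a [20, 5] 2d array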
    NdIndexIterator iter = new NdIndexIterator(2, 10);
    while (iter.hasNext()) {
        long[] idx = iter.next();
        INDArrayIndex[] idxs = new INDArrayIndex[]{NDArrayIndex.point(idx[0]), NDArrayIndex.all(), NDArrayIndex.point(idx[1])};
        rowsP.add(prediction.get(idxs));
        rowsL.add(label.get(idxs));
    }

    INDArray p2d = Nd4j.vstack(rowsP);
    INDArray l2d = Nd4j.vstack(rowsL);

    RegressionEvaluation e3d = new RegressionEvaluation();
    RegressionEvaluation e2d = new RegressionEvaluation();

    e3d.eval(label, prediction);
    e2d.eval(l2d, p2d);

    for (Metric m : Metric.values()) {
        double d1 = e3d.scoreForMetric(m);
        double d2 = e2d.scoreForMetric(m);
        assertEquals(m.toString(), d2, d1, 1e-6);
    }
}
 
Example 4
Source File: EvaluationCalibrationTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testEvaluationCalibration3dMasking() {
    INDArray prediction = Nd4j.rand(DataType.FLOAT, 2, 3, 10);
    INDArray label = Nd4j.rand(DataType.FLOAT, 2, 3, 10);

    List<INDArray> rowsP = new ArrayList<>();
    List<INDArray> rowsL = new ArrayList<>();

    //Check "DL4J-style" 2d per timestep masking [minibatch, seqLength] mask shape
    INDArray mask2d = Nd4j.randomBernoulli(0.5, 2, 10);
    NdIndexIterator iter = new NdIndexIterator(2, 10);
    while (iter.hasNext()) {
        long[] idx = iter.next();
        if(mask2d.getDouble(idx[0], idx[1]) != 0.0) {
            INDArrayIndex[] idxs = new INDArrayIndex[]{NDArrayIndex.point(idx[0]), NDArrayIndex.all(), NDArrayIndex.point(idx[1])};
            rowsP.add(prediction.get(idxs));
            rowsL.add(label.get(idxs));
        }
    }
    INDArray p2d = Nd4j.vstack(rowsP);
    INDArray l2d = Nd4j.vstack(rowsL);

    EvaluationCalibration e3d_m2d = new EvaluationCalibration();
    EvaluationCalibration e2d_m2d = new EvaluationCalibration();
    e3d_m2d.eval(label, prediction, mask2d);
    e2d_m2d.eval(l2d, p2d);

    assertEquals(e3d_m2d, e2d_m2d);
}
 
Example 5
Source File: ROCBinaryTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testROCBinary3d() {
    INDArray prediction = Nd4j.rand(DataType.FLOAT, 2, 5, 10);
    INDArray label = Nd4j.rand(DataType.FLOAT, 2, 5, 10);


    List<INDArray> rowsP = new ArrayList<>();
    List<INDArray> rowsL = new ArrayList<>();
    NdIndexIterator iter = new NdIndexIterator(2, 10);
    while (iter.hasNext()) {
        long[] idx = iter.next();
        INDArrayIndex[] idxs = new INDArrayIndex[]{NDArrayIndex.point(idx[0]), NDArrayIndex.all(), NDArrayIndex.point(idx[1])};
        rowsP.add(prediction.get(idxs));
        rowsL.add(label.get(idxs));
    }

    INDArray p2d = Nd4j.vstack(rowsP);
    INDArray l2d = Nd4j.vstack(rowsL);

    ROCBinary e3d = new ROCBinary();
    ROCBinary e2d = new ROCBinary();

    e3d.eval(label, prediction);
    e2d.eval(l2d, p2d);

    for (ROCBinary.Metric m : ROCBinary.Metric.values()) {
        for( int i=0; i<5; i++ ) {
            double d1 = e3d.scoreForMetric(m, i);
            double d2 = e2d.scoreForMetric(m, i);
            assertEquals(m.toString(), d2, d1, 1e-6);
        }
    }
}
 
Example 6
Source File: LoneTest.java    From nd4j with Apache License 2.0
@Test
public void testConcat3D_Vstack_C() throws Exception {
    int[] shape = new int[]{1, 1000, 150};

    List<INDArray> cArrays = new ArrayList<>();
    for (int e = 0; e < 32; e++) {
        cArrays.add(Nd4j.create(shape, 'c').assign(e));
    }

    Nd4j.getExecutioner().commit();

    long time1 = System.currentTimeMillis();
    INDArray res = Nd4j.vstack(cArrays);
    long time2 = System.currentTimeMillis();

    log.info("Time spent: {} ms", time2 - time1);

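    // Each input chunk was assigned the constant e; after vstack, the e-th [1000, 150] tensor along dims (1, 2) must average to e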
    for (int e = 0; e < 32; e++) {
        INDArray tad = res.tensorAlongDimension(e, 1, 2);
        assertEquals((double) e, tad.meanNumber().doubleValue(), 1e-5);
    }
}
 
Example 7
Source File: RecordConverterTest.java    From DataVec with Apache License 2.0
@Test
public void toRecords_PassInClassificationDataSet_ExpectNDArrayAndIntWritables() {
    INDArray feature1 = Nd4j.create(new double[]{4, -5.7, 10, -0.1});
    INDArray feature2 = Nd4j.create(new double[]{11, .7, -1.3, 4});
    INDArray label1 = Nd4j.create(new double[]{0, 0, 1, 0});
    INDArray label2 = Nd4j.create(new double[]{0, 1, 0, 0});
    DataSet dataSet = new DataSet(Nd4j.vstack(Lists.newArrayList(feature1, feature2)),
            Nd4j.vstack(Lists.newArrayList(label1, label2)));

    List<List<Writable>> writableList = RecordConverter.toRecords(dataSet);

    assertEquals(2, writableList.size());
    testClassificationWritables(feature1, 2, writableList.get(0));
    testClassificationWritables(feature2, 1, writableList.get(1));
}
 
Example 8
Source File: Nd4jTestsF.java    From deeplearning4j with Apache License 2.0
@Test
public void testConcat3D_Vstack_F() {
    int[] shape = new int[] {1, 1000, 150};

    List<INDArray> fArrays = new ArrayList<>();
    for (int e = 0; e < 32; e++) {
        fArrays.add(Nd4j.create(shape, 'f').assign(e));
    }

    Nd4j.getExecutioner().commit();

    long time1 = System.currentTimeMillis();
    INDArray res = Nd4j.vstack(fArrays);
    long time2 = System.currentTimeMillis();

    log.info("Time spent: {} ms", time2 - time1);

    for (int e = 0; e < 32; e++) {
        INDArray tad = res.tensorAlongDimension(e, 1, 2);
        assertEquals((double) e, tad.meanNumber().doubleValue(), 1e-5);
    }
}
 
Example 9
Source File: PLNetDyadRanker.java    From AILibs with GNU Affero General Public License v3.0
/**
 * Converts a dyad ranking to an {@link INDArray} matrix where each row
 * corresponds to a dyad.
 *
 * @param drInstance
 *            The dyad ranking to convert to a matrix.
 * @return The dyad ranking in {@link INDArray} matrix form.
 */
private INDArray dyadRankingToMatrix(final IDyadRankingInstance drInstance) {
	List<INDArray> dyadList = new ArrayList<>(drInstance.getNumAttributes());
	for (IDyad dyad : drInstance) {
		INDArray dyadVector = this.dyadToVector(dyad);
		dyadList.add(dyadVector);
	}
	INDArray dyadMatrix = Nd4j.vstack(dyadList);
	return dyadMatrix;
}
 
Example 10
Source File: EvaluationBinaryTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testEvaluationBinary3d() {
    INDArray prediction = Nd4j.rand(DataType.FLOAT, 2, 5, 10);
    INDArray label = Nd4j.rand(DataType.FLOAT, 2, 5, 10);


    List<INDArray> rowsP = new ArrayList<>();
    List<INDArray> rowsL = new ArrayList<>();
    NdIndexIterator iter = new NdIndexIterator(2, 10);
    while (iter.hasNext()) {
        long[] idx = iter.next();
        INDArrayIndex[] idxs = new INDArrayIndex[]{NDArrayIndex.point(idx[0]), NDArrayIndex.all(), NDArrayIndex.point(idx[1])};
        rowsP.add(prediction.get(idxs));
        rowsL.add(label.get(idxs));
    }

    INDArray p2d = Nd4j.vstack(rowsP);
    INDArray l2d = Nd4j.vstack(rowsL);

    EvaluationBinary e3d = new EvaluationBinary();
    EvaluationBinary e2d = new EvaluationBinary();

    e3d.eval(label, prediction);
    e2d.eval(l2d, p2d);

    for (EvaluationBinary.Metric m : EvaluationBinary.Metric.values()) {
        for( int i=0; i<5; i++ ) {
            double d1 = e3d.scoreForMetric(m, i);
            double d2 = e2d.scoreForMetric(m, i);
            assertEquals(m.toString(), d2, d1, 1e-6);
        }
    }
}
 
Example 11
Source File: SpecialTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testVstack2() {
    INDArray matrix = Nd4j.create(10000, 100);

    List<INDArray> views = new ArrayList<>();
    views.add(matrix.getRow(1));
    views.add(matrix.getRow(4));
    views.add(matrix.getRow(7));

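    // Stacking non-contiguous row views of a 10000x100 matrix; result is 3x100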
    INDArray result = Nd4j.vstack(views);
}
 
Example 12
Source File: ROCBinaryTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testROCBinary4d() {
    INDArray prediction = Nd4j.rand(DataType.FLOAT, 2, 3, 10, 10);
    INDArray label = Nd4j.rand(DataType.FLOAT, 2, 3, 10, 10);


    List<INDArray> rowsP = new ArrayList<>();
    List<INDArray> rowsL = new ArrayList<>();
    NdIndexIterator iter = new NdIndexIterator(2, 10, 10);
    while (iter.hasNext()) {
        long[] idx = iter.next();
        INDArrayIndex[] idxs = new INDArrayIndex[]{NDArrayIndex.point(idx[0]), NDArrayIndex.all(), NDArrayIndex.point(idx[1]), NDArrayIndex.point(idx[2])};
        rowsP.add(prediction.get(idxs));
        rowsL.add(label.get(idxs));
    }

    INDArray p2d = Nd4j.vstack(rowsP);
    INDArray l2d = Nd4j.vstack(rowsL);

    ROCBinary e4d = new ROCBinary();
    ROCBinary e2d = new ROCBinary();

    e4d.eval(label, prediction);
    e2d.eval(l2d, p2d);

    for (ROCBinary.Metric m : ROCBinary.Metric.values()) {
        for( int i=0; i<3; i++ ) {
            double d1 = e4d.scoreForMetric(m, i);
            double d2 = e2d.scoreForMetric(m, i);
            assertEquals(m.toString(), d2, d1, 1e-6);
        }
    }
}
 
Example 13
Source File: TestSameDiffOutput.java    From deeplearning4j with Apache License 2.0
@Test
public void testOutputMSELossLayer(){
    Nd4j.getRandom().setSeed(12345);

    MultiLayerConfiguration confSD = new NeuralNetConfiguration.Builder()
            .seed(12345)
            .updater(new Adam(0.01))
            .list()
            .layer(new DenseLayer.Builder().nIn(5).nOut(5).activation(Activation.TANH).build())
            .layer(new SameDiffMSELossLayer())
            .build();

    MultiLayerConfiguration confStd = new NeuralNetConfiguration.Builder()
            .seed(12345)
            .updater(new Adam(0.01))
            .list()
            .layer(new DenseLayer.Builder().nIn(5).nOut(5).activation(Activation.TANH).build())
            .layer(new LossLayer.Builder().activation(Activation.IDENTITY).lossFunction(LossFunctions.LossFunction.MSE).build())
            .build();

    MultiLayerNetwork netSD = new MultiLayerNetwork(confSD);
    netSD.init();

    MultiLayerNetwork netStd = new MultiLayerNetwork(confStd);
    netStd.init();

    INDArray in = Nd4j.rand(3, 5);
    INDArray label = Nd4j.rand(3,5);

    INDArray outSD = netSD.output(in);
    INDArray outStd = netStd.output(in);
    assertEquals(outStd, outSD);

    DataSet ds = new DataSet(in, label);
    double scoreSD = netSD.score(ds);
    double scoreStd = netStd.score(ds);
    assertEquals(scoreStd, scoreSD, 1e-6);

    for( int i=0; i<3; i++ ){
        netSD.fit(ds);
        netStd.fit(ds);

        assertEquals(netStd.params(), netSD.params());
        assertEquals(netStd.getFlattenedGradients(), netSD.getFlattenedGradients());
    }

    //Test fit before output:
    MultiLayerNetwork net = new MultiLayerNetwork(confSD.clone());
    net.init();
    net.fit(ds);

    //Sanity check on different minibatch sizes:
    INDArray newIn = Nd4j.vstack(in, in);
    INDArray outMbsd = netSD.output(newIn);
    INDArray outMb = netStd.output(newIn);
    assertEquals(outMb, outMbsd);
}
 
Example 14
Source File: TestSameDiffConv.java    From deeplearning4j with Apache License 2.0
@Test
public void testSameDiffConvGradient() {
    int imgH = 8;
    int imgW = 8;
    int nIn = 3;
    int nOut = 4;
    int[] kernel = {2, 2};
    int[] strides = {1, 1};
    int[] dilation = {1, 1};

    int count = 0;

    //Note: to avoid the exponential number of tests here, we'll randomly run every Nth test only.
    //With n=1, m=5 this runs 1 out of every 5 tests (on average)
    Random r = new Random(12345);
    int n = 1;
    int m = 5;
    for(boolean workspaces : new boolean[]{false, true}) {
        for (int minibatch : new int[]{5, 1}) {
            for (boolean hasBias : new boolean[]{true, false}) {
                for (ConvolutionMode cm : new ConvolutionMode[]{ConvolutionMode.Truncate, ConvolutionMode.Same}) {
                    int i = r.nextInt(m);
                    if (i >= n) {
                        //Example: n=2, m=3... skip on i=2, run test on i=0, i=1
                        continue;
                    }

                    String msg = "Test " + (count++) + " - minibatch=" + minibatch + ", ConvolutionMode=" + cm + ", hasBias=" + hasBias;

                    int outH = cm == ConvolutionMode.Same ? imgH : (imgH-2);
                    int outW = cm == ConvolutionMode.Same ? imgW : (imgW-2);

                    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                            .dataType(DataType.DOUBLE)
                            .seed(12345)
                            .updater(new NoOp())
                            .trainingWorkspaceMode(workspaces ? WorkspaceMode.ENABLED : WorkspaceMode.NONE)
                            .inferenceWorkspaceMode(workspaces ? WorkspaceMode.ENABLED : WorkspaceMode.NONE)
                            .list()
                            .layer(new SameDiffConv.Builder()
                                    .weightInit(WeightInit.XAVIER)
                                    .nIn(nIn)
                                    .nOut(nOut)
                                    .kernelSize(kernel)
                                    .stride(strides)
                                    .dilation(dilation)
                                    .convolutionMode(cm)
                                    .activation(Activation.TANH)
                                    .hasBias(hasBias)
                                    .build())
                            .layer(new SameDiffConv.Builder()
                                    .weightInit(WeightInit.XAVIER)
                                    .nIn(nOut)
                                    .nOut(nOut)
                                    .kernelSize(kernel)
                                    .stride(strides)
                                    .dilation(dilation)
                                    .convolutionMode(cm)
                                    .activation(Activation.SIGMOID)
                                    .hasBias(hasBias)
                                    .build())
                            .layer(new OutputLayer.Builder().activation(Activation.SOFTMAX)
                                    .lossFunction(LossFunctions.LossFunction.MCXENT)
                                    .nIn(nOut * outH * outW)
                                    .nOut(nOut).build())
                            .inputPreProcessor(2, new CnnToFeedForwardPreProcessor(outH, outW, nOut))
                            .build();

                    MultiLayerNetwork net = new MultiLayerNetwork(conf);
                    net.init();

                    INDArray f = Nd4j.rand(new int[]{minibatch, nIn, imgH, imgW});
                    INDArray l = TestUtils.randomOneHot(minibatch, nOut);

                    log.info("Starting: " + msg);
                    boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(f)
                            .labels(l).subset(true).maxPerParam(50));

                    assertTrue(msg, gradOK);

                    TestUtils.testModelSerialization(net);

                    //Sanity check on different minibatch sizes:
                    INDArray newIn = Nd4j.vstack(f, f);
                    net.output(newIn);
                }
            }
        }
    }
}
 
Example 15
Source File: TestBertIterator.java    From deeplearning4j with Apache License 2.0
@Test(timeout = 20000L)
public void testMinibatchPadding() throws Exception {
    Nd4j.setDefaultDataTypes(DataType.FLOAT, DataType.FLOAT);
    int minibatchSize = 3;
    TestSentenceHelper testHelper = new TestSentenceHelper(minibatchSize);
    INDArray zeros = Nd4j.create(DataType.INT, 1, 16);
    INDArray expF = Nd4j.create(DataType.INT, 1, 16);
    INDArray expM = Nd4j.create(DataType.INT, 1, 16);
    Map<String, Integer> m = testHelper.getTokenizer().getVocab();
    for (int i = 0; i < minibatchSize; i++) {
        List<String> tokens = testHelper.getTokenizedSentences().get(i);
        INDArray expFTemp = Nd4j.create(DataType.INT, 1, 16);
        INDArray expMTemp = Nd4j.create(DataType.INT, 1, 16);
        System.out.println(tokens);
        for (int j = 0; j < tokens.size(); j++) {
            String token = tokens.get(j);
            if (!m.containsKey(token)) {
                throw new IllegalStateException("Unknown token: \"" + token + "\"");
            }
            int idx = m.get(token);
            expFTemp.putScalar(0, j, idx);
            expMTemp.putScalar(0, j, 1);
        }
        if (i == 0) {
            expF = expFTemp.dup();
            expM = expMTemp.dup();
        } else {
            expF = Nd4j.vstack(expF.dup(), expFTemp);
            expM = Nd4j.vstack(expM.dup(), expMTemp);
        }
    }

    expF = Nd4j.vstack(expF, zeros);
    expM = Nd4j.vstack(expM, zeros);
    INDArray expL = Nd4j.createFromArray(new float[][]{{0, 1}, {1, 0}, {0, 1}, {0, 0}});
    INDArray expLM = Nd4j.create(DataType.FLOAT, 4, 1);
    expLM.putScalar(0, 0, 1);
    expLM.putScalar(1, 0, 1);
    expLM.putScalar(2, 0, 1);

    //--------------------------------------------------------------

    BertIterator b = BertIterator.builder()
            .tokenizer(testHelper.getTokenizer())
            .lengthHandling(BertIterator.LengthHandling.FIXED_LENGTH, 16)
            .minibatchSize(minibatchSize + 1)
            .padMinibatches(true)
            .sentenceProvider(testHelper.getSentenceProvider())
            .featureArrays(BertIterator.FeatureArrays.INDICES_MASK_SEGMENTID)
            .vocabMap(testHelper.getTokenizer().getVocab())
            .task(BertIterator.Task.SEQ_CLASSIFICATION)
            .build();

    MultiDataSet mds = b.next();
    long[] expShape = {4, 16};
    assertArrayEquals(expShape, mds.getFeatures(0).shape());
    assertArrayEquals(expShape, mds.getFeatures(1).shape());
    assertArrayEquals(expShape, mds.getFeaturesMaskArray(0).shape());

    long[] lShape = {4, 2};
    long[] lmShape = {4, 1};
    assertArrayEquals(lShape, mds.getLabels(0).shape());
    assertArrayEquals(lmShape, mds.getLabelsMaskArray(0).shape());

    assertEquals(expF, mds.getFeatures(0));
    assertEquals(expM, mds.getFeaturesMaskArray(0));
    assertEquals(expL, mds.getLabels(0));
    assertEquals(expLM, mds.getLabelsMaskArray(0));

    assertEquals(expF, b.featurizeSentences(testHelper.getSentences()).getFirst()[0]);
    assertEquals(expM, b.featurizeSentences(testHelper.getSentences()).getSecond()[0]);
}
 
Example 16
Source File: TestSameDiffLambda.java    From deeplearning4j with Apache License 2.0
@Test
public void testSameDiffLamdaLayerBasic(){
    for(WorkspaceMode wsm : new WorkspaceMode[]{WorkspaceMode.ENABLED, WorkspaceMode.NONE}) {
        log.info("--- Workspace Mode: {} ---", wsm);


        Nd4j.getRandom().setSeed(12345);
        ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
                .trainingWorkspaceMode(wsm)
                .inferenceWorkspaceMode(wsm)
                .seed(12345)
                .updater(new Adam(0.01))
                .graphBuilder()
                .addInputs("in")
                .addLayer("0", new DenseLayer.Builder().nIn(5).nOut(5).activation(Activation.TANH).build(), "in")
                .addLayer("1", new SameDiffSimpleLambdaLayer(), "0")
                .addLayer("2", new OutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX)
                        .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "1")
                .setOutputs("2")
                .build();

        //Equivalent, not using SameDiff Lambda:
        ComputationGraphConfiguration confStd = new NeuralNetConfiguration.Builder()
                .trainingWorkspaceMode(wsm)
                .inferenceWorkspaceMode(wsm)
                .seed(12345)
                .updater(new Adam(0.01))
                .graphBuilder()
                .addInputs("in")
                .addLayer("0", new DenseLayer.Builder().nIn(5).nOut(5).activation(Activation.TANH).build(), "in")
                .addVertex("1", new ShiftVertex(1.0), "0")
                .addVertex("2", new ScaleVertex(2.0), "1")
                .addLayer("3", new OutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX)
                        .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "2")
                .setOutputs("3")
                .build();

        ComputationGraph lambda = new ComputationGraph(conf);
        lambda.init();

        ComputationGraph std = new ComputationGraph(confStd);
        std.init();

        lambda.setParams(std.params());

        INDArray in = Nd4j.rand(3, 5);
        INDArray labels = TestUtils.randomOneHot(3, 5);
        DataSet ds = new DataSet(in, labels);

        INDArray outLambda = lambda.outputSingle(in);
        INDArray outStd = std.outputSingle(in);

        assertEquals(outLambda, outStd);

        double scoreLambda = lambda.score(ds);
        double scoreStd = std.score(ds);

        assertEquals(scoreStd, scoreLambda, 1e-6);

        for (int i = 0; i < 3; i++) {
            lambda.fit(ds);
            std.fit(ds);

            String s = String.valueOf(i);
            assertEquals(s, std.params(), lambda.params());
            assertEquals(s, std.getFlattenedGradients(), lambda.getFlattenedGradients());
        }

        ComputationGraph loaded = TestUtils.testModelSerialization(lambda);
        outLambda = loaded.outputSingle(in);
        outStd = std.outputSingle(in);

        assertEquals(outStd, outLambda);

        //Sanity check on different minibatch sizes:
        INDArray newIn = Nd4j.vstack(in, in);
        INDArray outMbsd = lambda.output(newIn)[0];
        INDArray outMb = std.output(newIn)[0];
        assertEquals(outMb, outMbsd);
    }
}
 
Example 17
Source File: TestSameDiffLambda.java    From deeplearning4j with Apache License 2.0
@Test
public void testSameDiffLamdaVertexBasic(){
    for(WorkspaceMode wsm : new WorkspaceMode[]{WorkspaceMode.ENABLED, WorkspaceMode.NONE}) {
        log.info("--- Workspace Mode: {} ---", wsm);

        Nd4j.getRandom().setSeed(12345);
        ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
                .trainingWorkspaceMode(wsm)
                .inferenceWorkspaceMode(wsm)
                .dataType(DataType.DOUBLE)
                .seed(12345)
                .updater(new Adam(0.01))
                .graphBuilder()
                .addInputs("in1", "in2")
                .addLayer("0", new DenseLayer.Builder().nIn(5).nOut(5).activation(Activation.TANH).build(), "in1")
                .addLayer("1", new DenseLayer.Builder().nIn(5).nOut(5).activation(Activation.TANH).build(), "in2")
                .addVertex("lambda", new SameDiffSimpleLambdaVertex(), "0", "1")
                .addLayer("2", new OutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX)
                        .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "lambda")
                .setOutputs("2")
                .build();

        //Equivalent, not using SameDiff Lambda:
        ComputationGraphConfiguration confStd = new NeuralNetConfiguration.Builder()
                .trainingWorkspaceMode(wsm)
                .inferenceWorkspaceMode(wsm)
                .dataType(DataType.DOUBLE)
                .seed(12345)
                .updater(new Adam(0.01))
                .graphBuilder()
                .addInputs("in1", "in2")
                .addLayer("0", new DenseLayer.Builder().nIn(5).nOut(5).activation(Activation.TANH).build(), "in1")
                .addLayer("1", new DenseLayer.Builder().nIn(5).nOut(5).activation(Activation.TANH).build(), "in2")
                .addVertex("elementwise", new ElementWiseVertex(ElementWiseVertex.Op.Product), "0", "1")
                .addLayer("3", new OutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX)
                        .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "elementwise")
                .setOutputs("3")
                .build();

        ComputationGraph lambda = new ComputationGraph(conf);
        lambda.init();

        ComputationGraph std = new ComputationGraph(confStd);
        std.init();

        lambda.setParams(std.params());

        INDArray in1 = Nd4j.rand(3, 5);
        INDArray in2 = Nd4j.rand(3, 5);
        INDArray labels = TestUtils.randomOneHot(3, 5);
        MultiDataSet mds = new org.nd4j.linalg.dataset.MultiDataSet(new INDArray[]{in1, in2}, new INDArray[]{labels});

        INDArray outLambda = lambda.output(in1, in2)[0];
        INDArray outStd = std.output(in1, in2)[0];

        assertEquals(outLambda, outStd);

        double scoreLambda = lambda.score(mds);
        double scoreStd = std.score(mds);

        assertEquals(scoreStd, scoreLambda, 1e-6);

        for (int i = 0; i < 3; i++) {
            lambda.fit(mds);
            std.fit(mds);

            String s = String.valueOf(i);
            assertEquals(s, std.params(), lambda.params());
            assertEquals(s, std.getFlattenedGradients(), lambda.getFlattenedGradients());
        }

        ComputationGraph loaded = TestUtils.testModelSerialization(lambda);
        outLambda = loaded.output(in1, in2)[0];
        outStd = std.output(in1, in2)[0];

        assertEquals(outStd, outLambda);

        //Sanity check on different minibatch sizes:
        INDArray newIn1 = Nd4j.vstack(in1, in1);
        INDArray newIn2 = Nd4j.vstack(in2, in2);
        INDArray outMbsd = lambda.output(newIn1, newIn2)[0];
        INDArray outMb = std.output(newIn1, newIn2)[0];
        assertEquals(outMb, outMbsd);
    }
}
 
Example 18
Source File: AbstractDataSetIterator.java    From deeplearning4j with Apache License 2.0
protected void fillQueue() {
    if (queue.isEmpty()) {
        List<INDArray> ndLabels = null;
        List<INDArray> ndFeatures = null;
        float[][] fLabels = null;
        float[][] fFeatures = null;
        double[][] dLabels = null;
        double[][] dFeatures = null;

        int sampleCount = 0;

        for (int cnt = 0; cnt < batchSize; cnt++) {
            if (iterator.hasNext()) {
                Pair<T, T> pair = iterator.next();
                if (numFeatures < 1) {
                    if (pair.getFirst() instanceof INDArray) {
                        numFeatures = (int) ((INDArray) pair.getFirst()).length();
                        numLabels = (int) ((INDArray) pair.getSecond()).length();
                    } else if (pair.getFirst() instanceof float[]) {
                        numFeatures = ((float[]) pair.getFirst()).length;
                        numLabels = ((float[]) pair.getSecond()).length;
                    } else if (pair.getFirst() instanceof double[]) {
                        numFeatures = ((double[]) pair.getFirst()).length;
                        numLabels = ((double[]) pair.getSecond()).length;
                    }
                }

                if (pair.getFirst() instanceof INDArray) {
                    if (ndLabels == null) {
                        ndLabels = new ArrayList<>();
                        ndFeatures = new ArrayList<>();
                    }
                    ndFeatures.add(((INDArray) pair.getFirst()));
                    ndLabels.add(((INDArray) pair.getSecond()));
                } else if (pair.getFirst() instanceof float[]) {
                    if (fLabels == null) {
                        fLabels = new float[batchSize][];
                        fFeatures = new float[batchSize][];
                    }
                    fFeatures[sampleCount] = (float[]) pair.getFirst();
                    fLabels[sampleCount] = (float[]) pair.getSecond();
                } else if (pair.getFirst() instanceof double[]) {
                    if (dLabels == null) {
                        dLabels = new double[batchSize][];
                        dFeatures = new double[batchSize][];
                    }
                    dFeatures[sampleCount] = (double[]) pair.getFirst();
                    dLabels[sampleCount] = (double[]) pair.getSecond();
                }

                sampleCount += 1;
            } else
                break;
        }

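        // Note: a trailing partial batch (iterator exhausted before batchSize samples) is silently dropped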
        if (sampleCount == batchSize) {
            INDArray labels = null;
            INDArray features = null;
            if (ndLabels != null) {
                labels = Nd4j.vstack(ndLabels);
                features = Nd4j.vstack(ndFeatures);
            } else if (fLabels != null) {
                labels = Nd4j.create(fLabels);
                features = Nd4j.create(fFeatures);
            } else if (dLabels != null) {
                labels = Nd4j.create(dLabels);
                features = Nd4j.create(dFeatures);
            }

            DataSet dataSet = new DataSet(features, labels);
            try {
                queue.add(dataSet);
            } catch (Exception e) {
                // live with it
            }
        }
    }
}
 
Example 19
Source File: StackVertex.java    From deeplearning4j with Apache License 2.0
@Override
public Pair<INDArray, MaskState> feedForwardMaskArrays(INDArray[] maskArrays, MaskState currentMaskState,
                int minibatchSize) {
    //Cases here: no mask arrays, or all mask arrays - all of the same size
    if (maskArrays == null) {
        return new Pair<>(null, currentMaskState);
    }

    boolean allNull = true;
    for(INDArray i : maskArrays){
        if(i != null) {
            allNull = false;
            break;
        }
    }
    if(allNull){
        return new Pair<>(null, currentMaskState);
    }

    // stacking along dimension 0
    //Given masks are all either 1d (column vector) or 2d (examples, timeSeriesLength) we can just vStack the masks
    //However: variable length TS might have different length masks...
    boolean allSameLength = true;
    long size1_ex0 = maskArrays[0].size(1);
    long maxLength = size1_ex0;
    for (int i = 1; i < maskArrays.length; i++) {
        allSameLength &= (size1_ex0 == maskArrays[i].size(1));
        maxLength = Math.max(maxLength, maskArrays[i].size(1));
    }

    if (allSameLength) {
        return new Pair<>(Nd4j.vstack(maskArrays), currentMaskState);
    } else {
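        // Masks have different lengths: zero-pad each into a shared [numArrays * numExamples, maxLength] mask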
        long numExamples = maskArrays[0].size(0);
        INDArray outMask = Nd4j.create(maskArrays.length * numExamples, maxLength);
        for (int i = 0; i < maskArrays.length; i++) {
            outMask.put(new INDArrayIndex[] {NDArrayIndex.interval(i * numExamples, (i + 1) * numExamples),
                            NDArrayIndex.interval(0, maskArrays[i].size(1))}, maskArrays[i]);
        }

        return new Pair<>(outMask, currentMaskState);
    }
}
 
Example 20
Source File: CrashTest.java    From nd4j with Apache License 2.0
protected void op(INDArray x, INDArray y, int i) {
    // broadcast along row & column
    INDArray row = Nd4j.ones(64);
    INDArray column = Nd4j.ones(1024, 1);

    x.addiRowVector(row);
    x.addiColumnVector(column);

    // simple scalar op
    x.addi(i * 2);

    // reduction along all dimensions
    float sum = x.sumNumber().floatValue();

    // index reduction
    Nd4j.getExecutioner().exec(new IMax(x), Integer.MAX_VALUE);

    // simple elementwise transform
    Nd4j.getExecutioner().exec(new Sqrt(x, x));

    //  dup
    INDArray x1 = x.dup(x.ordering());
    INDArray x2 = x.dup(x.ordering());
    INDArray x3 = x.dup('c');
    INDArray x4 = x.dup('f');


    // vstack && hstack
    INDArray vstack = Nd4j.vstack(x, x1, x2, x3, x4);

    INDArray hstack = Nd4j.hstack(x, x1, x2, x3, x4);

    // reduce3 call
    Nd4j.getExecutioner().exec(new ManhattanDistance(x, x2));


    // flatten call
    INDArray flat = Nd4j.toFlattened(x, x1, x2, x3, x4);


    // reduction along dimension: row & column
    INDArray max_0 = x.max(0);
    INDArray max_1 = x.max(1);


    // index reduction along dimension: row & column
    INDArray imax_0 = Nd4j.argMax(x, 0);
    INDArray imax_1 = Nd4j.argMax(x, 1);


    // logisoftmax, softmax & softmax derivative
    Nd4j.getExecutioner().exec(new OldSoftMax(x));
    Nd4j.getExecutioner().exec(new SoftMaxDerivative(x));
    Nd4j.getExecutioner().exec(new LogSoftMax(x));


    // BooleanIndexing
    BooleanIndexing.replaceWhere(x, 5f, Conditions.lessThan(8f));

    // assign on view
    BooleanIndexing.assignIf(x, x1, Conditions.greaterThan(-1000000000f));

    // std var along all dimensions
    float std = x.stdNumber().floatValue();

    // std var along row & col
    INDArray xStd_0 = x.std(0);
    INDArray xStd_1 = x.std(1);

    // blas call
    float dot = (float) Nd4j.getBlasWrapper().dot(x, x1);

    // mmul
    for (boolean tA : paramsA) {
        for (boolean tB : paramsB) {

            INDArray xT = tA ? x.dup() : x.dup().transpose();
            INDArray yT = tB ? y.dup() : y.dup().transpose();

            Nd4j.gemm(xT, yT, tA, tB);
        }
    }

    // specially for views, checking here without dup and rollover
    Nd4j.gemm(x, y, false, false);

    log.debug("Iteration passed: " + i);
}