Java Code Examples for org.deeplearning4j.nn.graph.ComputationGraph#setParams()

The following examples show how to use org.deeplearning4j.nn.graph.ComputationGraph#setParams(). All are drawn from the deeplearning4j project; the source file and license are noted above each example.
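Each example follows the same basic pattern: build a ComputationGraph from a configuration, call init(), verify that an externally supplied flattened parameter vector has the expected length, and overwrite the network's parameters with setParams(). The minimal sketch below shows that pattern in isolation; it is only an illustration assuming the standard deeplearning4j/ND4J APIs, and the class name, layer names, and sizes are hypothetical.

import org.deeplearning4j.nn.conf.ComputationGraphConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.graph.ComputationGraph;
import org.deeplearning4j.nn.weights.WeightInit;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class SetParamsSketch {
    public static void main(String[] args) {
        //Illustrative two-layer graph configuration (names and sizes are arbitrary)
        ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
                .seed(12345)
                .weightInit(WeightInit.XAVIER)
                .activation(Activation.TANH)
                .graphBuilder()
                .addInputs("in")
                .addLayer("dense", new DenseLayer.Builder().nIn(4).nOut(5).build(), "in")
                .addLayer("out", new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                        .activation(Activation.SOFTMAX).nIn(5).nOut(3).build(), "dense")
                .setOutputs("out")
                .build();

        //Source network: init() allocates and initializes the flattened parameter vector
        ComputationGraph source = new ComputationGraph(conf);
        source.init();
        INDArray params = source.params().dup();

        //Target network with the same configuration: copy the parameters over with setParams()
        ComputationGraph target = new ComputationGraph(conf);
        target.init();
        if (params.length() != target.numParams()) {
            throw new IllegalStateException("Parameter vector length does not match the network");
        }
        target.setParams(params);
    }
}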
Example 1
Source File: SharedTrainingWorker.java    From deeplearning4j with Apache License 2.0
@Override
public ComputationGraph getInitialModelGraph() {
    //Pin to device 0 before getting the NetBroadcastTuple, so it always gets mapped to device 0
    Nd4j.getAffinityManager().unsafeSetDevice(0);
    NetBroadcastTuple tuple = broadcastModel.getValue();
    if (tuple.getGraphConfiguration() != null) {
        ComputationGraphConfiguration conf = tuple.getGraphConfiguration();
        ComputationGraph network = new ComputationGraph(conf);
        network.init();

        if (tuple.getParameters() != null)
            network.setParams(tuple.getParameters());

        if (tuple.getUpdaterState() != null)
            network.getUpdater().getUpdaterStateViewArray().assign(tuple.getUpdaterState());

        return network;
    } else
        return null;
}
 
Example 2
Source File: CGVaeReconstructionErrorWithKeyFunction.java    From deeplearning4j with Apache License 2.0
@Override
public VariationalAutoencoder getVaeLayer() {
    ComputationGraph network =
                    new ComputationGraph(ComputationGraphConfiguration.fromJson((String) jsonConfig.getValue()));
    network.init();
    INDArray val = ((INDArray) params.value()).unsafeDuplication();
    if (val.length() != network.numParams(false))
        throw new IllegalStateException(
                        "Network did not have same number of parameters as the broadcasted set parameters");
    network.setParams(val);

    Layer l = network.getLayer(0);
    if (!(l instanceof VariationalAutoencoder)) {
        throw new RuntimeException(
                        "Cannot use CGVaeReconstructionErrorWithKeyFunction on network that doesn't have a VAE "
                                        + "layer as layer 0. Layer type: " + l.getClass());
    }
    return (VariationalAutoencoder) l;
}
 
Example 3
Source File: CGVaeReconstructionProbWithKeyFunction.java    From deeplearning4j with Apache License 2.0
@Override
public VariationalAutoencoder getVaeLayer() {
    ComputationGraph network =
                    new ComputationGraph(ComputationGraphConfiguration.fromJson((String) jsonConfig.getValue()));
    network.init();
    INDArray val = ((INDArray) params.value()).unsafeDuplication();
    if (val.length() != network.numParams(false))
        throw new IllegalStateException(
                        "Network did not have same number of parameters as the broadcasted set parameters");
    network.setParams(val);

    Layer l = network.getLayer(0);
    if (!(l instanceof VariationalAutoencoder)) {
        throw new RuntimeException(
                        "Cannot use CGVaeReconstructionProbWithKeyFunction on network that doesn't have a VAE "
                                        + "layer as layer 0. Layer type: " + l.getClass());
    }
    return (VariationalAutoencoder) l;
}
 
Example 4
Source File: ScoreFlatMapFunctionCGMultiDataSet.java    From deeplearning4j with Apache License 2.0
@Override
public Iterator<Tuple2<Long, Double>> call(Iterator<MultiDataSet> dataSetIterator) throws Exception {
    if (!dataSetIterator.hasNext()) {
        return Collections.singletonList(new Tuple2<>(0L, 0.0)).iterator();
    }

    MultiDataSetIterator iter = new IteratorMultiDataSetIterator(dataSetIterator, minibatchSize); //Does batching where appropriate


    ComputationGraph network = new ComputationGraph(ComputationGraphConfiguration.fromJson(json));
    network.init();
    INDArray val = params.value().unsafeDuplication(); //.value() is shared by all executors on single machine -> OK, as params are not changed in score function
    if (val.length() != network.numParams(false))
        throw new IllegalStateException(
                        "Network did not have same number of parameters as the broadcast set parameters");
    network.setParams(val);

    List<Tuple2<Long, Double>> out = new ArrayList<>();
    while (iter.hasNext()) {
        MultiDataSet ds = iter.next();
        double score = network.score(ds, false);

        long numExamples = ds.getFeatures(0).size(0);
        out.add(new Tuple2<>(numExamples, score * numExamples));
    }

    Nd4j.getExecutioner().commit();

    return out.iterator();
}
 
Example 5
Source File: GradientCheckTestsComputationGraph.java    From deeplearning4j with Apache License 2.0
@Test
    public void testBasicTwoOutputs() {
        Nd4j.getRandom().setSeed(12345);

        ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                        .dataType(DataType.DOUBLE)
                        .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                        .dist(new NormalDistribution(0, 1))
                        .activation(Activation.TANH).updater(new NoOp()).graphBuilder()
                        .addInputs("in1", "in2").addLayer("d0", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in1")
                        .addLayer("d1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in2")
                        .addLayer("out1",
                                        new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2).nIn(2)
                                                        .nOut(2).activation(Activation.IDENTITY).build(),
                                        "d0")
                        .addLayer("out2",
                                        new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2).nIn(2)
                                                        .nOut(2).activation(Activation.IDENTITY).build(),
                                        "d1")
                        .setOutputs("out1", "out2").build();

        ComputationGraph graph = new ComputationGraph(conf);
        graph.init();

        System.out.println("Num layers: " + graph.getNumLayers());
        System.out.println("Num params: " + graph.numParams());


        Nd4j.getRandom().setSeed(12345);
        long nParams = graph.numParams();
        INDArray newParams = Nd4j.rand(new long[]{1, nParams});
        graph.setParams(newParams);

        int[] mbSizes = new int[] {1, 3, 10};
        for (int minibatch : mbSizes) {

            INDArray in1 = Nd4j.rand(minibatch, 2);
            INDArray in2 = Nd4j.rand(minibatch, 2);
            INDArray labels1 = Nd4j.rand(minibatch, 2);
            INDArray labels2 = Nd4j.rand(minibatch, 2);

            String testName = "testBasicStackUnstack() - minibatch = " + minibatch;

            if (PRINT_RESULTS) {
                System.out.println(testName);
//                for (int j = 0; j < graph.getNumLayers(); j++)
//                    System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams());
            }

            boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{in1, in2})
                    .labels(new INDArray[]{labels1, labels2}));
            assertTrue(testName, gradOK);
            TestUtils.testModelSerialization(graph);
        }
    }
 
Example 6
Source File: FrozenLayerTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testFrozenCompGraph() {
    DataSet randomData = new DataSet(Nd4j.rand(10, 4), Nd4j.rand(10, 3));

    NeuralNetConfiguration.Builder overallConf = new NeuralNetConfiguration.Builder().updater(new Sgd(0.1))
                    .activation(Activation.IDENTITY);

    ComputationGraph modelToFineTune = new ComputationGraph(overallConf.graphBuilder().addInputs("layer0In")
                    .addLayer("layer0", new DenseLayer.Builder().nIn(4).nOut(3).build(), "layer0In")
                    .addLayer("layer1", new DenseLayer.Builder().nIn(3).nOut(2).build(), "layer0")
                    .addLayer("layer2", new DenseLayer.Builder().nIn(2).nOut(3).build(), "layer1")
                    .addLayer("layer3",
                                    new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                                    LossFunctions.LossFunction.MCXENT)
                                                                    .activation(Activation.SOFTMAX).nIn(3).nOut(3)
                                                                    .build(),
                                    "layer2")
                    .setOutputs("layer3").build());

    modelToFineTune.init();
    INDArray asFrozenFeatures = modelToFineTune.feedForward(randomData.getFeatures(), false).get("layer1");

    ComputationGraph modelNow =
                    new TransferLearning.GraphBuilder(modelToFineTune).setFeatureExtractor("layer1").build();

    ComputationGraph notFrozen = new ComputationGraph(overallConf.graphBuilder().addInputs("layer0In")
                    .addLayer("layer0", new DenseLayer.Builder().nIn(2).nOut(3).build(), "layer0In")
                    .addLayer("layer1",
                                    new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                                    LossFunctions.LossFunction.MCXENT)
                                                                    .activation(Activation.SOFTMAX).nIn(3).nOut(3)
                                                                    .build(),
                                    "layer0")
                    .setOutputs("layer1").build());

    notFrozen.init();
    notFrozen.setParams(Nd4j.hstack(modelToFineTune.getLayer("layer2").params(),
                    modelToFineTune.getLayer("layer3").params()));

    int i = 0;
    while (i < 5) {
        notFrozen.fit(new DataSet(asFrozenFeatures, randomData.getLabels()));
        modelNow.fit(randomData);
        i++;
    }

    assertEquals(Nd4j.hstack(modelToFineTune.getLayer("layer0").params(),
                    modelToFineTune.getLayer("layer1").params(), notFrozen.params()), modelNow.params());
}
 
Example 7
Source File: TestSameDiffLambda.java    From deeplearning4j with Apache License 2.0
@Test
public void testSameDiffLamdaVertexBasic(){
    for(WorkspaceMode wsm : new WorkspaceMode[]{WorkspaceMode.ENABLED, WorkspaceMode.NONE}) {
        log.info("--- Workspace Mode: {} ---", wsm);

        Nd4j.getRandom().setSeed(12345);
        ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
                .trainingWorkspaceMode(wsm)
                .inferenceWorkspaceMode(wsm)
                .dataType(DataType.DOUBLE)
                .seed(12345)
                .updater(new Adam(0.01))
                .graphBuilder()
                .addInputs("in1", "in2")
                .addLayer("0", new DenseLayer.Builder().nIn(5).nOut(5).activation(Activation.TANH).build(), "in1")
                .addLayer("1", new DenseLayer.Builder().nIn(5).nOut(5).activation(Activation.TANH).build(), "in2")
                .addVertex("lambda", new SameDiffSimpleLambdaVertex(), "0", "1")
                .addLayer("2", new OutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX)
                        .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "lambda")
                .setOutputs("2")
                .build();

        //Equivalent, not using SameDiff Lambda:
        ComputationGraphConfiguration confStd = new NeuralNetConfiguration.Builder()
                .trainingWorkspaceMode(wsm)
                .inferenceWorkspaceMode(wsm)
                .dataType(DataType.DOUBLE)
                .seed(12345)
                .updater(new Adam(0.01))
                .graphBuilder()
                .addInputs("in1", "in2")
                .addLayer("0", new DenseLayer.Builder().nIn(5).nOut(5).activation(Activation.TANH).build(), "in1")
                .addLayer("1", new DenseLayer.Builder().nIn(5).nOut(5).activation(Activation.TANH).build(), "in2")
                .addVertex("elementwise", new ElementWiseVertex(ElementWiseVertex.Op.Product), "0", "1")
                .addLayer("3", new OutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX)
                        .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "elementwise")
                .setOutputs("3")
                .build();

        ComputationGraph lambda = new ComputationGraph(conf);
        lambda.init();

        ComputationGraph std = new ComputationGraph(confStd);
        std.init();

        lambda.setParams(std.params());

        INDArray in1 = Nd4j.rand(3, 5);
        INDArray in2 = Nd4j.rand(3, 5);
        INDArray labels = TestUtils.randomOneHot(3, 5);
        MultiDataSet mds = new org.nd4j.linalg.dataset.MultiDataSet(new INDArray[]{in1, in2}, new INDArray[]{labels});

        INDArray outLambda = lambda.output(in1, in2)[0];
        INDArray outStd = std.output(in1, in2)[0];

        assertEquals(outLambda, outStd);

        double scoreLambda = lambda.score(mds);
        double scoreStd = std.score(mds);

        assertEquals(scoreStd, scoreLambda, 1e-6);

        for (int i = 0; i < 3; i++) {
            lambda.fit(mds);
            std.fit(mds);

            String s = String.valueOf(i);
            assertEquals(s, std.params(), lambda.params());
            assertEquals(s, std.getFlattenedGradients(), lambda.getFlattenedGradients());
        }

        ComputationGraph loaded = TestUtils.testModelSerialization(lambda);
        outLambda = loaded.output(in1, in2)[0];
        outStd = std.output(in1, in2)[0];

        assertEquals(outStd, outLambda);

        //Sanity check on different minibatch sizes:
        INDArray newIn1 = Nd4j.vstack(in1, in1);
        INDArray newIn2 = Nd4j.vstack(in2, in2);
        INDArray outMbsd = lambda.output(newIn1, newIn2)[0];
        INDArray outMb = std.output(newIn1, newIn2)[0];
        assertEquals(outMb, outMbsd);
    }
}
 
Example 8
Source File: TestSameDiffLambda.java    From deeplearning4j with Apache License 2.0
@Test
public void testSameDiffLamdaLayerBasic(){
    for(WorkspaceMode wsm : new WorkspaceMode[]{WorkspaceMode.ENABLED, WorkspaceMode.NONE}) {
        log.info("--- Workspace Mode: {} ---", wsm);


        Nd4j.getRandom().setSeed(12345);
        ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
                .trainingWorkspaceMode(wsm)
                .inferenceWorkspaceMode(wsm)
                .seed(12345)
                .updater(new Adam(0.01))
                .graphBuilder()
                .addInputs("in")
                .addLayer("0", new DenseLayer.Builder().nIn(5).nOut(5).activation(Activation.TANH).build(), "in")
                .addLayer("1", new SameDiffSimpleLambdaLayer(), "0")
                .addLayer("2", new OutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX)
                        .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "1")
                .setOutputs("2")
                .build();

        //Equivalent, not using SameDiff Lambda:
        ComputationGraphConfiguration confStd = new NeuralNetConfiguration.Builder()
                .trainingWorkspaceMode(wsm)
                .inferenceWorkspaceMode(wsm)
                .seed(12345)
                .updater(new Adam(0.01))
                .graphBuilder()
                .addInputs("in")
                .addLayer("0", new DenseLayer.Builder().nIn(5).nOut(5).activation(Activation.TANH).build(), "in")
                .addVertex("1", new ShiftVertex(1.0), "0")
                .addVertex("2", new ScaleVertex(2.0), "1")
                .addLayer("3", new OutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX)
                        .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "2")
                .setOutputs("3")
                .build();

        ComputationGraph lambda = new ComputationGraph(conf);
        lambda.init();

        ComputationGraph std = new ComputationGraph(confStd);
        std.init();

        lambda.setParams(std.params());

        INDArray in = Nd4j.rand(3, 5);
        INDArray labels = TestUtils.randomOneHot(3, 5);
        DataSet ds = new DataSet(in, labels);

        INDArray outLambda = lambda.outputSingle(in);
        INDArray outStd = std.outputSingle(in);

        assertEquals(outLambda, outStd);

        double scoreLambda = lambda.score(ds);
        double scoreStd = std.score(ds);

        assertEquals(scoreStd, scoreLambda, 1e-6);

        for (int i = 0; i < 3; i++) {
            lambda.fit(ds);
            std.fit(ds);

            String s = String.valueOf(i);
            assertEquals(s, std.params(), lambda.params());
            assertEquals(s, std.getFlattenedGradients(), lambda.getFlattenedGradients());
        }

        ComputationGraph loaded = TestUtils.testModelSerialization(lambda);
        outLambda = loaded.outputSingle(in);
        outStd = std.outputSingle(in);

        assertEquals(outStd, outLambda);

        //Sanity check on different minibatch sizes:
        INDArray newIn = Nd4j.vstack(in, in);
        INDArray outMbsd = lambda.output(newIn)[0];
        INDArray outMb = std.output(newIn)[0];
        assertEquals(outMb, outMbsd);
    }
}
 
Example 9
Source File: ScoreExamplesWithKeyFunction.java    From deeplearning4j with Apache License 2.0
@Override
public Iterator<Tuple2<K, Double>> call(Iterator<Tuple2<K, MultiDataSet>> iterator) throws Exception {
    if (!iterator.hasNext()) {
        return Collections.emptyIterator();
    }

    ComputationGraph network = new ComputationGraph(ComputationGraphConfiguration.fromJson(jsonConfig.getValue()));
    network.init();
    INDArray val = params.value().unsafeDuplication();
    if (val.length() != network.numParams(false))
        throw new IllegalStateException(
                        "Network did not have same number of parameters as the broadcast set parameters");
    network.setParams(val);

    List<Tuple2<K, Double>> ret = new ArrayList<>();

    List<MultiDataSet> collect = new ArrayList<>(batchSize);
    List<K> collectKey = new ArrayList<>(batchSize);
    int totalCount = 0;
    while (iterator.hasNext()) {
        collect.clear();
        collectKey.clear();
        int nExamples = 0;
        while (iterator.hasNext() && nExamples < batchSize) {
            Tuple2<K, MultiDataSet> t2 = iterator.next();
            MultiDataSet ds = t2._2();
            val n = ds.getFeatures(0).size(0);
            if (n != 1)
                throw new IllegalStateException("Cannot score examples with one key per data set if "
                                + "data set contains more than 1 example (numExamples: " + n + ")");
            collect.add(ds);
            collectKey.add(t2._1());
            nExamples += n;
        }
        totalCount += nExamples;

        MultiDataSet data = org.nd4j.linalg.dataset.MultiDataSet.merge(collect);


        INDArray scores = network.scoreExamples(data, addRegularization);
        double[] doubleScores = scores.data().asDouble();

        for (int i = 0; i < doubleScores.length; i++) {
            ret.add(new Tuple2<>(collectKey.get(i), doubleScores[i]));
        }
    }

    Nd4j.getExecutioner().commit();

    if (log.isDebugEnabled()) {
        log.debug("Scored {} examples ", totalCount);
    }

    return ret.iterator();
}
 
Example 10
Source File: TestLrChanges.java    From deeplearning4j with Apache License 2.0
@Test
public void testChangeLrCompGraphSchedule(){
    //Set an LR schedule on an existing net, then compare vs. an equivalent net configured with that schedule from the start
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
            .activation(Activation.TANH)
            .seed(12345)
            .updater(new Adam(0.1))
            .graphBuilder()
            .addInputs("in")
            .addLayer("0", new DenseLayer.Builder().nIn(10).nOut(10).build(), "in")
            .addLayer("1", new DenseLayer.Builder().nIn(10).nOut(10).build(), "0")
            .addLayer("2", new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build(), "1")
            .setOutputs("2")
            .build();

    ComputationGraph net = new ComputationGraph(conf);
    net.init();

    for( int i=0; i<10; i++ ){
        net.fit(new DataSet(Nd4j.rand(10,10), Nd4j.rand(10,10)));
    }


    ComputationGraphConfiguration conf2 = new NeuralNetConfiguration.Builder()
            .activation(Activation.TANH)
            .seed(12345)
            .updater(new Adam(new ExponentialSchedule(ScheduleType.ITERATION, 0.5, 0.8 )))
            .graphBuilder()
            .addInputs("in")
            .addLayer("0", new DenseLayer.Builder().nIn(10).nOut(10).build(), "in")
            .addLayer("1", new DenseLayer.Builder().nIn(10).nOut(10).build(), "0")
            .layer("2", new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build(), "1")
            .setOutputs("2")
            .build();
    ComputationGraph net2 = new ComputationGraph(conf2);
    net2.init();
    net2.getUpdater().getStateViewArray().assign(net.getUpdater().getStateViewArray());
    conf2.setIterationCount(conf.getIterationCount());
    net2.setParams(net.params().dup());

    net.setLearningRate(new ExponentialSchedule(ScheduleType.ITERATION, 0.5, 0.8 ));  //Replace the fixed LR with an exponential schedule for all layers

    assertEquals(conf, conf2);
    assertEquals(conf.toJson(), conf2.toJson());

    assertEquals(net.getUpdater().getStateViewArray(), net2.getUpdater().getStateViewArray());

    //Perform some parameter updates - check things are actually in sync...
    for( int i=0; i<3; i++ ){
        INDArray in = Nd4j.rand(10, 10);
        INDArray l = Nd4j.rand(10, 10);

        net.fit(new DataSet(in, l));
        net2.fit(new DataSet(in, l));
    }

    assertEquals(net.params(), net2.params());
    assertEquals(net.getUpdater().getStateViewArray(), net2.getUpdater().getStateViewArray());
}
 
Example 11
Source File: TransferLearningCompGraphTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testNoutChanges() {
    DataSet randomData = new DataSet(Nd4j.rand(10, 4), Nd4j.rand(10, 2));

    NeuralNetConfiguration.Builder overallConf = new NeuralNetConfiguration.Builder().updater(new Sgd(0.1))
                    .activation(Activation.IDENTITY);
    FineTuneConfiguration fineTuneConfiguration = new FineTuneConfiguration.Builder().updater(new Sgd(0.1))
                    .activation(Activation.IDENTITY).build();

    ComputationGraph modelToFineTune = new ComputationGraph(overallConf.graphBuilder().addInputs("layer0In")
                    .addLayer("layer0", new DenseLayer.Builder().nIn(4).nOut(5).build(), "layer0In")
                    .addLayer("layer1", new DenseLayer.Builder().nIn(3).nOut(2).build(), "layer0")
                    .addLayer("layer2", new DenseLayer.Builder().nIn(2).nOut(3).build(), "layer1")
                    .addLayer("layer3",
                                    new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                                    LossFunctions.LossFunction.MCXENT)
                                                                    .activation(Activation.SOFTMAX).nIn(3).nOut(3)
                                                                    .build(),
                                    "layer2")
                    .setOutputs("layer3").build());
    modelToFineTune.init();
    ComputationGraph modelNow = new TransferLearning.GraphBuilder(modelToFineTune)
                    .fineTuneConfiguration(fineTuneConfiguration).nOutReplace("layer3", 2, WeightInit.XAVIER)
                    .nOutReplace("layer0", 3, new NormalDistribution(1, 1e-1), WeightInit.XAVIER)
                    //.setOutputs("layer3")
                    .build();

    BaseLayer bl0 = ((BaseLayer) modelNow.getLayer("layer0").conf().getLayer());
    BaseLayer bl1 = ((BaseLayer) modelNow.getLayer("layer1").conf().getLayer());
    BaseLayer bl3 = ((BaseLayer) modelNow.getLayer("layer3").conf().getLayer());
    assertEquals(bl0.getWeightInitFn(), new WeightInitDistribution(new NormalDistribution(1, 1e-1)));
    assertEquals(bl1.getWeightInitFn(), new WeightInitXavier());
    assertEquals(bl3.getWeightInitFn(), new WeightInitXavier());

    ComputationGraph modelExpectedArch = new ComputationGraph(overallConf.graphBuilder().addInputs("layer0In")
                    .addLayer("layer0", new DenseLayer.Builder().nIn(4).nOut(3).build(), "layer0In")
                    .addLayer("layer1", new DenseLayer.Builder().nIn(3).nOut(2).build(), "layer0")
                    .addLayer("layer2", new DenseLayer.Builder().nIn(2).nOut(3).build(), "layer1")
                    .addLayer("layer3",
                                    new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                                    LossFunctions.LossFunction.MCXENT)
                                                                    .activation(Activation.SOFTMAX).nIn(3).nOut(2)
                                                                    .build(),
                                    "layer2")
                    .setOutputs("layer3").build());

    modelExpectedArch.init();

    //modelNow should have the same architecture as modelExpectedArch
    assertArrayEquals(modelExpectedArch.params().shape(), modelNow.params().shape());
    assertArrayEquals(modelExpectedArch.getLayer("layer0").params().shape(),
                    modelNow.getLayer("layer0").params().shape());
    assertArrayEquals(modelExpectedArch.getLayer("layer1").params().shape(),
                    modelNow.getLayer("layer1").params().shape());
    assertArrayEquals(modelExpectedArch.getLayer("layer2").params().shape(),
                    modelNow.getLayer("layer2").params().shape());
    assertArrayEquals(modelExpectedArch.getLayer("layer3").params().shape(),
                    modelNow.getLayer("layer3").params().shape());

    modelNow.setParams(modelExpectedArch.params());
    //fit should give the same results
    modelExpectedArch.fit(randomData);
    modelNow.fit(randomData);
    assertEquals(modelExpectedArch.score(), modelNow.score(), 1e-8);
    assertEquals(modelExpectedArch.params(), modelNow.params());
}
 
Example 12
Source File: TransferLearningCompGraphTest.java    From deeplearning4j with Apache License 2.0
@Test
public void simpleFineTune() {

    long rng = 12345L;
    DataSet randomData = new DataSet(Nd4j.rand(10, 4), Nd4j.rand(10, 3));
    //original conf
    ComputationGraphConfiguration confToChange = new NeuralNetConfiguration.Builder().seed(rng)
                    .optimizationAlgo(OptimizationAlgorithm.LBFGS).updater(new Nesterovs(0.01, 0.99))
                    .graphBuilder().addInputs("layer0In").setInputTypes(InputType.feedForward(4))
                    .addLayer("layer0", new DenseLayer.Builder().nIn(4).nOut(3).build(), "layer0In")
                    .addLayer("layer1",
                                    new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                                    LossFunctions.LossFunction.MCXENT)
                                                                    .activation(Activation.SOFTMAX).nIn(3).nOut(3)
                                                                    .build(),
                                    "layer0")
                    .setOutputs("layer1").build();

    //conf with learning parameters changed
    ComputationGraphConfiguration expectedConf = new NeuralNetConfiguration.Builder().seed(rng)
                    .updater(new RmsProp(0.2))
                    .graphBuilder().addInputs("layer0In")
                    .setInputTypes(InputType.feedForward(4))
                    .addLayer("layer0", new DenseLayer.Builder().nIn(4).nOut(3).build(), "layer0In")
                    .addLayer("layer1",
                                    new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                                    LossFunctions.LossFunction.MCXENT)
                                                                    .activation(Activation.SOFTMAX).nIn(3).nOut(3)
                                                                    .build(),
                                    "layer0")
                    .setOutputs("layer1").build();
    ComputationGraph expectedModel = new ComputationGraph(expectedConf);
    expectedModel.init();

    ComputationGraph modelToFineTune = new ComputationGraph(expectedConf);
    modelToFineTune.init();
    modelToFineTune.setParams(expectedModel.params());
    //model after applying changes with transfer learning
    ComputationGraph modelNow =
                    new TransferLearning.GraphBuilder(modelToFineTune)
                                    .fineTuneConfiguration(new FineTuneConfiguration.Builder().seed(rng)
                                                    .updater(new RmsProp(0.2)).build())
                                    .build();

    //Check json
    assertEquals(expectedConf.toJson(), modelNow.getConfiguration().toJson());

    //Check params after fit
    modelNow.fit(randomData);
    expectedModel.fit(randomData);
    assertEquals(modelNow.score(), expectedModel.score(), 1e-8);
    assertEquals(modelNow.params(), expectedModel.params());
}
 
Example 13
Source File: EvalTest.java    From deeplearning4j with Apache License 2.0
@Test
    public void testEvalSplittingCompGraph(){
        //Test for "tbptt-like" functionality

        for(WorkspaceMode ws : WorkspaceMode.values()) {
            System.out.println("Starting test for workspace mode: " + ws);

            int nIn = 4;
            int layerSize = 5;
            int nOut = 6;
            int tbpttLength = 10;
            int tsLength = 5 * tbpttLength + tbpttLength / 2;

            ComputationGraphConfiguration conf1 = new NeuralNetConfiguration.Builder()
                    .seed(12345)
                    .trainingWorkspaceMode(ws)
                    .inferenceWorkspaceMode(ws)
                    .graphBuilder()
                    .addInputs("in")
                    .addLayer("0", new LSTM.Builder().nIn(nIn).nOut(layerSize).build(), "in")
                    .addLayer("1", new RnnOutputLayer.Builder().nIn(layerSize).nOut(nOut)
                            .activation(Activation.SOFTMAX)
                            .build(), "0")
                    .setOutputs("1")
                    .build();

            ComputationGraphConfiguration conf2 = new NeuralNetConfiguration.Builder()
                    .seed(12345)
                    .trainingWorkspaceMode(ws)
                    .inferenceWorkspaceMode(ws)
                    .graphBuilder()
                    .addInputs("in")
                    .addLayer("0", new LSTM.Builder().nIn(nIn).nOut(layerSize).build(), "in")
                    .addLayer("1", new RnnOutputLayer.Builder().nIn(layerSize).nOut(nOut)
                            .activation(Activation.SOFTMAX)
                            .build(), "0")
                    .setOutputs("1")
                    .tBPTTLength(10)
                    .backpropType(BackpropType.TruncatedBPTT)
                    .build();

            ComputationGraph net1 = new ComputationGraph(conf1);
            net1.init();

            ComputationGraph net2 = new ComputationGraph(conf2);
            net2.init();

            net2.setParams(net1.params());

            for (boolean useMask : new boolean[]{false, true}) {

                INDArray in1 = Nd4j.rand(new int[]{3, nIn, tsLength});
                INDArray out1 = TestUtils.randomOneHotTimeSeries(3, nOut, tsLength);

                INDArray in2 = Nd4j.rand(new int[]{5, nIn, tsLength});
                INDArray out2 = TestUtils.randomOneHotTimeSeries(5, nOut, tsLength);

                INDArray lMask1 = null;
                INDArray lMask2 = null;
                if (useMask) {
                    lMask1 = Nd4j.create(3, tsLength);
                    lMask2 = Nd4j.create(5, tsLength);
                    Nd4j.getExecutioner().exec(new BernoulliDistribution(lMask1, 0.5));
                    Nd4j.getExecutioner().exec(new BernoulliDistribution(lMask2, 0.5));
                }

                List<DataSet> l = Arrays.asList(new DataSet(in1, out1), new DataSet(in2, out2));
                DataSetIterator iter = new ExistingDataSetIterator(l);

//                System.out.println("Eval net 1");
                org.nd4j.evaluation.IEvaluation[] e1 = net1.doEvaluation(iter, new org.nd4j.evaluation.classification.Evaluation(), new org.nd4j.evaluation.classification.ROCMultiClass(), new org.nd4j.evaluation.regression.RegressionEvaluation());
//                System.out.println("Eval net 2");
                org.nd4j.evaluation.IEvaluation[] e2 = net2.doEvaluation(iter, new org.nd4j.evaluation.classification.Evaluation(), new org.nd4j.evaluation.classification.ROCMultiClass(), new org.nd4j.evaluation.regression.RegressionEvaluation());

                assertEquals(e1[0], e2[0]);
                assertEquals(e1[1], e2[1]);
                assertEquals(e1[2], e2[2]);
            }
        }
    }
 
Example 14
Source File: GradientCheckTestsComputationGraph.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Test
    public void testBasicStackUnstackDebug() {
        Nd4j.getRandom().setSeed(12345);

        ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                        .dataType(DataType.DOUBLE)
                        .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                        .dist(new NormalDistribution(0, 1))
                        .activation(Activation.TANH).updater(new NoOp()).graphBuilder()
                        .addInputs("in1", "in2").addLayer("d0", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in1")
                        .addLayer("d1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in2")
                        .addVertex("stack", new StackVertex(), "d0", "d1")
                        .addVertex("u0", new UnstackVertex(0, 2), "stack")
                        .addVertex("u1", new UnstackVertex(1, 2), "stack")
                        .addLayer("out1",
                                        new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2).nIn(2)
                                                        .nOut(2).activation(Activation.IDENTITY).build(),
                                        "u0")
                        .addLayer("out2",
                                        new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2).nIn(2)
                                                        .nOut(2).activation(Activation.IDENTITY).build(),
                                        "u1")
                        .setOutputs("out1", "out2").build();

        ComputationGraph graph = new ComputationGraph(conf);
        graph.init();


        Nd4j.getRandom().setSeed(12345);
        long nParams = graph.numParams();
        INDArray newParams = Nd4j.rand(new long[]{1, nParams});
        graph.setParams(newParams);

        int[] mbSizes = new int[] {1, 3, 10};
        for (int minibatch : mbSizes) {

            INDArray in1 = Nd4j.rand(minibatch, 2);
            INDArray in2 = Nd4j.rand(minibatch, 2);

            INDArray labels1 = Nd4j.rand(minibatch, 2);
            INDArray labels2 = Nd4j.rand(minibatch, 2);

            String testName = "testBasicStackUnstack() - minibatch = " + minibatch;

            if (PRINT_RESULTS) {
                System.out.println(testName);
//                for (int j = 0; j < graph.getNumLayers(); j++)
//                    System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams());
            }

            boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{in1, in2})
                    .labels(new INDArray[]{labels1, labels2}));

            assertTrue(testName, gradOK);
            TestUtils.testModelSerialization(graph);
        }
    }
 
Example 15
Source File: GradientCheckTestsComputationGraph.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Test
    public void testBasicStackUnstack() {

        int layerSizes = 2;

        Nd4j.getRandom().setSeed(12345);
        ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                        .dataType(DataType.DOUBLE)
                        .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                        .dist(new NormalDistribution(0, 1))
                        .activation(Activation.TANH).updater(new NoOp()).graphBuilder()
                        .addInputs("in1", "in2")
                        .addLayer("d0", new DenseLayer.Builder().nIn(layerSizes).nOut(layerSizes).build(), "in1")
                        .addLayer("d1", new DenseLayer.Builder().nIn(layerSizes).nOut(layerSizes).build(), "in2")
                        .addVertex("stack", new StackVertex(), "d0", "d1")
                        .addLayer("d2", new DenseLayer.Builder().nIn(layerSizes).nOut(layerSizes).build(), "stack")
                        .addVertex("u1", new UnstackVertex(0, 2), "d2").addVertex("u2", new UnstackVertex(1, 2), "d2")
                        .addLayer("out1", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2)
                                        .nIn(layerSizes).nOut(layerSizes).activation(Activation.IDENTITY).build(), "u1")
                        .addLayer("out2", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2)
                                        .nIn(layerSizes).nOut(2).activation(Activation.IDENTITY).build(), "u2")
                        .setOutputs("out1", "out2").build();

        ComputationGraph graph = new ComputationGraph(conf);
        graph.init();


        Nd4j.getRandom().setSeed(12345);
        long nParams = graph.numParams();
        INDArray newParams = Nd4j.rand(new long[]{1, nParams});
        graph.setParams(newParams);

        int[] mbSizes = new int[] {1, 3, 10};
        for (int minibatch : mbSizes) {

            INDArray in1 = Nd4j.rand(minibatch, layerSizes);
            INDArray in2 = Nd4j.rand(minibatch, layerSizes);

            INDArray labels1 = Nd4j.rand(minibatch, 2);
            INDArray labels2 = Nd4j.rand(minibatch, 2);

            String testName = "testBasicStackUnstack() - minibatch = " + minibatch;

            if (PRINT_RESULTS) {
                System.out.println(testName);
//                for (int j = 0; j < graph.getNumLayers(); j++)
//                    System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams());
            }

            boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{in1, in2})
                    .labels(new INDArray[]{labels1, labels2}));

            assertTrue(testName, gradOK);
            TestUtils.testModelSerialization(graph);
        }
    }
 
Example 16
Source File: GradientCheckTestsComputationGraph.java    From deeplearning4j with Apache License 2.0
@Test
    public void testBasicL2() {
        Nd4j.getRandom().setSeed(12345);
        ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                        .dataType(DataType.DOUBLE)
                        .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                        .dist(new NormalDistribution(0, 1))
                        .activation(Activation.TANH).updater(new NoOp()).graphBuilder()
                        .addInputs("in1", "in2").addLayer("d0", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in1")
                        .addLayer("d1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in2")
                        .addVertex("l2", new L2Vertex(), "d0", "d1")
                        .addLayer("out", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2).nIn(1)
                                        .nOut(1).activation(Activation.IDENTITY).build(), "l2")
                        .setOutputs("out").build();

        ComputationGraph graph = new ComputationGraph(conf);
        graph.init();


        Nd4j.getRandom().setSeed(12345);
        long nParams = graph.numParams();
        INDArray newParams = Nd4j.rand(new long[]{1, nParams});
        graph.setParams(newParams);

        int[] mbSizes = new int[] {1, 3, 10};
        for (int minibatch : mbSizes) {

            INDArray in1 = Nd4j.rand(DataType.DOUBLE, minibatch, 2);
            INDArray in2 = Nd4j.rand(DataType.DOUBLE, minibatch, 2);

            INDArray labels = Nd4j.rand(DataType.DOUBLE, minibatch, 1);

            String testName = "testBasicL2() - minibatch = " + minibatch;

            if (PRINT_RESULTS) {
                System.out.println(testName);
//                for (int j = 0; j < graph.getNumLayers(); j++)
//                    System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams());
            }

            boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{in1, in2})
                    .labels(new INDArray[]{labels}));

            assertTrue(testName, gradOK);
            TestUtils.testModelSerialization(graph);
        }
    }
 
Example 17
Source File: ScoreExamplesFunction.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Override
public Iterator<Double> call(Iterator<MultiDataSet> iterator) throws Exception {
    if (!iterator.hasNext()) {
        return Collections.emptyIterator();
    }

    ComputationGraph network = new ComputationGraph(ComputationGraphConfiguration.fromJson(jsonConfig.getValue()));
    network.init();
    INDArray val = params.value().unsafeDuplication();
    if (val.length() != network.numParams(false))
        throw new IllegalStateException(
                        "Network did not have same number of parameters as the broadcast set parameters");
    network.setParams(val);

    List<Double> ret = new ArrayList<>();

    List<MultiDataSet> collect = new ArrayList<>(batchSize);
    int totalCount = 0;
    while (iterator.hasNext()) {
        collect.clear();
        int nExamples = 0;
        while (iterator.hasNext() && nExamples < batchSize) {
            MultiDataSet ds = iterator.next();
            val n = ds.getFeatures(0).size(0);
            collect.add(ds);
            nExamples += n;
        }
        totalCount += nExamples;


        MultiDataSet data = org.nd4j.linalg.dataset.MultiDataSet.merge(collect);


        INDArray scores = network.scoreExamples(data, addRegularization);
        double[] doubleScores = scores.data().asDouble();

        for (double doubleScore : doubleScores) {
            ret.add(doubleScore);
        }
    }

    Nd4j.getExecutioner().commit();

    if (log.isDebugEnabled()) {
        log.debug("Scored {} examples ", totalCount);
    }

    return ret.iterator();
}
 
Example 18
Source File: GradientCheckTestsComputationGraph.java    From deeplearning4j with Apache License 2.0
@Test
    public void testBasicIrisWithElementWiseNode() {

        ElementWiseVertex.Op[] ops = new ElementWiseVertex.Op[] {ElementWiseVertex.Op.Add,
                        ElementWiseVertex.Op.Subtract, ElementWiseVertex.Op.Product, ElementWiseVertex.Op.Average, ElementWiseVertex.Op.Max};

        for (ElementWiseVertex.Op op : ops) {

            Nd4j.getRandom().setSeed(12345);
            ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                            .dataType(DataType.DOUBLE)
                            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                            .dist(new NormalDistribution(0, 1))
                            .updater(new NoOp()).graphBuilder().addInputs("input")
                            .addLayer("l1", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
                                            "input")
                            .addLayer("l2", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.SIGMOID)
                                            .build(), "input")
                            .addVertex("elementwise", new ElementWiseVertex(op), "l1", "l2")
                            .addLayer("outputLayer",
                                            new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
                                                            .activation(Activation.SOFTMAX).nIn(5).nOut(3).build(),
                                            "elementwise")
                            .setOutputs("outputLayer").build();

            ComputationGraph graph = new ComputationGraph(conf);
            graph.init();

            int numParams = (4 * 5 + 5) + (4 * 5 + 5) + (5 * 3 + 3);
            assertEquals(numParams, graph.numParams());

            Nd4j.getRandom().setSeed(12345);
            long nParams = graph.numParams();
            INDArray newParams = Nd4j.rand(new long[]{1, nParams});
            graph.setParams(newParams);

            DataSet ds = new IrisDataSetIterator(150, 150).next();
            INDArray min = ds.getFeatures().min(0);
            INDArray max = ds.getFeatures().max(0);
            ds.getFeatures().subiRowVector(min).diviRowVector(max.sub(min));
            INDArray input = ds.getFeatures();
            INDArray labels = ds.getLabels();

            if (PRINT_RESULTS) {
                System.out.println("testBasicIrisWithElementWiseVertex(op=" + op + ")");
//                for (int j = 0; j < graph.getNumLayers(); j++)
//                    System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams());
            }

            boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{input})
                    .labels(new INDArray[]{labels}));

            String msg = "testBasicIrisWithElementWiseVertex(op=" + op + ")";
            assertTrue(msg, gradOK);
            TestUtils.testModelSerialization(graph);
        }
    }
 
Example 19
Source File: GradientCheckTestsComputationGraph.java    From deeplearning4j with Apache License 2.0
@Test
    public void testBasicIrisWithMerging() {
        Nd4j.getRandom().setSeed(12345);
        ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                        .dataType(DataType.DOUBLE)
                        .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                        .dist(new NormalDistribution(0, 1)).updater(new NoOp())
                        .graphBuilder().addInputs("input")
                        .addLayer("l1", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
                                        "input")
                        .addLayer("l2", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
                                        "input")
                        .addVertex("merge", new MergeVertex(), "l1", "l2")
                        .addLayer("outputLayer",
                                        new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
                                                        .activation(Activation.SOFTMAX).nIn(5 + 5).nOut(3).build(),
                                        "merge")
                        .setOutputs("outputLayer").build();

        ComputationGraph graph = new ComputationGraph(conf);
        graph.init();

        int numParams = (4 * 5 + 5) + (4 * 5 + 5) + (10 * 3 + 3);
        assertEquals(numParams, graph.numParams());

        Nd4j.getRandom().setSeed(12345);
        long nParams = graph.numParams();
        INDArray newParams = Nd4j.rand(new long[]{1, nParams});
        graph.setParams(newParams);

        DataSet ds = new IrisDataSetIterator(150, 150).next();
        INDArray min = ds.getFeatures().min(0);
        INDArray max = ds.getFeatures().max(0);
        ds.getFeatures().subiRowVector(min).diviRowVector(max.sub(min));
        INDArray input = ds.getFeatures();
        INDArray labels = ds.getLabels();

        if (PRINT_RESULTS) {
            System.out.println("testBasicIrisWithMerging()");
//            for (int j = 0; j < graph.getNumLayers(); j++)
//                System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams());
        }

        boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{input})
                .labels(new INDArray[]{labels}));

        String msg = "testBasicIrisWithMerging()";
        assertTrue(msg, gradOK);
        TestUtils.testModelSerialization(graph);
    }
 
Example 20
Source File: GradientCheckTestsComputationGraph.java    From deeplearning4j with Apache License 2.0
@Test
    public void testBasicIris() {
        Nd4j.getRandom().setSeed(12345);
        ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                        .dataType(DataType.DOUBLE)
                        .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                        .dist(new NormalDistribution(0, 1)).updater(new NoOp())
                        .graphBuilder().addInputs("input")
                        .addLayer("firstLayer",
                                        new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
                                        "input")
                        .addLayer("outputLayer",
                                        new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
                                                        .activation(Activation.SOFTMAX).nIn(5).nOut(3).build(),
                                        "firstLayer")
                        .setOutputs("outputLayer").build();

        ComputationGraph graph = new ComputationGraph(conf);
        graph.init();

        Nd4j.getRandom().setSeed(12345);
        long nParams = graph.numParams();
        INDArray newParams = Nd4j.rand(new long[]{1, nParams});
        graph.setParams(newParams);

        DataSet ds = new IrisDataSetIterator(150, 150).next();
        INDArray min = ds.getFeatures().min(0);
        INDArray max = ds.getFeatures().max(0);
        ds.getFeatures().subiRowVector(min).diviRowVector(max.sub(min));
        INDArray input = ds.getFeatures();
        INDArray labels = ds.getLabels();

        if (PRINT_RESULTS) {
            System.out.println("testBasicIris()");
//            for (int j = 0; j < graph.getNumLayers(); j++)
//                System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams());
        }

        boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{input})
                .labels(new INDArray[]{labels}));

        String msg = "testBasicIris()";
        assertTrue(msg, gradOK);
        TestUtils.testModelSerialization(graph);
    }