Java Code Examples for org.deeplearning4j.nn.conf.ComputationGraphConfiguration#toJson()

The following examples show how to use org.deeplearning4j.nn.conf.ComputationGraphConfiguration#toJson(). You can vote up the examples you find useful or vote down those you don't, and follow the links above each example to view it in the original project or source file. You may also check out the related API usage in the sidebar.
Example 1
Source File: TestCustomLayers.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testJsonComputationGraph() {
    //Build a ComputationGraph configuration containing a custom layer, then verify that
    // round-tripping through both JSON and YAML reproduces an equal configuration.
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
            .graphBuilder()
            .addInputs("in")
            .addLayer("0", new DenseLayer.Builder().nIn(10).nOut(10).build(), "in")
            .addLayer("1", new CustomLayer(3.14159), "0")
            .addLayer("2", new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                    .activation(Activation.SOFTMAX)
                    .nIn(10).nOut(10).build(), "1")
            .setOutputs("2")
            .build();

    String asJson = conf.toJson();
    String asYaml = conf.toYaml();

    ComputationGraphConfiguration restoredFromJson = ComputationGraphConfiguration.fromJson(asJson);
    assertEquals(conf, restoredFromJson);

    ComputationGraphConfiguration restoredFromYaml = ComputationGraphConfiguration.fromYaml(asYaml);
    assertEquals(conf, restoredFromYaml);
}
 
Example 2
Source File: TestGraphNodes.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testJSON() {
    //The configuration itself is nonsensical, but that does not matter for a
    // config -> json -> config round-trip test.
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
            .graphBuilder()
            .addInputs("in")
            .addVertex("v1", new ElementWiseVertex(ElementWiseVertex.Op.Add), "in")
            .addVertex("v2", new org.deeplearning4j.nn.conf.graph.MergeVertex(), "in", "in")
            .addVertex("v3", new PreprocessorVertex(new CnnToFeedForwardPreProcessor(1, 2, 1)), "in")
            .addVertex("v4", new org.deeplearning4j.nn.conf.graph.SubsetVertex(0, 1), "in")
            .addVertex("v5", new DuplicateToTimeSeriesVertex("in"), "in")
            .addVertex("v6", new LastTimeStepVertex("in"), "in")
            .addVertex("v7", new org.deeplearning4j.nn.conf.graph.StackVertex(), "in")
            .addVertex("v8", new org.deeplearning4j.nn.conf.graph.UnstackVertex(0, 1), "in")
            .addLayer("out", new OutputLayer.Builder()
                    .nIn(1).nOut(1)
                    .activation(Activation.TANH)
                    .lossFunction(LossFunctions.LossFunction.MSE)
                    .build(), "in")
            .setOutputs("out", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8")
            .build();

    String serialized = conf.toJson();
    ComputationGraphConfiguration deserialized = ComputationGraphConfiguration.fromJson(serialized);
    assertEquals(conf, deserialized);
}
 
Example 3
Source File: TestGraphNodes.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testDuplicateToTimeSeriesVertex() {

    //DuplicateToTimeSeriesVertex copies a 2d input across every time step of a
    // reference 3d input ("in3d" here supplies the time-series length).
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
            .graphBuilder()
            .addInputs("in2d", "in3d")
            .addVertex("duplicateTS", new DuplicateToTimeSeriesVertex("in3d"), "in2d")
            .addLayer("out", new OutputLayer.Builder().nIn(1).nOut(1)
                    .activation(Activation.TANH)
                    .lossFunction(LossFunctions.LossFunction.MSE).build(), "duplicateTS")
            .addLayer("out3d", new RnnOutputLayer.Builder().nIn(1).nOut(1)
                    .activation(Activation.TANH)
                    .lossFunction(LossFunctions.LossFunction.MSE).build(), "in3d")
            .setOutputs("out", "out3d")
            .build();

    ComputationGraph net = new ComputationGraph(conf);
    net.init();

    INDArray input2d = Nd4j.rand(3, 5);
    INDArray input3d = Nd4j.rand(new int[] {3, 2, 7});

    net.setInputs(input2d, input3d);

    //Expected forward output: the 2d input duplicated into each of the 7 time steps
    INDArray expected = Nd4j.zeros(3, 5, 7);
    for (int t = 0; t < 7; t++) {
        expected.put(new INDArrayIndex[] {NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.point(t)}, input2d);
    }

    GraphVertex vertex = net.getVertex("duplicateTS");
    vertex.setInputs(input2d);
    INDArray forwardOut = vertex.doForward(true, LayerWorkspaceMgr.noWorkspaces());
    assertEquals(expected, forwardOut);

    //Backward pass: epsilons are reduced by summing over the time dimension
    INDArray expectedBackward = expected.sum(2);
    vertex.setEpsilon(expected);
    INDArray backwardOut = vertex.doBackward(false, LayerWorkspaceMgr.noWorkspaces()).getSecond()[0];
    assertEquals(expectedBackward, backwardOut);

    //Finally, the configuration should survive a JSON round trip
    String json = conf.toJson();
    ComputationGraphConfiguration restored = ComputationGraphConfiguration.fromJson(json);
    assertEquals(conf, restored);
}
 
Example 4
Source File: FrozenLayerWithBackpropTest.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Test
public void testFrozenLayerInstantiationCompGraph() {

    //Frozen layers must be instantiable from JSON etc, and behave the same as if
    // they had been initialized directly via the builder.
    ComputationGraphConfiguration conf1 = new NeuralNetConfiguration.Builder().seed(12345)
            .graphBuilder()
            .addInputs("in")
            .addLayer("0", new DenseLayer.Builder().nIn(10).nOut(10)
                    .activation(Activation.TANH).weightInit(WeightInit.XAVIER).build(), "in")
            .addLayer("1", new DenseLayer.Builder().nIn(10).nOut(10)
                    .activation(Activation.TANH).weightInit(WeightInit.XAVIER).build(), "0")
            .addLayer("2", new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                    .activation(Activation.SOFTMAX).nIn(10).nOut(10).build(), "1")
            .setOutputs("2")
            .build();

    //Same architecture, but with the two dense layers wrapped in FrozenLayerWithBackprop
    ComputationGraphConfiguration conf2 = new NeuralNetConfiguration.Builder().seed(12345)
            .graphBuilder()
            .addInputs("in")
            .addLayer("0", new org.deeplearning4j.nn.conf.layers.misc.FrozenLayerWithBackprop(
                    new DenseLayer.Builder().nIn(10).nOut(10)
                            .activation(Activation.TANH).weightInit(WeightInit.XAVIER).build()), "in")
            .addLayer("1", new org.deeplearning4j.nn.conf.layers.misc.FrozenLayerWithBackprop(
                    new DenseLayer.Builder().nIn(10).nOut(10)
                            .activation(Activation.TANH).weightInit(WeightInit.XAVIER).build()), "0")
            .addLayer("2", new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                    .activation(Activation.SOFTMAX).nIn(10).nOut(10).build(), "1")
            .setOutputs("2")
            .build();

    ComputationGraph unfrozenNet = new ComputationGraph(conf1);
    unfrozenNet.init();
    ComputationGraph frozenNet = new ComputationGraph(conf2);
    frozenNet.init();

    //Same seed -> identical initial parameters, frozen or not
    assertEquals(unfrozenNet.params(), frozenNet.params());

    String json = conf2.toJson();
    ComputationGraphConfiguration fromJson = ComputationGraphConfiguration.fromJson(json);
    assertEquals(conf2, fromJson);

    ComputationGraph restoredNet = new ComputationGraph(fromJson);
    restoredNet.init();

    INDArray input = Nd4j.rand(10, 10);

    INDArray frozenOut = frozenNet.outputSingle(input);
    INDArray restoredOut = restoredNet.outputSingle(input);

    assertEquals(frozenOut, restoredOut);
}
 
Example 5
Source File: TestCustomLayers.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Test
public void testCustomOutputLayerCG() {
    //ComputationGraphConfiguration with a custom output layer: check that JSON/YAML
    // round trips work, that the custom layer implementation is instantiated, and that
    // it behaves identically to the equivalent standard OutputLayer.
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
            .graphBuilder()
            .addInputs("in")
            .addLayer("0", new DenseLayer.Builder().nIn(10).nOut(10).build(), "in")
            .addLayer("1", new CustomOutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                    .nIn(10).nOut(10).activation(Activation.SOFTMAX).build(), "0")
            .setOutputs("1")
            .build();

    //First: JSON round trip
    String json = conf.toJson();
    assertEquals(conf, ComputationGraphConfiguration.fromJson(json));

    //Second: YAML round trip
    String yaml = conf.toYaml();
    assertEquals(conf, ComputationGraphConfiguration.fromYaml(yaml));

    //Third: check initialization
    Nd4j.getRandom().setSeed(12345);
    ComputationGraph customNet = new ComputationGraph(conf);
    customNet.init();

    assertTrue(customNet.getLayer(1) instanceof CustomOutputLayerImpl);

    //Fourth: compare to an equivalent standard output layer (should be identical)
    ComputationGraphConfiguration conf2 = new NeuralNetConfiguration.Builder().seed(12345)
            .graphBuilder()
            .addInputs("in")
            .addLayer("0", new DenseLayer.Builder().nIn(10).nOut(10).build(), "in")
            .addLayer("1", new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                    .nIn(10).nOut(10).activation(Activation.SOFTMAX).build(), "0")
            .setOutputs("1")
            .build();
    Nd4j.getRandom().setSeed(12345);
    ComputationGraph standardNet = new ComputationGraph(conf2);
    standardNet.init();

    assertEquals(standardNet.params(), customNet.params());

    INDArray features = Nd4j.rand(1, 10);
    INDArray labels = Nd4j.zeros(1, 10);
    labels.putScalar(0, 3, 1.0);
    DataSet ds = new DataSet(features, labels);

    assertEquals(standardNet.output(features)[0], customNet.output(features)[0]);
    assertEquals(standardNet.score(ds), customNet.score(ds), 1e-6);
}
 
Example 6
Source File: FrozenLayerTest.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Test
public void testFrozenLayerInstantiationCompGraph() {

    //Frozen layers must be instantiable from JSON etc, and behave the same as if
    // they had been initialized directly via the builder.
    ComputationGraphConfiguration conf1 = new NeuralNetConfiguration.Builder().seed(12345)
            .graphBuilder()
            .addInputs("in")
            .addLayer("0", new DenseLayer.Builder().nIn(10).nOut(10)
                    .activation(Activation.TANH).weightInit(WeightInit.XAVIER).build(), "in")
            .addLayer("1", new DenseLayer.Builder().nIn(10).nOut(10)
                    .activation(Activation.TANH).weightInit(WeightInit.XAVIER).build(), "0")
            .addLayer("2", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                    LossFunctions.LossFunction.MCXENT)
                    .activation(Activation.SOFTMAX).nIn(10).nOut(10).build(), "1")
            .setOutputs("2")
            .build();

    //Same architecture, but with the two dense layers wrapped as FrozenLayer
    ComputationGraphConfiguration conf2 = new NeuralNetConfiguration.Builder().seed(12345)
            .graphBuilder()
            .addInputs("in")
            .addLayer("0", new org.deeplearning4j.nn.conf.layers.misc.FrozenLayer.Builder()
                    .layer(new DenseLayer.Builder().nIn(10).nOut(10)
                            .activation(Activation.TANH).weightInit(WeightInit.XAVIER).build())
                    .build(), "in")
            .addLayer("1", new org.deeplearning4j.nn.conf.layers.misc.FrozenLayer.Builder()
                    .layer(new DenseLayer.Builder().nIn(10).nOut(10)
                            .activation(Activation.TANH).weightInit(WeightInit.XAVIER).build())
                    .build(), "0")
            .addLayer("2", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                    LossFunctions.LossFunction.MCXENT)
                    .activation(Activation.SOFTMAX).nIn(10).nOut(10).build(), "1")
            .setOutputs("2")
            .build();

    ComputationGraph unfrozenNet = new ComputationGraph(conf1);
    unfrozenNet.init();
    ComputationGraph frozenNet = new ComputationGraph(conf2);
    frozenNet.init();

    //Same seed -> identical initial parameters, frozen or not
    assertEquals(unfrozenNet.params(), frozenNet.params());

    String json = conf2.toJson();
    ComputationGraphConfiguration fromJson = ComputationGraphConfiguration.fromJson(json);
    assertEquals(conf2, fromJson);

    ComputationGraph restoredNet = new ComputationGraph(fromJson);
    restoredNet.init();

    INDArray input = Nd4j.rand(10, 10);

    INDArray frozenOut = frozenNet.outputSingle(input);
    INDArray restoredOut = restoredNet.outputSingle(input);

    assertEquals(frozenOut, restoredOut);
}
 
Example 7
Source File: TestGraphNodes.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Test
public void testLastTimeStepVertex() {

    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
            .graphBuilder()
            .addInputs("in")
            .addVertex("lastTS", new LastTimeStepVertex("in"), "in")
            .addLayer("out", new OutputLayer.Builder().nIn(1).nOut(1)
                    .activation(Activation.TANH)
                    .lossFunction(LossFunctions.LossFunction.MSE).build(), "lastTS")
            .setOutputs("out")
            .build();

    ComputationGraph net = new ComputationGraph(conf);
    net.init();

    //Case 1: no input mask array -> the literal last time step is extracted
    Nd4j.getRandom().setSeed(12345);
    INDArray input = Nd4j.rand(new int[] {3, 5, 6});
    INDArray expected = input.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.point(5));

    GraphVertex vertex = net.getVertex("lastTS");
    vertex.setInputs(input);
    //Forward pass:
    INDArray forwardOut = vertex.doForward(true, LayerWorkspaceMgr.noWorkspaces());
    assertEquals(expected, forwardOut);
    //Backward pass: epsilon is zero everywhere except at the final time step
    vertex.setEpsilon(expected);
    Pair<Gradient, INDArray[]> backward = vertex.doBackward(false, LayerWorkspaceMgr.noWorkspaces());
    INDArray eps = backward.getSecond()[0];
    assertArrayEquals(input.shape(), eps.shape());
    assertEquals(Nd4j.zeros(3, 5, 5),
            eps.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.interval(0, 4, true)));
    assertEquals(expected, eps.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.point(5)));

    //Case 2: with an input mask array -> the last *unmasked* time step per example
    INDArray mask = Nd4j.zeros(3, 6);
    mask.putRow(0, Nd4j.create(new double[] {1, 1, 1, 0, 0, 0}));
    mask.putRow(1, Nd4j.create(new double[] {1, 1, 1, 1, 0, 0}));
    mask.putRow(2, Nd4j.create(new double[] {1, 1, 1, 1, 1, 0}));
    net.setLayerMaskArrays(new INDArray[] {mask}, null);

    expected = Nd4j.zeros(3, 5);
    expected.putRow(0, input.get(NDArrayIndex.point(0), NDArrayIndex.all(), NDArrayIndex.point(2)));
    expected.putRow(1, input.get(NDArrayIndex.point(1), NDArrayIndex.all(), NDArrayIndex.point(3)));
    expected.putRow(2, input.get(NDArrayIndex.point(2), NDArrayIndex.all(), NDArrayIndex.point(4)));

    vertex.setInputs(input);
    forwardOut = vertex.doForward(true, LayerWorkspaceMgr.noWorkspaces());
    assertEquals(expected, forwardOut);

    //Configuration should also survive a JSON round trip
    String json = conf.toJson();
    ComputationGraphConfiguration restored = ComputationGraphConfiguration.fromJson(json);
    assertEquals(conf, restored);
}