Java Code Examples for org.deeplearning4j.nn.conf.MultiLayerConfiguration#toJson()

The following examples show how to use org.deeplearning4j.nn.conf.MultiLayerConfiguration#toJson(). All of them are drawn from the deeplearning4j project (Apache License 2.0); the source file for each example is listed above its code.
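
All of the examples share the same round-trip pattern: build a MultiLayerConfiguration, serialize it with toJson() (or toYaml()), restore it with MultiLayerConfiguration.fromJson() (or fromYaml()), and assert that the restored configuration equals the original. The minimal sketch below distills that pattern; the class name and layer sizes are arbitrary, and it assumes the standard deeplearning4j-core dependency is on the classpath.

import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class ToJsonRoundTrip {
    public static void main(String[] args) {
        // Build a small two-layer configuration (sizes chosen arbitrarily).
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list()
                .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
                .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                        .activation(Activation.SOFTMAX).nIn(10).nOut(10).build())
                .build();

        // Serialize to JSON, then restore; equals() compares the full configuration.
        String json = conf.toJson();
        MultiLayerConfiguration restored = MultiLayerConfiguration.fromJson(json);
        System.out.println(conf.equals(restored)); // expected: true
    }
}
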
Example 1
Source File: TestCustomActivation.java    From deeplearning4j with Apache License 2.0
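// Verifies that a configuration containing a custom activation function survives both JSON and YAML round-trips.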
@Test
    public void testCustomActivationFn() {
        //Create a MultiLayerConfiguration containing the custom activation, and check that the JSON and YAML configs actually work...

        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().updater(new Sgd(0.1)).list()
                        .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).activation(new CustomActivation()).build())
                        .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(10).nOut(10).build())
                        .build();

        String json = conf.toJson();
        String yaml = conf.toYaml();

//        System.out.println(json);

        MultiLayerConfiguration confFromJson = MultiLayerConfiguration.fromJson(json);
        assertEquals(conf, confFromJson);

        MultiLayerConfiguration confFromYaml = MultiLayerConfiguration.fromYaml(yaml);
        assertEquals(conf, confFromYaml);

    }
 
Example 2
Source File: CustomPreprocessorTest.java    From deeplearning4j with Apache License 2.0
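// Verifies that a custom input preprocessor survives JSON and YAML round-trips and keeps its concrete type after deserialization.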
@Test
    public void testCustomPreprocessor() {
        //Create a MultiLayerConfiguration containing the custom preprocessor, and check that the JSON and YAML configs actually work...
        MultiLayerConfiguration conf =
                        new NeuralNetConfiguration.Builder().list()
                                        .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
                                        .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(10)
                                                .activation(Activation.SOFTMAX).nOut(10).build())
                                        .inputPreProcessor(0, new MyCustomPreprocessor())
                                        .build();

        String json = conf.toJson();
        String yaml = conf.toYaml();

//        System.out.println(json);

        MultiLayerConfiguration confFromJson = MultiLayerConfiguration.fromJson(json);
        assertEquals(conf, confFromJson);

        MultiLayerConfiguration confFromYaml = MultiLayerConfiguration.fromYaml(yaml);
        assertEquals(conf, confFromYaml);

        assertTrue(confFromJson.getInputPreProcess(0) instanceof MyCustomPreprocessor);

    }
 
Example 3
Source File: RandomTests.java    From deeplearning4j with Apache License 2.0
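// Verifies that networks built from the original configuration and from its JSON round-trip initialize to identical parameters; the configuration seed, not the global ND4J RNG state, controls initialization.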
@Test
public void testRngInitMLN() {
    Nd4j.getRandom().setSeed(12345);

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345).activation(Activation.TANH)
                    .weightInit(WeightInit.XAVIER).list()
                    .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
                    .layer(1, new DenseLayer.Builder().nIn(10).nOut(10).build()).layer(2,
                                    new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                                    .activation(Activation.SOFTMAX).nIn(10).nOut(10).build())
                    .build();

    String json = conf.toJson();

    MultiLayerNetwork net1 = new MultiLayerNetwork(conf);
    net1.init();

    MultiLayerNetwork net2 = new MultiLayerNetwork(conf);
    net2.init();

    assertEquals(net1.params(), net2.params());

    MultiLayerConfiguration fromJson = MultiLayerConfiguration.fromJson(json);

    Nd4j.getRandom().setSeed(987654321);
    MultiLayerNetwork net3 = new MultiLayerNetwork(fromJson);
    net3.init();

    assertEquals(net1.params(), net3.params());
}
 
Example 4
Source File: TestDropout.java    From deeplearning4j with Apache License 2.0
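// Verifies that a DropoutLayer configured with SpatialDropout survives a JSON round-trip.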
@Test
public void testSpatialDropoutJSON(){

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .list()
            .layer(new DropoutLayer.Builder(new SpatialDropout(0.5)).build())
            .build();

    String asJson = conf.toJson();
    MultiLayerConfiguration fromJson = MultiLayerConfiguration.fromJson(asJson);

    assertEquals(conf, fromJson);
}
 
Example 5
Source File: TestVAE.java    From deeplearning4j with Apache License 2.0
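// Builds variational autoencoder layers with several reconstruction distributions and loss functions, then checks JSON and YAML round-trips.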
@Test
public void testJsonYaml() {

    MultiLayerConfiguration config = new NeuralNetConfiguration.Builder().seed(12345).list()
                    .layer(0, new org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder.Builder()
                                    .reconstructionDistribution(new GaussianReconstructionDistribution(Activation.IDENTITY))
                                    .nIn(3).nOut(4).encoderLayerSizes(5).decoderLayerSizes(6).build())
                    .layer(1, new org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder.Builder()
                                    .reconstructionDistribution(new GaussianReconstructionDistribution(Activation.TANH))
                                    .nIn(7).nOut(8).encoderLayerSizes(9).decoderLayerSizes(10).build())
                    .layer(2, new org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder.Builder()
                                    .reconstructionDistribution(new BernoulliReconstructionDistribution()).nIn(11)
                                    .nOut(12).encoderLayerSizes(13).decoderLayerSizes(14).build())
                    .layer(3, new org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder.Builder()
                                    .reconstructionDistribution(new ExponentialReconstructionDistribution(Activation.TANH))
                                    .nIn(11).nOut(12).encoderLayerSizes(13).decoderLayerSizes(14).build())
                    .layer(4, new org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder.Builder()
                                    .lossFunction(new ActivationTanH(), LossFunctions.LossFunction.MSE).nIn(11)
                                    .nOut(12).encoderLayerSizes(13).decoderLayerSizes(14).build())
                    .layer(5, new org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder.Builder()
                                    .reconstructionDistribution(new CompositeReconstructionDistribution.Builder()
                                                    .addDistribution(5, new GaussianReconstructionDistribution())
                                                    .addDistribution(5,
                                                                    new GaussianReconstructionDistribution(Activation.TANH))
                                                    .addDistribution(5, new BernoulliReconstructionDistribution())
                                                    .build())
                                    .nIn(15).nOut(16).encoderLayerSizes(17).decoderLayerSizes(18).build())
                    .layer(6, new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(18)
                                    .nOut(19).activation(new ActivationTanH()).build())
                    .build();

    String asJson = config.toJson();
    String asYaml = config.toYaml();

    MultiLayerConfiguration fromJson = MultiLayerConfiguration.fromJson(asJson);
    MultiLayerConfiguration fromYaml = MultiLayerConfiguration.fromYaml(asYaml);

    assertEquals(config, fromJson);
    assertEquals(config, fromYaml);
}
 
Example 6
Source File: TestCustomLayers.java    From deeplearning4j with Apache License 2.0
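// Verifies that a configuration containing a custom layer survives JSON and YAML round-trips.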
@Test
    public void testJsonMultiLayerNetwork() {
        MultiLayerConfiguration conf =
                        new NeuralNetConfiguration.Builder().list()
                                        .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
                                        .layer(1, new CustomLayer(3.14159)).layer(2,
                                                        new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                                                .activation(Activation.SOFTMAX).nIn(10).nOut(10).build())
                                        .build();

        String json = conf.toJson();
        String yaml = conf.toYaml();

//        System.out.println(json);

        MultiLayerConfiguration confFromJson = MultiLayerConfiguration.fromJson(json);
        assertEquals(conf, confFromJson);

        MultiLayerConfiguration confFromYaml = MultiLayerConfiguration.fromYaml(yaml);
        assertEquals(conf, confFromYaml);
    }
 
Example 7
Source File: GravesBidirectionalLSTMTest.java    From deeplearning4j with Apache License 2.0
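// Verifies that serializing, deserializing, and re-serializing a bidirectional LSTM configuration yields identical JSON.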
@Test
public void testSerialization() {

    final MultiLayerConfiguration conf1 = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .updater(new AdaGrad(0.1))
                    .l2(0.001)
                    .seed(12345).list()
                    .layer(0, new org.deeplearning4j.nn.conf.layers.GravesBidirectionalLSTM.Builder()
                                    .activation(Activation.TANH).nIn(2).nOut(2)
                                    .dist(new UniformDistribution(-0.05, 0.05)).build())
                    .layer(1, new org.deeplearning4j.nn.conf.layers.GravesBidirectionalLSTM.Builder()
                                    .activation(Activation.TANH).nIn(2).nOut(2)
                                    .dist(new UniformDistribution(-0.05, 0.05)).build())
                    .layer(2, new org.deeplearning4j.nn.conf.layers.RnnOutputLayer.Builder()
                                    .activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT)
                                    .nIn(2).nOut(2).build())
                    .build();


    final String json1 = conf1.toJson();

    final MultiLayerConfiguration conf2 = MultiLayerConfiguration.fromJson(json1);

    final String json2 = conf2.toJson();


    TestCase.assertEquals(json1, json2);
}
 
Example 8
Source File: MiscRegressionTests.java    From deeplearning4j with Apache License 2.0
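// Regression test: a DenseLayer wrapped in FrozenLayer survives a JSON round-trip.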
@Test
public void testFrozenNewFormat(){
    MultiLayerConfiguration configuration = new NeuralNetConfiguration.Builder()
            .list()
            .layer(0, new FrozenLayer(new DenseLayer.Builder().nIn(10).nOut(10).build()))
            .build();

    String json = configuration.toJson();
    MultiLayerConfiguration fromJson = MultiLayerConfiguration.fromJson(json);
    assertEquals(configuration, fromJson);
}
 
Example 9
Source File: CNN1DGradientCheckTest.java    From deeplearning4j with Apache License 2.0
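// Gradient check for 1D convolutions with Cropping1D; each generated configuration is also round-tripped through JSON.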
@Test
    public void testCnn1DWithCropping1D() {
        Nd4j.getRandom().setSeed(1337);

        int[] minibatchSizes = {1, 3};
        int length = 7;
        int convNIn = 2;
        int convNOut1 = 3;
        int convNOut2 = 4;
        int finalNOut = 4;


        int[] kernels = {1, 2, 4};
        int stride = 1;

        int padding = 0;
        int cropping = 1;
        int croppedLength = length - 2 * cropping;

        Activation[] activations = {Activation.SIGMOID};
        SubsamplingLayer.PoolingType[] poolingTypes =
                new SubsamplingLayer.PoolingType[]{SubsamplingLayer.PoolingType.MAX,
                        SubsamplingLayer.PoolingType.AVG, SubsamplingLayer.PoolingType.PNORM};

        for (Activation afn : activations) {
            for (SubsamplingLayer.PoolingType poolingType : poolingTypes) {
                for (int minibatchSize : minibatchSizes) {
                    for (int kernel : kernels) {
                        INDArray input = Nd4j.rand(new int[]{minibatchSize, convNIn, length});
                        INDArray labels = Nd4j.zeros(minibatchSize, finalNOut, croppedLength);
                        for (int i = 0; i < minibatchSize; i++) {
                            for (int j = 0; j < croppedLength; j++) {
                                labels.putScalar(new int[]{i, i % finalNOut, j}, 1.0);
                            }
                        }

                        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                                .dataType(DataType.DOUBLE)
                                .updater(new NoOp())
                                .dist(new NormalDistribution(0, 1)).convolutionMode(ConvolutionMode.Same).list()
                                .layer(new Convolution1DLayer.Builder().activation(afn).kernelSize(kernel)
                                        .stride(stride).padding(padding).nIn(convNIn).nOut(convNOut1)
                                        .build())
                                .layer(new Cropping1D.Builder(cropping).build())
                                .layer(new Convolution1DLayer.Builder().activation(afn).kernelSize(kernel)
                                        .stride(stride).padding(padding).nIn(convNOut1).nOut(convNOut2)
                                        .build())
                                .layer(new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                        .activation(Activation.SOFTMAX).nOut(finalNOut).build())
                                .setInputType(InputType.recurrent(convNIn, length)).build();

                        String json = conf.toJson();
                        MultiLayerConfiguration c2 = MultiLayerConfiguration.fromJson(json);
                        assertEquals(conf, c2);

                        MultiLayerNetwork net = new MultiLayerNetwork(conf);
                        net.init();

                        String msg = "PoolingType=" + poolingType + ", minibatch=" + minibatchSize + ", activationFn="
                                + afn + ", kernel = " + kernel;

                        if (PRINT_RESULTS) {
                            System.out.println(msg);
//                            for (int j = 0; j < net.getnLayers(); j++)
//                                System.out.println("Layer " + j + " # params: " + net.getLayer(j).numParams());
                        }

                        boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR,
                                DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels);

                        assertTrue(msg, gradOK);

                        TestUtils.testModelSerialization(net);
                    }
                }
            }
        }
    }
 
Example 10
Source File: CNN1DGradientCheckTest.java    From deeplearning4j with Apache License 2.0
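// Gradient check for 1D convolutions with Subsampling1D; each generated configuration is also round-tripped through JSON.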
@Test
    public void testCnn1DWithSubsampling1D() {
        Nd4j.getRandom().setSeed(12345);

        int[] minibatchSizes = {1, 3};
        int length = 7;
        int convNIn = 2;
        int convNOut1 = 3;
        int convNOut2 = 4;
        int finalNOut = 4;

        int[] kernels = {1, 2, 4};
        int stride = 1;
        int padding = 0;
        int pnorm = 2;

        Activation[] activations = {Activation.SIGMOID, Activation.TANH};
        SubsamplingLayer.PoolingType[] poolingTypes =
                new SubsamplingLayer.PoolingType[]{SubsamplingLayer.PoolingType.MAX,
                        SubsamplingLayer.PoolingType.AVG, SubsamplingLayer.PoolingType.PNORM};

        for (Activation afn : activations) {
            for (SubsamplingLayer.PoolingType poolingType : poolingTypes) {
                for (int minibatchSize : minibatchSizes) {
                    for (int kernel : kernels) {
                        INDArray input = Nd4j.rand(new int[]{minibatchSize, convNIn, length});
                        INDArray labels = Nd4j.zeros(minibatchSize, finalNOut, length);
                        for (int i = 0; i < minibatchSize; i++) {
                            for (int j = 0; j < length; j++) {
                                labels.putScalar(new int[]{i, i % finalNOut, j}, 1.0);
                            }
                        }

                        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                                .dataType(DataType.DOUBLE)
                                .updater(new NoOp())
                                .dist(new NormalDistribution(0, 1)).convolutionMode(ConvolutionMode.Same).list()
                                .layer(0, new Convolution1DLayer.Builder().activation(afn).kernelSize(kernel)
                                        .stride(stride).padding(padding).nIn(convNIn).nOut(convNOut1)
                                        .build())
                                .layer(1, new Convolution1DLayer.Builder().activation(afn).kernelSize(kernel)
                                        .stride(stride).padding(padding).nIn(convNOut1).nOut(convNOut2)
                                        .build())
                                .layer(2, new Subsampling1DLayer.Builder(poolingType).kernelSize(kernel)
                                        .stride(stride).padding(padding).pnorm(pnorm).build())
                                .layer(3, new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                        .activation(Activation.SOFTMAX).nOut(finalNOut).build())
                                .setInputType(InputType.recurrent(convNIn, length)).build();

                        String json = conf.toJson();
                        MultiLayerConfiguration c2 = MultiLayerConfiguration.fromJson(json);
                        assertEquals(conf, c2);

                        MultiLayerNetwork net = new MultiLayerNetwork(conf);
                        net.init();

                        String msg = "PoolingType=" + poolingType + ", minibatch=" + minibatchSize + ", activationFn="
                                + afn + ", kernel = " + kernel;

                        if (PRINT_RESULTS) {
                            System.out.println(msg);
//                            for (int j = 0; j < net.getnLayers(); j++)
//                                System.out.println("Layer " + j + " # params: " + net.getLayer(j).numParams());
                        }

                        boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR,
                                DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels);

                        assertTrue(msg, gradOK);
                        TestUtils.testModelSerialization(net);
                    }
                }
            }
        }
    }
 
Example 11
Source File: CNN1DGradientCheckTest.java    From deeplearning4j with Apache License 2.0
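// Gradient check for LocallyConnected1D layers; each generated configuration is also round-tripped through JSON.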
@Test
    public void testCnn1DWithLocallyConnected1D() {
        Nd4j.getRandom().setSeed(1337);

        int[] minibatchSizes = {2, 3};
        int length = 7;
        int convNIn = 2;
        int convNOut1 = 3;
        int convNOut2 = 4;
        int finalNOut = 4;

        int[] kernels = {1};
        int stride = 1;
        int padding = 0;

        Activation[] activations = {Activation.SIGMOID};

        for (Activation afn : activations) {
            for (int minibatchSize : minibatchSizes) {
                for (int kernel : kernels) {
                    INDArray input = Nd4j.rand(new int[]{minibatchSize, convNIn, length});
                    INDArray labels = Nd4j.zeros(minibatchSize, finalNOut, length);
                    for (int i = 0; i < minibatchSize; i++) {
                        for (int j = 0; j < length; j++) {
                            labels.putScalar(new int[]{i, i % finalNOut, j}, 1.0);
                        }
                    }

                    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                            .dataType(DataType.DOUBLE)
                            .updater(new NoOp())
                            .dist(new NormalDistribution(0, 1)).convolutionMode(ConvolutionMode.Same).list()
                            .layer(new Convolution1DLayer.Builder().activation(afn).kernelSize(kernel)
                                    .stride(stride).padding(padding).nIn(convNIn).nOut(convNOut1)
                                    .build())
                            .layer(new LocallyConnected1D.Builder().activation(afn).kernelSize(kernel)
                                    .stride(stride).padding(padding).nIn(convNOut1).nOut(convNOut2).hasBias(false)
                                    .build())
                            .layer(new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                    .activation(Activation.SOFTMAX).nOut(finalNOut).build())
                            .setInputType(InputType.recurrent(convNIn, length)).build();

                    String json = conf.toJson();
                    MultiLayerConfiguration c2 = MultiLayerConfiguration.fromJson(json);
                    assertEquals(conf, c2);

                    MultiLayerNetwork net = new MultiLayerNetwork(conf);
                    net.init();

                    String msg = "Minibatch=" + minibatchSize + ", activationFn="
                            + afn + ", kernel = " + kernel;

                    if (PRINT_RESULTS) {
                        System.out.println(msg);
//                        for (int j = 0; j < net.getnLayers(); j++)
//                            System.out.println("Layer " + j + " # params: " + net.getLayer(j).numParams());
                    }

                    boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR,
                            DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels);

                    assertTrue(msg, gradOK);

                    TestUtils.testModelSerialization(net);
                }

            }
        }
    }
 
Example 12
Source File: TestCustomUpdater.java    From deeplearning4j with Apache License 2.0
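// Compares a custom IUpdater against the built-in Sgd updater, round-trips the custom configuration through JSON, and checks that both networks compute identical gradients.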
@Test
public void testCustomUpdater() {

    //Create a simple custom updater, equivalent to the SGD updater

    double lr = 0.03;

    Nd4j.getRandom().setSeed(12345);
    MultiLayerConfiguration conf1 = new NeuralNetConfiguration.Builder().seed(12345)
                    .activation(Activation.TANH).updater(new CustomIUpdater(lr)) //Specify custom IUpdater
                    .list().layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
                    .layer(1, new OutputLayer.Builder().nIn(10).nOut(10)
                                    .lossFunction(LossFunctions.LossFunction.MSE).build())
                    .build();

    Nd4j.getRandom().setSeed(12345);
    MultiLayerConfiguration conf2 = new NeuralNetConfiguration.Builder().seed(12345)
                    .activation(Activation.TANH).updater(new Sgd(lr)).list()
                    .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build()).layer(1, new OutputLayer.Builder()
                                    .nIn(10).nOut(10).lossFunction(LossFunctions.LossFunction.MSE).build())
                    .build();

    //First: Check updater config
    assertTrue(((BaseLayer) conf1.getConf(0).getLayer()).getIUpdater() instanceof CustomIUpdater);
    assertTrue(((BaseLayer) conf1.getConf(1).getLayer()).getIUpdater() instanceof CustomIUpdater);
    assertTrue(((BaseLayer) conf2.getConf(0).getLayer()).getIUpdater() instanceof Sgd);
    assertTrue(((BaseLayer) conf2.getConf(1).getLayer()).getIUpdater() instanceof Sgd);

    CustomIUpdater u0_0 = (CustomIUpdater) ((BaseLayer) conf1.getConf(0).getLayer()).getIUpdater();
    CustomIUpdater u0_1 = (CustomIUpdater) ((BaseLayer) conf1.getConf(1).getLayer()).getIUpdater();
    assertEquals(lr, u0_0.getLearningRate(), 1e-6);
    assertEquals(lr, u0_1.getLearningRate(), 1e-6);

    Sgd u1_0 = (Sgd) ((BaseLayer) conf2.getConf(0).getLayer()).getIUpdater();
    Sgd u1_1 = (Sgd) ((BaseLayer) conf2.getConf(1).getLayer()).getIUpdater();
    assertEquals(lr, u1_0.getLearningRate(), 1e-6);
    assertEquals(lr, u1_1.getLearningRate(), 1e-6);


    //Second: check JSON
    String asJson = conf1.toJson();
    MultiLayerConfiguration fromJson = MultiLayerConfiguration.fromJson(asJson);
    assertEquals(conf1, fromJson);

    Nd4j.getRandom().setSeed(12345);
    MultiLayerNetwork net1 = new MultiLayerNetwork(conf1);
    net1.init();

    Nd4j.getRandom().setSeed(12345);
    MultiLayerNetwork net2 = new MultiLayerNetwork(conf2);
    net2.init();


    //Third: check gradients are equal
    INDArray in = Nd4j.rand(5, 10);
    INDArray labels = Nd4j.rand(5, 10);

    net1.setInput(in);
    net2.setInput(in);

    net1.setLabels(labels);
    net2.setLabels(labels);

    net1.computeGradientAndScore();
    net2.computeGradientAndScore();

    assertEquals(net1.getFlattenedGradients(), net2.getFlattenedGradients());
}
 
Example 13
Source File: CNN3DGradientCheckTest.java    From deeplearning4j with Apache License 2.0
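// Gradient check for 3D deconvolution; each configuration is round-tripped through JSON before the check.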
@Test
public void testDeconv3d() {
    Nd4j.getRandom().setSeed(12345);
    // Note: we checked this with a variety of parameters, but it takes a lot of time.
    int[] depths = {8, 8, 9};
    int[] heights = {8, 9, 9};
    int[] widths = {8, 8, 9};


    int[][] kernels = {{2, 2, 2}, {3, 3, 3}, {2, 3, 2}};
    int[][] strides = {{1, 1, 1}, {1, 1, 1}, {2, 2, 2}};

    Activation[] activations = {Activation.SIGMOID, Activation.TANH, Activation.IDENTITY};

    ConvolutionMode[] modes = {ConvolutionMode.Truncate, ConvolutionMode.Same, ConvolutionMode.Same};
    int[] mbs = {1, 3, 2};
    Convolution3D.DataFormat[] dataFormats = new Convolution3D.DataFormat[]{Convolution3D.DataFormat.NCDHW, Convolution3D.DataFormat.NDHWC, Convolution3D.DataFormat.NCDHW};

    int convNIn = 2;
    int finalNOut = 2;
    int[] deconvOut = {2, 3, 4};

    for (int i = 0; i < activations.length; i++) {
        Activation afn = activations[i];
        int miniBatchSize = mbs[i];
        int depth = depths[i];
        int height = heights[i];
        int width = widths[i];
        ConvolutionMode mode = modes[i];
        int[] kernel = kernels[i];
        int[] stride = strides[i];
        Convolution3D.DataFormat df = dataFormats[i];
        int dOut = deconvOut[i];

        INDArray input;
        if (df == Convolution3D.DataFormat.NDHWC) {
            input = Nd4j.rand(new int[]{miniBatchSize, depth, height, width, convNIn});
        } else {
            input = Nd4j.rand(new int[]{miniBatchSize, convNIn, depth, height, width});
        }
        INDArray labels = Nd4j.zeros(miniBatchSize, finalNOut);
        for (int j = 0; j < miniBatchSize; j++) {
            labels.putScalar(new int[]{j, j % finalNOut}, 1.0);
        }

        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .dataType(DataType.DOUBLE)
                .updater(new NoOp())
                .weightInit(new NormalDistribution(0, 0.1))
                .list()
                .layer(0, new Convolution3D.Builder().activation(afn).kernelSize(kernel)
                        .stride(stride).nIn(convNIn).nOut(dOut).hasBias(false)
                        .convolutionMode(mode).dataFormat(df)
                        .build())
                .layer(1, new Deconvolution3D.Builder().activation(afn).kernelSize(kernel)
                        .stride(stride).nOut(dOut).hasBias(false)
                        .convolutionMode(mode).dataFormat(df)
                        .build())
                .layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                        .activation(Activation.SOFTMAX).nOut(finalNOut).build())
                .setInputType(InputType.convolutional3D(df, depth, height, width, convNIn)).build();

        String json = conf.toJson();
        MultiLayerConfiguration c2 = MultiLayerConfiguration.fromJson(json);
        assertEquals(conf, c2);

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        String msg = "DataFormat = " + df + ", minibatch size = " + miniBatchSize + ", activationFn=" + afn
                + ", kernel = " + Arrays.toString(kernel) + ", stride = "
                + Arrays.toString(stride) + ", mode = " + mode.toString()
                + ", input depth " + depth + ", input height " + height
                + ", input width " + width;

        if (PRINT_RESULTS) {
            log.info(msg);
        }

        boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(input)
                .labels(labels).subset(true).maxPerParam(64));

        assertTrue(msg, gradOK);

        TestUtils.testModelSerialization(net);
    }
}
 
Example 14
Source File: FrozenLayerWithBackpropTest.java    From deeplearning4j with Apache License 2.0
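// Verifies that FrozenLayerWithBackprop layers instantiated from JSON behave identically to those built via the builder.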
@Test
public void testFrozenWithBackpropLayerInstantiation() {
    //We need to be able to instantiate frozen layers from JSON etc, and have them be the same as if
    // they were initialized via the builder
    MultiLayerConfiguration conf1 = new NeuralNetConfiguration.Builder().seed(12345).list()
            .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).activation(Activation.TANH)
                    .weightInit(WeightInit.XAVIER).build())
            .layer(1, new DenseLayer.Builder().nIn(10).nOut(10).activation(Activation.TANH)
                    .weightInit(WeightInit.XAVIER).build())
            .layer(2, new OutputLayer.Builder(
                    LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(10)
                    .nOut(10).build())
            .build();

    MultiLayerConfiguration conf2 = new NeuralNetConfiguration.Builder().seed(12345).list().layer(0,
            new org.deeplearning4j.nn.conf.layers.misc.FrozenLayerWithBackprop(new DenseLayer.Builder().nIn(10).nOut(10)
                    .activation(Activation.TANH).weightInit(WeightInit.XAVIER).build()))
            .layer(1, new org.deeplearning4j.nn.conf.layers.misc.FrozenLayerWithBackprop(
                    new DenseLayer.Builder().nIn(10).nOut(10).activation(Activation.TANH)
                            .weightInit(WeightInit.XAVIER).build()))
            .layer(2, new OutputLayer.Builder(
                    LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(10)
                    .nOut(10).build())
            .build();

    MultiLayerNetwork net1 = new MultiLayerNetwork(conf1);
    net1.init();
    MultiLayerNetwork net2 = new MultiLayerNetwork(conf2);
    net2.init();

    assertEquals(net1.params(), net2.params());


    String json = conf2.toJson();
    MultiLayerConfiguration fromJson = MultiLayerConfiguration.fromJson(json);

    assertEquals(conf2, fromJson);

    MultiLayerNetwork net3 = new MultiLayerNetwork(fromJson);
    net3.init();

    INDArray input = Nd4j.rand(10, 10);

    INDArray out2 = net2.output(input);
    INDArray out3 = net3.output(input);

    assertEquals(out2, out3);
}
 
Example 15
Source File: CNN3DGradientCheckTest.java    From deeplearning4j with Apache License 2.0
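// Gradient check for 3D cropping; each configuration is round-tripped through JSON before the check.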
@Test
    public void testCnn3DCropping() {
        Nd4j.getRandom().setSeed(42);

        int depth = 6;
        int height = 6;
        int width = 6;


        int[] minibatchSizes = {3};
        int convNIn = 2;
        int convNOut1 = 3;
        int convNOut2 = 4;
        int denseNOut = 5;
        int finalNOut = 8;


        int[] kernel = {1, 1, 1};
        int[] cropping = {0, 0, 1, 1, 2, 2};

        Activation[] activations = {Activation.SIGMOID};

        ConvolutionMode[] modes = {ConvolutionMode.Same};

        for (Activation afn : activations) {
            for (int miniBatchSize : minibatchSizes) {
                for (ConvolutionMode mode : modes) {

                    int outDepth = mode == ConvolutionMode.Same ?
                            depth : (depth - kernel[0]) + 1;
                    int outHeight = mode == ConvolutionMode.Same ?
                            height : (height - kernel[1]) + 1;
                    int outWidth = mode == ConvolutionMode.Same ?
                            width : (width - kernel[2]) + 1;

                    outDepth -= cropping[0] + cropping[1];
                    outHeight -= cropping[2] + cropping[3];
                    outWidth -= cropping[4] + cropping[5];

                    INDArray input = Nd4j.rand(new int[]{miniBatchSize, convNIn, depth, height, width});
                    INDArray labels = Nd4j.zeros(miniBatchSize, finalNOut);
                    for (int i = 0; i < miniBatchSize; i++) {
                        labels.putScalar(new int[]{i, i % finalNOut}, 1.0);
                    }

                    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                            .dataType(DataType.DOUBLE)
                            .updater(new NoOp()).weightInit(WeightInit.LECUN_NORMAL)
                            .dist(new NormalDistribution(0, 1))
                            .list()
                            .layer(0, new Convolution3D.Builder().activation(afn).kernelSize(kernel)
                                    .nIn(convNIn).nOut(convNOut1).hasBias(false)
                                    .convolutionMode(mode).dataFormat(Convolution3D.DataFormat.NCDHW)
                                    .build())
                            .layer(1, new Convolution3D.Builder().activation(afn).kernelSize(1, 1, 1)
                                    .nIn(convNOut1).nOut(convNOut2).hasBias(false)
                                    .convolutionMode(mode).dataFormat(Convolution3D.DataFormat.NCDHW)
                                    .build())
                            .layer(2, new Cropping3D.Builder(cropping).build())
                            .layer(3, new DenseLayer.Builder().nOut(denseNOut).build())
                            .layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                    .activation(Activation.SOFTMAX).nOut(finalNOut).build())
                            .inputPreProcessor(3,
                                    new Cnn3DToFeedForwardPreProcessor(outDepth, outHeight, outWidth,
                                            convNOut2, true))
                            .setInputType(InputType.convolutional3D(depth, height, width, convNIn)).build();

                    String json = conf.toJson();
                    MultiLayerConfiguration c2 = MultiLayerConfiguration.fromJson(json);
                    assertEquals(conf, c2);

                    MultiLayerNetwork net = new MultiLayerNetwork(conf);
                    net.init();

                    String msg = "Minibatch size = " + miniBatchSize + ", activationFn=" + afn
                            + ", kernel = " + Arrays.toString(kernel) + ", mode = " + mode.toString()
                            + ", input depth " + depth + ", input height " + height
                            + ", input width " + width;

                    if (PRINT_RESULTS) {
                        log.info(msg);
//                        for (int j = 0; j < net.getnLayers(); j++) {
//                            log.info("Layer " + j + " # params: " + net.getLayer(j).numParams());
//                        }
                    }

                    boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS,
                            DEFAULT_MAX_REL_ERROR, DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS,
                            RETURN_ON_FIRST_FAILURE, input, labels);

                    assertTrue(msg, gradOK);

                    TestUtils.testModelSerialization(net);
                }

            }
        }
    }
 
Example 16
Source File: CNN3DGradientCheckTest.java    From deeplearning4j with Apache License 2.0
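// Gradient check for 3D upsampling; each configuration is round-tripped through JSON before the check.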
@Test
    public void testCnn3DUpsampling() {
        Nd4j.getRandom().setSeed(42);

        int depth = 2;
        int height = 2;
        int width = 2;


        int[] minibatchSizes = {3};
        int convNIn = 2;
        int convNOut = 4;
        int denseNOut = 5;
        int finalNOut = 42;


        int[] upsamplingSize = {2, 2, 2};

        Activation[] activations = {Activation.SIGMOID};


        ConvolutionMode[] modes = {ConvolutionMode.Truncate};

        for (Activation afn : activations) {
            for (int miniBatchSize : minibatchSizes) {
                for (ConvolutionMode mode : modes) {
                    for(Convolution3D.DataFormat df : Convolution3D.DataFormat.values()) {

                        int outDepth = depth * upsamplingSize[0];
                        int outHeight = height * upsamplingSize[1];
                        int outWidth = width * upsamplingSize[2];

                        INDArray input = df == Convolution3D.DataFormat.NCDHW ? Nd4j.rand(miniBatchSize, convNIn, depth, height, width) : Nd4j.rand(miniBatchSize, depth, height, width, convNIn);
                        INDArray labels = Nd4j.zeros(miniBatchSize, finalNOut);
                        for (int i = 0; i < miniBatchSize; i++) {
                            labels.putScalar(new int[]{i, i % finalNOut}, 1.0);
                        }

                        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                                .dataType(DataType.DOUBLE)
                                .updater(new NoOp()).weightInit(WeightInit.LECUN_NORMAL)
                                .dist(new NormalDistribution(0, 1))
                                .seed(12345)
                                .list()
                                .layer(0, new Convolution3D.Builder().activation(afn).kernelSize(1, 1, 1)
                                        .nIn(convNIn).nOut(convNOut).hasBias(false)
                                        .convolutionMode(mode).dataFormat(df)
                                        .build())
                                .layer(1, new Upsampling3D.Builder(upsamplingSize[0]).dataFormat(df).build())
                                .layer(2, new DenseLayer.Builder().nOut(denseNOut).build())
                                .layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                        .activation(Activation.SOFTMAX).nOut(finalNOut).build())
                                .inputPreProcessor(2,
                                        new Cnn3DToFeedForwardPreProcessor(outDepth, outHeight, outWidth,
                                                convNOut, true))
                                .setInputType(InputType.convolutional3D(df, depth, height, width, convNIn)).build();

                        String json = conf.toJson();
                        MultiLayerConfiguration c2 = MultiLayerConfiguration.fromJson(json);
                        assertEquals(conf, c2);

                        MultiLayerNetwork net = new MultiLayerNetwork(conf);
                        net.init();

                        String msg = "Minibatch size = " + miniBatchSize + ", activationFn=" + afn
                                + ", kernel = " + Arrays.toString(upsamplingSize) + ", mode = " + mode.toString()
                                + ", input depth " + depth + ", input height " + height
                                + ", input width " + width;

                        if (PRINT_RESULTS) {
                            log.info(msg);
//                            for (int j = 0; j < net.getnLayers(); j++) {
//                                log.info("Layer " + j + " # params: " + net.getLayer(j).numParams());
//                            }
                        }

                        boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS,
                                DEFAULT_MAX_REL_ERROR, DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS,
                                RETURN_ON_FIRST_FAILURE, input, labels);

                        assertTrue(msg, gradOK);

                        TestUtils.testModelSerialization(net);
                    }
                }
            }
        }
    }
 
Example 17
Source File: TestCustomLayers.java    From deeplearning4j with Apache License 2.0
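// Checks JSON and YAML round-trips for a custom output layer, then verifies the initialized network matches an equivalent standard OutputLayer.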
@Test
    public void testCustomOutputLayerMLN() {
        //Create a MultiLayerConfiguration containing the custom output layer, and check that the JSON and YAML configs actually work...
        MultiLayerConfiguration conf =
                        new NeuralNetConfiguration.Builder().seed(12345).list()
                                        .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
                                        .layer(1, new CustomOutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                                        .activation(Activation.SOFTMAX)
                                                        .nIn(10).nOut(10).build())
                                        .build();

        String json = conf.toJson();
        String yaml = conf.toYaml();

//        System.out.println(json);

        MultiLayerConfiguration confFromJson = MultiLayerConfiguration.fromJson(json);
        assertEquals(conf, confFromJson);

        MultiLayerConfiguration confFromYaml = MultiLayerConfiguration.fromYaml(yaml);
        assertEquals(conf, confFromYaml);

        //Third: check initialization
        Nd4j.getRandom().setSeed(12345);
        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        assertTrue(net.getLayer(1) instanceof CustomOutputLayerImpl);

        //Fourth: compare to an equivalent standard output layer (should be identical)
        MultiLayerConfiguration conf2 =
                        new NeuralNetConfiguration.Builder().seed(12345).weightInit(WeightInit.XAVIER)
                                        .list()
                                        .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build()).layer(1,
                                                        new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                                                .activation(Activation.SOFTMAX).nIn(10).nOut(10).build())
                                        .build();
        Nd4j.getRandom().setSeed(12345);
        MultiLayerNetwork net2 = new MultiLayerNetwork(conf2);
        net2.init();

        assertEquals(net2.params(), net.params());

        INDArray testFeatures = Nd4j.rand(1, 10);
        INDArray testLabels = Nd4j.zeros(1, 10);
        testLabels.putScalar(0, 3, 1.0);
        DataSet ds = new DataSet(testFeatures, testLabels);

        assertEquals(net2.output(testFeatures), net.output(testFeatures));
        assertEquals(net2.score(ds), net.score(ds), 1e-6);
    }
 
Example 18
Source File: CNN3DGradientCheckTest.java    From deeplearning4j with Apache License 2.0
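// Gradient check for 3D pooling; each configuration is round-tripped through JSON before the check.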
@Test
public void testCnn3DPooling() {
    Nd4j.getRandom().setSeed(42);

    int depth = 4;
    int height = 4;
    int width = 4;


    int[] minibatchSizes = {3};
    int convNIn = 2;
    int convNOut = 4;
    int denseNOut = 5;
    int finalNOut = 42;

    int[] kernel = {2, 2, 2};

    Activation[] activations = {Activation.SIGMOID};

    Subsampling3DLayer.PoolingType[] poolModes = {Subsampling3DLayer.PoolingType.AVG};

    ConvolutionMode[] modes = {ConvolutionMode.Truncate};

    for (Activation afn : activations) {
        for (int miniBatchSize : minibatchSizes) {
            for (Subsampling3DLayer.PoolingType pool : poolModes) {
                for (ConvolutionMode mode : modes) {
                    for (Convolution3D.DataFormat df : Convolution3D.DataFormat.values()) {

                        int outDepth = depth / kernel[0];
                        int outHeight = height / kernel[1];
                        int outWidth = width / kernel[2];

                        INDArray input = Nd4j.rand(
                                df == Convolution3D.DataFormat.NCDHW ? new int[]{miniBatchSize, convNIn, depth, height, width}
                                        : new int[]{miniBatchSize, depth, height, width, convNIn});
                        INDArray labels = Nd4j.zeros(miniBatchSize, finalNOut);
                        for (int i = 0; i < miniBatchSize; i++) {
                            labels.putScalar(new int[]{i, i % finalNOut}, 1.0);
                        }

                        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                                .dataType(DataType.DOUBLE)
                                .updater(new NoOp())
                                .weightInit(WeightInit.XAVIER)
                                .dist(new NormalDistribution(0, 1))
                                .list()
                                .layer(0, new Convolution3D.Builder().activation(afn).kernelSize(1, 1, 1)
                                        .nIn(convNIn).nOut(convNOut).hasBias(false)
                                        .convolutionMode(mode).dataFormat(df)
                                        .build())
                                .layer(1, new Subsampling3DLayer.Builder(kernel)
                                        .poolingType(pool).convolutionMode(mode).dataFormat(df).build())
                                .layer(2, new DenseLayer.Builder().nOut(denseNOut).build())
                                .layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                        .activation(Activation.SOFTMAX).nOut(finalNOut).build())
                                .inputPreProcessor(2,
                                        new Cnn3DToFeedForwardPreProcessor(outDepth, outHeight, outWidth, convNOut, df))
                                .setInputType(InputType.convolutional3D(df, depth, height, width, convNIn)).build();

                        String json = conf.toJson();
                        MultiLayerConfiguration c2 = MultiLayerConfiguration.fromJson(json);
                        assertEquals(conf, c2);

                        MultiLayerNetwork net = new MultiLayerNetwork(conf);
                        net.init();

                        String msg = "Minibatch size = " + miniBatchSize + ", activationFn=" + afn
                                + ", kernel = " + Arrays.toString(kernel) + ", mode = " + mode.toString()
                                + ", input depth " + depth + ", input height " + height
                                + ", input width " + width + ", dataFormat=" + df;

                        if (PRINT_RESULTS) {
                            log.info(msg);
                        }

                        boolean gradOK = GradientCheckUtil.checkGradients(net, DEFAULT_EPS,
                                DEFAULT_MAX_REL_ERROR, DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS,
                                RETURN_ON_FIRST_FAILURE, input, labels);

                        assertTrue(msg, gradOK);

                        TestUtils.testModelSerialization(net);
                    }
                }
            }
        }
    }
}
 
Example 19
Source File: FrozenLayerTest.java    From deeplearning4j with Apache License 2.0
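// Verifies that FrozenLayer layers instantiated from JSON behave identically to those built via the builder.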
@Test
public void testFrozenLayerInstantiation() {
    //We need to be able to instantiate frozen layers from JSON etc, and have them be the same as if
    // they were initialized via the builder
    MultiLayerConfiguration conf1 = new NeuralNetConfiguration.Builder().seed(12345).list()
                    .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).activation(Activation.TANH)
                                    .weightInit(WeightInit.XAVIER).build())
                    .layer(1, new DenseLayer.Builder().nIn(10).nOut(10).activation(Activation.TANH)
                                    .weightInit(WeightInit.XAVIER).build())
                    .layer(2, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                    LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(10)
                                                    .nOut(10).build())
                    .build();

    MultiLayerConfiguration conf2 = new NeuralNetConfiguration.Builder().seed(12345).list().layer(0,
                    new org.deeplearning4j.nn.conf.layers.misc.FrozenLayer(new DenseLayer.Builder().nIn(10).nOut(10)
                                    .activation(Activation.TANH).weightInit(WeightInit.XAVIER).build()))
                    .layer(1, new org.deeplearning4j.nn.conf.layers.misc.FrozenLayer(
                                    new DenseLayer.Builder().nIn(10).nOut(10).activation(Activation.TANH)
                                                    .weightInit(WeightInit.XAVIER).build()))
                    .layer(2, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                    LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(10)
                                                    .nOut(10).build())
                    .build();

    MultiLayerNetwork net1 = new MultiLayerNetwork(conf1);
    net1.init();
    MultiLayerNetwork net2 = new MultiLayerNetwork(conf2);
    net2.init();

    assertEquals(net1.params(), net2.params());


    String json = conf2.toJson();
    MultiLayerConfiguration fromJson = MultiLayerConfiguration.fromJson(json);

    assertEquals(conf2, fromJson);

    MultiLayerNetwork net3 = new MultiLayerNetwork(fromJson);
    net3.init();

    INDArray input = Nd4j.rand(10, 10);

    INDArray out2 = net2.output(input);
    INDArray out3 = net3.output(input);

    assertEquals(out2, out3);
}
 
Example 20
Source File: CNN3DGradientCheckTest.java    From deeplearning4j with Apache License 2.0
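// Gradient check for 3D zero padding; each configuration is round-tripped through JSON before the check.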
@Test
    public void testCnn3DZeroPadding() {
        Nd4j.getRandom().setSeed(42);

        int depth = 4;
        int height = 4;
        int width = 4;


        int[] minibatchSizes = {3};
        int convNIn = 2;
        int convNOut1 = 3;
        int convNOut2 = 4;
        int denseNOut = 5;
        int finalNOut = 42;


        int[] kernel = {2, 2, 2};
        int[] zeroPadding = {1, 1, 2, 2, 3, 3};

        Activation[] activations = {Activation.SIGMOID};

        ConvolutionMode[] modes = {ConvolutionMode.Truncate, ConvolutionMode.Same};

        for (Activation afn : activations) {
            for (int miniBatchSize : minibatchSizes) {
                for (ConvolutionMode mode : modes) {

                    int outDepth = mode == ConvolutionMode.Same ?
                            depth : (depth - kernel[0]) + 1;
                    int outHeight = mode == ConvolutionMode.Same ?
                            height : (height - kernel[1]) + 1;
                    int outWidth = mode == ConvolutionMode.Same ?
                            width : (width - kernel[2]) + 1;

                    outDepth += zeroPadding[0] + zeroPadding[1];
                    outHeight += zeroPadding[2] + zeroPadding[3];
                    outWidth += zeroPadding[4] + zeroPadding[5];

                    INDArray input = Nd4j.rand(new int[]{miniBatchSize, convNIn, depth, height, width});
                    INDArray labels = Nd4j.zeros(miniBatchSize, finalNOut);
                    for (int i = 0; i < miniBatchSize; i++) {
                        labels.putScalar(new int[]{i, i % finalNOut}, 1.0);
                    }

                    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                            .dataType(DataType.DOUBLE)
                            .updater(new NoOp()).weightInit(WeightInit.LECUN_NORMAL)
                            .dist(new NormalDistribution(0, 1))
                            .list()
                            .layer(0, new Convolution3D.Builder().activation(afn).kernelSize(kernel)
                                    .nIn(convNIn).nOut(convNOut1).hasBias(false)
                                    .convolutionMode(mode).dataFormat(Convolution3D.DataFormat.NCDHW)
                                    .build())
                            .layer(1, new Convolution3D.Builder().activation(afn).kernelSize(1, 1, 1)
                                    .nIn(convNOut1).nOut(convNOut2).hasBias(false)
                                    .convolutionMode(mode).dataFormat(Convolution3D.DataFormat.NCDHW)
                                    .build())
                            .layer(2, new ZeroPadding3DLayer.Builder(zeroPadding).build())
                            .layer(3, new DenseLayer.Builder().nOut(denseNOut).build())
                            .layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                    .activation(Activation.SOFTMAX).nOut(finalNOut).build())
                            .inputPreProcessor(3,
                                    new Cnn3DToFeedForwardPreProcessor(outDepth, outHeight, outWidth,
                                            convNOut2, true))
                            .setInputType(InputType.convolutional3D(depth, height, width, convNIn)).build();

                    String json = conf.toJson();
                    MultiLayerConfiguration c2 = MultiLayerConfiguration.fromJson(json);
                    assertEquals(conf, c2);

                    MultiLayerNetwork net = new MultiLayerNetwork(conf);
                    net.init();

                    String msg = "Minibatch size = " + miniBatchSize + ", activationFn=" + afn
                            + ", kernel = " + Arrays.toString(kernel) + ", mode = " + mode.toString()
                            + ", input depth " + depth + ", input height " + height
                            + ", input width " + width;

                    if (PRINT_RESULTS) {
                        log.info(msg);
//                        for (int j = 0; j < net.getnLayers(); j++) {
//                            log.info("Layer " + j + " # params: " + net.getLayer(j).numParams());
//                        }
                    }

                    boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(net).input(input)
                            .labels(labels).subset(true).maxPerParam(512));

                    assertTrue(msg, gradOK);

                    TestUtils.testModelSerialization(net);
                }

            }
        }
    }