Java Code Examples for org.deeplearning4j.nn.conf.MultiLayerConfiguration#getConf()

The following examples show how to use org.deeplearning4j.nn.conf.MultiLayerConfiguration#getConf() . You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: TestVAE.java — from the deeplearning4j project (Apache License 2.0)
@Test
public void testForwardPass() {

    // Encoder stacks of depth 1, 2 and 3; decoder layer size is fixed at 13 throughout.
    int[][] encoderSizes = new int[][] {{12}, {12, 13}, {12, 13, 14}};
    for (int[] encoderSize : encoderSizes) {

        MultiLayerConfiguration mlc = new NeuralNetConfiguration.Builder().list().layer(0,
                        new org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder.Builder().nIn(10)
                                        .nOut(5).encoderLayerSizes(encoderSize).decoderLayerSizes(13).build())
                        .build();

        NeuralNetConfiguration layerConf = mlc.getConf(0);
        // Cast acts as a sanity check that layer 0 really is a VAE layer configuration.
        org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder vae =
                        (org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder) layerConf.getLayer();

        MultiLayerNetwork net = new MultiLayerNetwork(mlc);
        net.init();

        INDArray input = Nd4j.rand(1, 10);

        // feedForward returns the input activations first, then the layer output of shape [1, nOut].
        List<INDArray> activations = net.feedForward(input);
        assertArrayEquals(new long[] {1, 10}, activations.get(0).shape());
        assertArrayEquals(new long[] {1, 5}, activations.get(1).shape());
    }
}
 
Example 2
Source File: TestVAE.java — from the deeplearning4j project (Apache License 2.0)
@Test
public void testInitialization() {
    // VAE with nIn=10, latent size nOut=5, one encoder layer (12) and one decoder layer (13).
    MultiLayerConfiguration mlc =
                    new NeuralNetConfiguration.Builder().list()
                                    .layer(0, new org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder.Builder()
                                                    .nIn(10).nOut(5).encoderLayerSizes(12).decoderLayerSizes(13)
                                                    .build())
                                    .build();

    NeuralNetConfiguration c = mlc.getConf(0);
    org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder vae =
                    (org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder) c.getLayer();

    long allParams = vae.initializer().numParams(c);

    // Expected parameter count, term by term:
    //                  Encoder         Encoder -> p(z|x)       Decoder         //p(x|z)
    // (the 2x factors are mean + (pre-)variance for the Gaussian latent/reconstruction distributions)
    int expNumParams = (10 * 12 + 12) + (12 * (2 * 5) + (2 * 5)) + (5 * 13 + 13) + (13 * (2 * 10) + (2 * 10));
    assertEquals(expNumParams, allParams);

    MultiLayerNetwork net = new MultiLayerNetwork(mlc);
    net.init();

    assertEquals(expNumParams, net.getLayer(0).params().length());
    Map<String, INDArray> paramTable = net.getLayer(0).paramTable();
    // Sum per-parameter array lengths. Use long: INDArray.length() is long, and the
    // original "int count += long" compound assignment silently narrowed the value.
    long count = 0;
    for (INDArray arr : paramTable.values()) {
        count += arr.length();
    }
    assertEquals(expNumParams, count);

    assertEquals(expNumParams, net.getLayer(0).numParams());
}
 
Example 3
Source File: TestVAE.java — from the deeplearning4j project (Apache License 2.0)
@Test
public void testPretrainSimple() {
    // Minimal VAE: 3 inputs, latent size 4, one encoder layer (5), one decoder layer (6).
    int inputSize = 3;

    MultiLayerConfiguration mlc = new NeuralNetConfiguration.Builder().list()
                    .layer(0, new org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder.Builder()
                                    .nIn(inputSize).nOut(4).encoderLayerSizes(5).decoderLayerSizes(6).build())
                    .build();

    MultiLayerNetwork net = new MultiLayerNetwork(mlc);
    net.init();
    net.initGradientsView(); //TODO this should happen automatically

    // The gradient-view table must mirror the parameter table exactly:
    // same parameter keys, and a matching shape/length for each entry.
    Map<String, INDArray> paramTable = net.getLayer(0).paramTable();
    Map<String, INDArray> gradTable =
                    ((org.deeplearning4j.nn.layers.variational.VariationalAutoencoder) net.getLayer(0))
                                    .getGradientViews();

    assertEquals(paramTable.keySet(), gradTable.keySet());
    for (String s : paramTable.keySet()) {
        assertEquals(paramTable.get(s).length(), gradTable.get(s).length());
        assertArrayEquals(paramTable.get(s).shape(), gradTable.get(s).shape());
    }

    // Single random example: the point of this test is simply that unsupervised
    // layerwise pretraining of the VAE runs without throwing.
    INDArray data = Nd4j.rand(1, inputSize);
    net.pretrainLayer(0, data);
}
 
Example 4
Source File: TestMultiLayerSpace.java — from the deeplearning4j project (Apache License 2.0)
@Test
public void testVariationalAutoencoderLayerSpaceBasic() {
    // Layer space with exactly two searchable leaves: nIn in [50, 75] and a discrete
    // choice of reconstruction distribution {Gaussian, Bernoulli}. Everything else is fixed.
    MultiLayerSpace mls =
                    new MultiLayerSpace.Builder()
                                    .updater(new Sgd(0.005)).seed(
                                                    12345)
                                    .addLayer(new VariationalAutoencoderLayerSpace.Builder()
                                                    .nIn(new IntegerParameterSpace(50, 75)).nOut(200)
                                                    .encoderLayerSizes(234, 567).decoderLayerSizes(123, 456)
                                                    .reconstructionDistribution(
                                                                    new DiscreteParameterSpace<ReconstructionDistribution>(
                                                                                    new GaussianReconstructionDistribution(),
                                                                                    new BernoulliReconstructionDistribution()))
                                                    .build())
                                    .build();

    int numParams = mls.numParameters();

    //Assign numbers to each leaf ParameterSpace object (normally done by candidate generator - manual here for testing)
    List<ParameterSpace> noDuplicatesList = LeafUtils.getUniqueObjects(mls.collectLeaves());

    //Second: assign each leaf a contiguous run of global indices into the candidate vector
    int c = 0;
    for (ParameterSpace ps : noDuplicatesList) {
        int np = ps.numParameters();
        if (np == 1) {
            ps.setIndices(c++);
        } else {
            // Multi-parameter leaf: position j of the leaf maps to global index c, c+1, ...
            // (The original wrote values[c++] = j, which stores local ordinals at the wrong
            // positions and overruns the array once c >= np.)
            int[] values = new int[np];
            for (int j = 0; j < np; j++)
                values[j] = c++;
            ps.setIndices(values);
        }
    }

    // All-zeros candidate vector -> every parameter takes its minimum / first option.
    double[] zeros = new double[numParams];

    DL4JConfiguration configuration = mls.getValue(zeros);

    MultiLayerConfiguration conf = configuration.getMultiLayerConfiguration();
    assertEquals(1, conf.getConfs().size());

    NeuralNetConfiguration nnc = conf.getConf(0);
    VariationalAutoencoder vae = (VariationalAutoencoder) nnc.getLayer();

    assertEquals(50, vae.getNIn());
    assertEquals(200, vae.getNOut());

    assertArrayEquals(new int[] {234, 567}, vae.getEncoderLayerSizes());
    assertArrayEquals(new int[] {123, 456}, vae.getDecoderLayerSizes());

    assertTrue(vae.getOutputDistribution() instanceof GaussianReconstructionDistribution);

    // All-ones candidate vector -> every parameter takes its maximum / last option.
    double[] ones = new double[numParams];
    for (int i = 0; i < ones.length; i++)
        ones[i] = 1.0;

    configuration = mls.getValue(ones);

    conf = configuration.getMultiLayerConfiguration();
    assertEquals(1, conf.getConfs().size());

    nnc = conf.getConf(0);
    vae = (VariationalAutoencoder) nnc.getLayer();

    assertEquals(75, vae.getNIn());
    assertEquals(200, vae.getNOut());

    assertArrayEquals(new int[] {234, 567}, vae.getEncoderLayerSizes());
    assertArrayEquals(new int[] {123, 456}, vae.getDecoderLayerSizes());

    assertTrue(vae.getOutputDistribution() instanceof BernoulliReconstructionDistribution);
}