Java Code Examples for org.deeplearning4j.nn.multilayer.MultiLayerNetwork#pretrainLayer()

The following examples show how to use org.deeplearning4j.nn.multilayer.MultiLayerNetwork#pretrainLayer(). You can vote up the examples you find useful or vote down the ones you don't, and you can reach the original project or source file via the links above each example. Related API usage can be found in the sidebar.
Example 1
Source File: TestVAE.java — from deeplearning4j (Apache License 2.0), 5 votes
@Test
public void testPretrainSimple() {

    int inputSize = 3;

    // Single-layer VAE: nIn=3 -> latent size 4, encoder layer [5], decoder layer [6].
    MultiLayerConfiguration mlc = new NeuralNetConfiguration.Builder().list()
                    .layer(0, new org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder.Builder()
                                    .nIn(inputSize).nOut(4).encoderLayerSizes(5).decoderLayerSizes(6).build())
                    .build();

    NeuralNetConfiguration c = mlc.getConf(0);
    org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder vae =
                    (org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder) c.getLayer();

    // Parameter count as reported by the layer's initializer, checked against
    // the initialized network below.
    long allParams = vae.initializer().numParams(c);

    MultiLayerNetwork net = new MultiLayerNetwork(mlc);
    net.init();
    net.initGradientsView(); //TODO this should happen automatically

    // The parameter table and the gradient-view table must be structurally
    // identical: same keys, and matching length/shape per parameter.
    Map<String, INDArray> paramTable = net.getLayer(0).paramTable();
    Map<String, INDArray> gradTable =
                    ((org.deeplearning4j.nn.layers.variational.VariationalAutoencoder) net.getLayer(0))
                                    .getGradientViews();

    assertEquals(paramTable.keySet(), gradTable.keySet());
    for (String s : paramTable.keySet()) {
        assertEquals(paramTable.get(s).length(), gradTable.get(s).length());
        assertArrayEquals(paramTable.get(s).shape(), gradTable.get(s).shape());
    }

    // Fix: 'allParams' was previously computed but never used (only a println of
    // net.numParams() followed). Assert the initializer's count matches the
    // initialized network's actual parameter count.
    assertEquals(allParams, net.numParams());

    INDArray data = Nd4j.rand(1, inputSize);

    // Smoke test: one unsupervised pretraining pass on layer 0 must run cleanly.
    net.pretrainLayer(0, data);
}
 
Example 2
Source File: TestVAE.java — from deeplearning4j (Apache License 2.0), 5 votes
@Test
public void testVaeWeightNoise(){

    // Run the identical pretraining pass once with workspaces off, once on.
    for (boolean useWorkspaces : new boolean[]{false, true}) {

        WorkspaceMode mode = useWorkspaces ? WorkspaceMode.ENABLED : WorkspaceMode.NONE;

        // Single VAE layer (10 -> 3) with Gaussian weight noise applied to all params.
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .seed(12345L)
                .trainingWorkspaceMode(mode)
                .inferenceWorkspaceMode(mode)
                .weightNoise(new WeightNoise(new org.deeplearning4j.nn.conf.distribution.NormalDistribution(0.1, 0.3)))
                .list()
                .layer(0, new VariationalAutoencoder.Builder()
                        .nIn(10).nOut(3)
                        .encoderLayerSizes(5).decoderLayerSizes(6)
                        .pzxActivationFunction(Activation.TANH)
                        .reconstructionDistribution(new GaussianReconstructionDistribution())
                        .activation(new ActivationTanH())
                        .build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        // Pretraining with weight noise enabled must complete without error.
        INDArray features = Nd4j.rand(3, 10);
        net.pretrainLayer(0, features);
    }
}
 
Example 3
Source File: TestCompGraphUnsupervised.java — from deeplearning4j (Apache License 2.0), 4 votes
@Test
public void compareImplementations() throws Exception {

    // A MultiLayerNetwork and its ComputationGraph conversion must end up with
    // identical parameters after pretraining layer 0 on the same data/seed.
    for (WorkspaceMode workspaceMode : new WorkspaceMode[]{WorkspaceMode.NONE, WorkspaceMode.ENABLED}) {

        // Two stacked VAE layers: 784 -> 32 -> 8.
        MultiLayerConfiguration mlnConfig = new NeuralNetConfiguration.Builder()
                .seed(12345)
                .updater(new Adam(1e-3))
                .weightInit(WeightInit.XAVIER)
                .inferenceWorkspaceMode(workspaceMode)
                .trainingWorkspaceMode(workspaceMode)
                .list()
                .layer(new VariationalAutoencoder.Builder()
                        .nIn(784)
                        .nOut(32)
                        .encoderLayerSizes(16)
                        .decoderLayerSizes(16)
                        .activation(Activation.TANH)
                        .pzxActivationFunction(Activation.SIGMOID)
                        .reconstructionDistribution(new BernoulliReconstructionDistribution(Activation.SIGMOID))
                        .build())
                .layer(new VariationalAutoencoder.Builder()
                        .nIn(32)
                        .nOut(8)
                        .encoderLayerSizes(16)
                        .decoderLayerSizes(16)
                        .activation(Activation.TANH)
                        .pzxActivationFunction(Activation.SIGMOID)
                        .reconstructionDistribution(new GaussianReconstructionDistribution(Activation.TANH))
                        .build())
                .build();

        MultiLayerNetwork mln = new MultiLayerNetwork(mlnConfig);
        mln.init();

        // Convert to a ComputationGraph and carry over the workspace settings.
        ComputationGraph graph = mln.toComputationGraph();
        graph.getConfiguration().setInferenceWorkspaceMode(workspaceMode);
        graph.getConfiguration().setTrainingWorkspaceMode(workspaceMode);

        // Pretrain each network on a single MNIST example, reseeding the RNG
        // beforehand so both see identical randomness.
        DataSetIterator mnist = new EarlyTerminationDataSetIterator(new MnistDataSetIterator(1, true, 12345), 1);
        Nd4j.getRandom().setSeed(12345);
        mln.pretrainLayer(0, mnist);

        mnist = new EarlyTerminationDataSetIterator(new MnistDataSetIterator(1, true, 12345), 1);
        Nd4j.getRandom().setSeed(12345);
        graph.pretrainLayer("0", mnist);

        assertEquals(mln.params(), graph.params());
    }
}