Java Code Examples for org.deeplearning4j.nn.conf.MultiLayerConfiguration#fromYaml()

The following examples show how to use org.deeplearning4j.nn.conf.MultiLayerConfiguration#fromYaml(). Each example is taken from the deeplearning4j project, and the source file it comes from is named above the code.
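Before the project examples, here is a minimal, self-contained round-trip sketch: build a small configuration, serialize it with toYaml(), and restore it with fromYaml(). The class name and layer sizes are illustrative only, not taken from any of the examples below.

import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class FromYamlRoundTrip {
    public static void main(String[] args) {
        //Build a small two-layer configuration
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list()
                .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).activation(Activation.RELU).build())
                .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                        .activation(Activation.SOFTMAX).nIn(3).nOut(2).build())
                .build();

        //Serialize to YAML, then restore with fromYaml()
        String yaml = conf.toYaml();
        MultiLayerConfiguration restored = MultiLayerConfiguration.fromYaml(yaml);

        //The restored configuration should equal the original
        System.out.println(conf.equals(restored));    //prints: true
    }
}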
Example 1
Source File: TestCustomActivation.java    From deeplearning4j with Apache License 2.0
    @Test
    public void testCustomActivationFn() {
        //Create a MultiLayerConfiguration using the custom activation function, and check that the JSON and YAML configs actually work

        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().updater(new Sgd(0.1)).list()
                        .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).activation(new CustomActivation()).build())
                        .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(10).nOut(10).build())
                        .build();

        String json = conf.toJson();
        String yaml = conf.toYaml();

//        System.out.println(json);

        MultiLayerConfiguration confFromJson = MultiLayerConfiguration.fromJson(json);
        assertEquals(conf, confFromJson);

        MultiLayerConfiguration confFromYaml = MultiLayerConfiguration.fromYaml(yaml);
        assertEquals(conf, confFromYaml);

    }
 
Example 2
Source File: CustomPreprocessorTest.java    From deeplearning4j with Apache License 2.0
    @Test
    public void testCustomPreprocessor() {
        //Create a MultiLayerConfiguration using the custom preprocessor, and check that the JSON and YAML configs actually work
        MultiLayerConfiguration conf =
                        new NeuralNetConfiguration.Builder().list()
                                        .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
                                        .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(10)
                                                .activation(Activation.SOFTMAX).nOut(10).build())
                                        .inputPreProcessor(0, new MyCustomPreprocessor())
                                        .build();

        String json = conf.toJson();
        String yaml = conf.toYaml();

//        System.out.println(json);

        MultiLayerConfiguration confFromJson = MultiLayerConfiguration.fromJson(json);
        assertEquals(conf, confFromJson);

        MultiLayerConfiguration confFromYaml = MultiLayerConfiguration.fromYaml(yaml);
        assertEquals(conf, confFromYaml);

        assertTrue(confFromJson.getInputPreProcess(0) instanceof MyCustomPreprocessor);

    }
 
Example 3
Source File: ModelGuesser.java    From deeplearning4j with Apache License 2.0
/**
 * Load a network configuration from the given file path, guessing the configuration format.
 * @param path the path of the configuration file to "guess"
 *
 * @return the loaded configuration object
 * @throws Exception if the file cannot be read or does not contain a recognized configuration type
 */
public static Object loadConfigGuess(String path) throws Exception {
    String input = FileUtils.readFileToString(new File(path));
    //note here that we load json BEFORE YAML. YAML
    //turns out to load just fine *accidentally*
    try {
        return MultiLayerConfiguration.fromJson(input);
    } catch (Exception e) {
        log.warn("Tried multi layer config from json", e);
        try {
            return KerasModelImport.importKerasModelConfiguration(path);
        } catch (Exception e1) {
            log.warn("Tried keras model config", e);
            try {
                return KerasModelImport.importKerasSequentialConfiguration(path);
            } catch (Exception e2) {
                log.warn("Tried keras sequence config", e);
                try {
                    return ComputationGraphConfiguration.fromJson(input);
                } catch (Exception e3) {
                    log.warn("Tried computation graph from json");
                    try {
                        return MultiLayerConfiguration.fromYaml(input);
                    } catch (Exception e4) {
                        log.warn("Tried multi layer configuration from yaml");
                        try {
                            return ComputationGraphConfiguration.fromYaml(input);
                        } catch (Exception e5) {
                            throw new ModelGuesserException("Unable to load configuration from path " + path
                                    + " (invalid config file or not a known config type)");
                        }
                    }
                }
            }
        }
    }
}
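A brief usage sketch for the method above. The file path is hypothetical, and the snippet assumes the calling class already imports ModelGuesser, MultiLayerConfiguration and ComputationGraphConfiguration.

//Hypothetical path to a serialized network configuration (DL4J JSON/YAML or a Keras config)
Object guessed = ModelGuesser.loadConfigGuess("/tmp/saved-network-config.yaml");

//Narrow the returned Object to the configuration type that was actually recognized
if (guessed instanceof MultiLayerConfiguration) {
    MultiLayerConfiguration mlc = (MultiLayerConfiguration) guessed;
    System.out.println(mlc.toYaml());
} else if (guessed instanceof ComputationGraphConfiguration) {
    ComputationGraphConfiguration cgc = (ComputationGraphConfiguration) guessed;
    System.out.println(cgc.toYaml());
}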
 
Example 4
Source File: TestCustomLayers.java    From deeplearning4j with Apache License 2.0
    @Test
    public void testJsonMultiLayerNetwork() {
        MultiLayerConfiguration conf =
                        new NeuralNetConfiguration.Builder().list()
                                        .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
                                        .layer(1, new CustomLayer(3.14159)).layer(2,
                                                        new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                                                .activation(Activation.SOFTMAX).nIn(10).nOut(10).build())
                                        .build();

        String json = conf.toJson();
        String yaml = conf.toYaml();

//        System.out.println(json);

        MultiLayerConfiguration confFromJson = MultiLayerConfiguration.fromJson(json);
        assertEquals(conf, confFromJson);

        MultiLayerConfiguration confFromYaml = MultiLayerConfiguration.fromYaml(yaml);
        assertEquals(conf, confFromYaml);
    }
 
Example 5
Source File: TestVAE.java    From deeplearning4j with Apache License 2.0
@Test
public void testJsonYaml() {

    MultiLayerConfiguration config = new NeuralNetConfiguration.Builder().seed(12345).list()
                    .layer(0, new org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder.Builder()
                                    .reconstructionDistribution(new GaussianReconstructionDistribution(Activation.IDENTITY))
                                    .nIn(3).nOut(4).encoderLayerSizes(5).decoderLayerSizes(6).build())
                    .layer(1, new org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder.Builder()
                                    .reconstructionDistribution(new GaussianReconstructionDistribution(Activation.TANH))
                                    .nIn(7).nOut(8).encoderLayerSizes(9).decoderLayerSizes(10).build())
                    .layer(2, new org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder.Builder()
                                    .reconstructionDistribution(new BernoulliReconstructionDistribution()).nIn(11)
                                    .nOut(12).encoderLayerSizes(13).decoderLayerSizes(14).build())
                    .layer(3, new org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder.Builder()
                                    .reconstructionDistribution(new ExponentialReconstructionDistribution(Activation.TANH))
                                    .nIn(11).nOut(12).encoderLayerSizes(13).decoderLayerSizes(14).build())
                    .layer(4, new org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder.Builder()
                                    .lossFunction(new ActivationTanH(), LossFunctions.LossFunction.MSE).nIn(11)
                                    .nOut(12).encoderLayerSizes(13).decoderLayerSizes(14).build())
                    .layer(5, new org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder.Builder()
                                    .reconstructionDistribution(new CompositeReconstructionDistribution.Builder()
                                                    .addDistribution(5, new GaussianReconstructionDistribution())
                                                    .addDistribution(5,
                                                                    new GaussianReconstructionDistribution(Activation.TANH))
                                                    .addDistribution(5, new BernoulliReconstructionDistribution())
                                                    .build())
                                    .nIn(15).nOut(16).encoderLayerSizes(17).decoderLayerSizes(18).build())
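                    //note: the next call reuses layer index 1, so it replaces the variational autoencoder configured at index 1 above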
                    .layer(1, new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(18)
                                    .nOut(19).activation(new ActivationTanH()).build())
                    .build();

    String asJson = config.toJson();
    String asYaml = config.toYaml();

    MultiLayerConfiguration fromJson = MultiLayerConfiguration.fromJson(asJson);
    MultiLayerConfiguration fromYaml = MultiLayerConfiguration.fromYaml(asYaml);

    assertEquals(config, fromJson);
    assertEquals(config, fromYaml);
}
 
Example 6
Source File: TestCustomLayers.java    From deeplearning4j with Apache License 2.0
    @Test
    public void testCustomOutputLayerMLN() {
        //Create a MultiLayerConfiguration using the custom output layer, and check that the JSON and YAML configs actually work
        MultiLayerConfiguration conf =
                        new NeuralNetConfiguration.Builder().seed(12345).list()
                                        .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
                                        .layer(1, new CustomOutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                                        .activation(Activation.SOFTMAX)
                                                        .nIn(10).nOut(10).build())
                                        .build();

        String json = conf.toJson();
        String yaml = conf.toYaml();

//        System.out.println(json);

        MultiLayerConfiguration confFromJson = MultiLayerConfiguration.fromJson(json);
        assertEquals(conf, confFromJson);

        MultiLayerConfiguration confFromYaml = MultiLayerConfiguration.fromYaml(yaml);
        assertEquals(conf, confFromYaml);

        //Third: check initialization
        Nd4j.getRandom().setSeed(12345);
        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        assertTrue(net.getLayer(1) instanceof CustomOutputLayerImpl);

        //Fourth: compare to an equivalent standard output layer (should be identical)
        MultiLayerConfiguration conf2 =
                        new NeuralNetConfiguration.Builder().seed(12345).weightInit(WeightInit.XAVIER)
                                        .list()
                                        .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build()).layer(1,
                                                        new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                                                .activation(Activation.SOFTMAX).nIn(10).nOut(10).build())
                                        .build();
        Nd4j.getRandom().setSeed(12345);
        MultiLayerNetwork net2 = new MultiLayerNetwork(conf2);
        net2.init();

        assertEquals(net2.params(), net.params());

        INDArray testFeatures = Nd4j.rand(1, 10);
        INDArray testLabels = Nd4j.zeros(1, 10);
        testLabels.putScalar(0, 3, 1.0);
        DataSet ds = new DataSet(testFeatures, testLabels);

        assertEquals(net2.output(testFeatures), net.output(testFeatures));
        assertEquals(net2.score(ds), net.score(ds), 1e-6);
    }