org.deeplearning4j.nn.conf.layers.OutputLayer Java Examples

The following examples show how to use org.deeplearning4j.nn.conf.layers.OutputLayer. Each example is drawn from an open-source project; the source file, project, and license are noted above each snippet.
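Before the project-specific examples, here is a minimal, self-contained sketch of the basic usage pattern: configure an OutputLayer with a loss function and activation, wrap it in a MultiLayerConfiguration, and initialize the network. The 4-in/3-out shape is an assumption chosen to match the Iris-based examples below, not code from any of the listed projects.

import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class MinimalOutputLayerExample {
    public static void main(String[] args) {
        // A single softmax classification layer: 4 features in, 3 classes out (assumed shape)
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .seed(12345)
                .list()
                .layer(0, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                        .nIn(4).nOut(3)
                        .activation(Activation.SOFTMAX)
                        .build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init(); // allocate parameters before calling fit()/output()
    }
}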
Example #1
Source File: TestFailureListener.java    From deeplearning4j with Apache License 2.0
    @Ignore
    @Test
    public void testFailureIter5() throws Exception {

        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .updater(new Adam(1e-4))
                .list()
                .layer(0, new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build())
                .build();
        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        net.setListeners(new FailureTestingListener(
//                FailureTestingListener.FailureMode.OOM,
                FailureTestingListener.FailureMode.SYSTEM_EXIT_1,
                new FailureTestingListener.IterationEpochTrigger(false, 10)));

        DataSetIterator iter = new IrisDataSetIterator(5,150);

        net.fit(iter);
    }
 
Example #2
Source File: RegressionMathFunctions.java    From dl4j-tutorials with MIT License
/** Returns the network configuration: 2 hidden DenseLayers of size 100 (numHiddenNodes).
 */
private static MultiLayerConfiguration getDeepDenseLayerNetworkConfiguration() {
    final int numHiddenNodes = 100;
    return new NeuralNetConfiguration.Builder()
            .seed(seed)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .weightInit(WeightInit.XAVIER)
            .updater(new Nesterovs(learningRate, 0.9))
            .list()
            .layer(0, new DenseLayer.Builder().nIn(numInputs).nOut(numHiddenNodes)
                    .activation(Activation.RELU).build())
            .layer(1, new DenseLayer.Builder().nIn(numHiddenNodes).nOut(numHiddenNodes)
                    .activation(Activation.RELU).build())
            .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MSE)
                    .activation(Activation.IDENTITY)
                    .nIn(numHiddenNodes).nOut(numOutputs).build())
            .pretrain(false).backprop(true).build();
}
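The method above references class-level constants (seed, learningRate, numInputs, numOutputs) defined elsewhere in the tutorial class. A plausible set of definitions, for illustration only (the tutorial's actual values may differ):

    private static final int seed = 12345;           // assumed value
    private static final double learningRate = 0.01; // assumed value
    private static final int numInputs = 1;          // one x value per example (assumed)
    private static final int numOutputs = 1;         // one predicted y value (assumed)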
 
Example #3
Source File: ModelSerializerTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testWriteMLNModel() throws Exception {
    int nIn = 5;
    int nOut = 6;

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345).l1(0.01)
                    .l2(0.01).updater(new Sgd(0.1)).activation(Activation.TANH).weightInit(WeightInit.XAVIER).list()
                    .layer(0, new DenseLayer.Builder().nIn(nIn).nOut(20).build())
                    .layer(1, new DenseLayer.Builder().nIn(20).nOut(30).build())
                    .layer(2, new OutputLayer.Builder()
                                    .lossFunction(LossFunctions.LossFunction.MSE).nIn(30).nOut(nOut).build())
                    .build();

    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();

    File tempFile = tempDir.newFile();

    ModelSerializer.writeModel(net, tempFile, true);

    MultiLayerNetwork network = ModelSerializer.restoreMultiLayerNetwork(tempFile);

    assertEquals(network.getLayerWiseConfigurations().toJson(), net.getLayerWiseConfigurations().toJson());
    assertEquals(net.params(), network.params());
    assertEquals(net.getUpdater().getStateViewArray(), network.getUpdater().getStateViewArray());
}
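Note that the third argument to ModelSerializer.writeModel is the saveUpdater flag: passing true persists the updater state alongside the parameters and configuration, which is what the final assertEquals on getStateViewArray() relies on.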
 
Example #4
Source File: TestInstantiation.java    From deeplearning4j with Apache License 2.0
@Test
public void testInitPretrained() throws IOException {
    ignoreIfCuda();
    ZooModel model = ResNet50.builder().numClasses(0).build(); //num labels doesn't matter since we're getting pretrained imagenet
    assertTrue(model.pretrainedAvailable(PretrainedType.IMAGENET));

    ComputationGraph initializedModel = (ComputationGraph) model.initPretrained();
    INDArray f = Nd4j.rand(new int[]{1, 3, 224, 224});
    INDArray[] result = initializedModel.output(f);
    assertArrayEquals(result[0].shape(), new long[]{1, 1000});

    //Test fitting. Note we need to use transfer learning, as ResNet50 has a dense layer, not an OutputLayer
    initializedModel = new TransferLearning.GraphBuilder(initializedModel)
            .removeVertexAndConnections("fc1000")
            .addLayer("fc1000", new OutputLayer.Builder()
                    .lossFunction(LossFunctions.LossFunction.MCXENT)
                    .nIn(2048).nOut(1000).activation(Activation.SOFTMAX).build(), "flatten_1")
            .setOutputs("fc1000")
            .build();
    initializedModel.fit(new org.nd4j.linalg.dataset.DataSet(f, TestUtils.randomOneHot(1, 1000, 12345)));

}
 
Example #5
Source File: ModelSerializerTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testWriteCGModel() throws Exception {
    ComputationGraphConfiguration config = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).updater(new Sgd(0.1))
                    .graphBuilder().addInputs("in")
                    .addLayer("dense", new DenseLayer.Builder().nIn(4).nOut(2).build(), "in").addLayer("out",
                                    new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(2).nOut(3)
                                            .activation(Activation.SOFTMAX).build(),
                                    "dense")
                    .setOutputs("out").build();

    ComputationGraph cg = new ComputationGraph(config);
    cg.init();

    File tempFile = tempDir.newFile();

    ModelSerializer.writeModel(cg, tempFile, true);

    ComputationGraph network = ModelSerializer.restoreComputationGraph(tempFile);

    assertEquals(network.getConfiguration().toJson(), cg.getConfiguration().toJson());
    assertEquals(cg.params(), network.params());
    assertEquals(cg.getUpdater().getStateViewArray(), network.getUpdater().getStateViewArray());
}
 
Example #6
Source File: CacheModeTest.java    From deeplearning4j with Apache License 2.0
private static ComputationGraphConfiguration getConfCG(CacheMode cacheMode){
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
            .activation(Activation.TANH)
            .inferenceWorkspaceMode(WorkspaceMode.ENABLED)
            .trainingWorkspaceMode(WorkspaceMode.ENABLED)
            .seed(12345)
            .cacheMode(cacheMode)
            .graphBuilder()
            .addInputs("in")
            .layer("0", new ConvolutionLayer.Builder().nOut(3).build(), "in")
            .layer("1", new ConvolutionLayer.Builder().nOut(3).build(), "0")
            .layer("2", new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).build(), "1")
            .setOutputs("2")
            .setInputTypes(InputType.convolutionalFlat(28, 28, 1))
            .build();

    return conf;
}
 
Example #7
Source File: ModelSerializerTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testJavaSerde_1() throws Exception {
    int nIn = 5;
    int nOut = 6;

    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345).l1(0.01)
            .graphBuilder()
            .addInputs("in")
            .layer("0", new OutputLayer.Builder().nIn(nIn).nOut(nOut).build(), "in")
            .setOutputs("0")
            .validateOutputLayerConfig(false)
            .build();

    ComputationGraph net = new ComputationGraph(conf);
    net.init();

    DataSet dataSet = trivialDataSet();
    NormalizerStandardize norm = new NormalizerStandardize();
    norm.fit(dataSet);

    val b = SerializationUtils.serialize(net);

    ComputationGraph restored = SerializationUtils.deserialize(b);

    assertEquals(net, restored);
}
 
Example #8
Source File: GraphTestCase.java    From jstarcraft-ai with Apache License 2.0
private ComputationGraph getOldFunction() {
    NeuralNetConfiguration.Builder netBuilder = new NeuralNetConfiguration.Builder();
    // Set the random seed
    netBuilder.seed(6);
    netBuilder.setL1(l1Regularization);
    netBuilder.setL1Bias(l1Regularization);
    netBuilder.setL2(l2Regularization);
    netBuilder.setL2Bias(l2Regularization);
    netBuilder.weightInit(WeightInit.XAVIER_UNIFORM);
    netBuilder.updater(new Sgd(learnRatio)).optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT);

    GraphBuilder graphBuilder = netBuilder.graphBuilder();
    graphBuilder.addInputs("leftInput", "rightInput");
    graphBuilder.addLayer("leftEmbed", new EmbeddingLayer.Builder().nIn(5).nOut(5).hasBias(true).activation(Activation.IDENTITY).build(), "leftInput");
    graphBuilder.addLayer("rightEmbed", new EmbeddingLayer.Builder().nIn(5).nOut(5).hasBias(true).activation(Activation.IDENTITY).build(), "rightInput");
    graphBuilder.addVertex("embed", new MergeVertex(), "leftEmbed", "rightEmbed");
    graphBuilder.addLayer("output", new OutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).nIn(10).nOut(1).build(), "embed");
    graphBuilder.setOutputs("output");

    ComputationGraphConfiguration configuration = graphBuilder.build();
    ComputationGraph graph = new ComputationGraph(configuration);
    graph.init();
    return graph;
}
 
Example #9
Source File: TestCustomLayers.java    From deeplearning4j with Apache License 2.0
@Test
public void checkInitializationFF() {
    //Actually create a network with a custom layer; check initialization and forward pass

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list()
                    .layer(0, new DenseLayer.Builder().nIn(9).nOut(10).build())
                    .layer(1, new CustomLayer(3.14159)) //hard-coded nIn/nOut of 10
                    .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(10).nOut(11).build())
                    .build();

    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();

    assertEquals(9 * 10 + 10, net.getLayer(0).numParams());
    assertEquals(10 * 10 + 10, net.getLayer(1).numParams());
    assertEquals(10 * 11 + 11, net.getLayer(2).numParams());

    //Check for exceptions...
    net.output(Nd4j.rand(1, 9));
    net.fit(new DataSet(Nd4j.rand(1, 9), Nd4j.rand(1, 11)));
}
 
Example #10
Source File: SameDiffCustomLayerTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testInputValidationSameDiffVertex(){
    final ComputationGraphConfiguration config = new NeuralNetConfiguration.Builder().graphBuilder()
            .addVertex("a", new ValidatingSameDiffVertex(), "input")
            .addLayer("output", new OutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.SIGMOID).nOut(2).build(), "a")
            .addInputs("input")
            .setInputTypes(InputType.feedForward(2))
            .setOutputs("output")
            .build();

    final ComputationGraph net = new ComputationGraph(config);
    net.init();

    final INDArray goodInput = Nd4j.rand(1, 2);
    final INDArray badInput = Nd4j.rand(2, 2);

    net.fit(new INDArray[]{goodInput}, new INDArray[]{goodInput});

    exceptionRule.expect(IllegalArgumentException.class);
    exceptionRule.expectMessage("Expected Message");
    net.fit(new INDArray[]{badInput}, new INDArray[]{badInput});
}
 
Example #11
Source File: NeuralNetworks.java    From Machine-Learning-in-Java with MIT License
private static MultiLayerNetwork softMaxRegression(int seed,
		int iterations, int numRows, int numColumns, int outputNum) {
	MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
			.seed(seed)
			.gradientNormalization(
					GradientNormalization.ClipElementWiseAbsoluteValue)
			.gradientNormalizationThreshold(1.0)
			.iterations(iterations)
			.momentum(0.5)
			.momentumAfter(Collections.singletonMap(3, 0.9))
			.optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT)
			.list(1)
			.layer(0,
					new OutputLayer.Builder(
							LossFunction.NEGATIVELOGLIKELIHOOD)
							.activation("softmax")
							.nIn(numColumns * numRows).nOut(outputNum)
							.build()).pretrain(true).backprop(false)
			.build();

	MultiLayerNetwork model = new MultiLayerNetwork(conf);

	return model;
}
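This example targets the legacy DL4J 0.x builder API: iterations(), momentum(), momentumAfter(), string-valued activations, list(int), and the pretrain/backprop flags were all removed in later releases. A rough equivalent on the newer API might look like the sketch below; the Nesterovs updater subsumes the removed momentum() setter, the learning rate is an assumed value, and the momentum schedule and conjugate-gradient optimizer are omitted for brevity.

private static MultiLayerNetwork softMaxRegression(int seed, int numRows, int numColumns, int outputNum) {
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .seed(seed)
            .gradientNormalization(GradientNormalization.ClipElementWiseAbsoluteValue)
            .gradientNormalizationThreshold(1.0)
            .updater(new Nesterovs(0.1, 0.5)) // learning rate 0.1 is an assumption
            .list()
            .layer(0, new OutputLayer.Builder(LossFunction.NEGATIVELOGLIKELIHOOD)
                    .activation(Activation.SOFTMAX)
                    .nIn(numColumns * numRows).nOut(outputNum)
                    .build())
            .build();

    MultiLayerNetwork model = new MultiLayerNetwork(conf);
    model.init();
    return model;
}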
 
Example #12
Source File: DataSetIteratorTest.java    From deeplearning4j with Apache License 2.0
    @Test
    public void testLfwModel() throws Exception {
        final int numRows = 28;
        final int numColumns = 28;
        int numChannels = 3;
        int outputNum = LFWLoader.NUM_LABELS;
        int numSamples = LFWLoader.NUM_IMAGES;
        int batchSize = 2;
        int seed = 123;
        int listenerFreq = 1;

        LFWDataSetIterator lfw = new LFWDataSetIterator(batchSize, numSamples,
                        new int[] {numRows, numColumns, numChannels}, outputNum, false, true, 1.0, new Random(seed));

        MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().seed(seed)
                        .gradientNormalization(GradientNormalization.RenormalizeL2PerLayer)
                        .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
                        .layer(0, new ConvolutionLayer.Builder(5, 5).nIn(numChannels).nOut(6)
                                        .weightInit(WeightInit.XAVIER).activation(Activation.RELU).build())
                        .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2})
                                        .stride(1, 1).build())
                        .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                                        .nOut(outputNum).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX)
                                        .build())
                        .setInputType(InputType.convolutionalFlat(numRows, numColumns, numChannels));

        MultiLayerNetwork model = new MultiLayerNetwork(builder.build());
        model.init();

        model.setListeners(new ScoreIterationListener(listenerFreq));

        model.fit(lfw.next());

        DataSet dataTest = lfw.next();
        INDArray output = model.output(dataTest.getFeatures());
        Evaluation eval = new Evaluation(outputNum);
        eval.eval(dataTest.getLabels(), output);
//        System.out.println(eval.stats());
    }
 
Example #13
Source File: PolicyTest.java    From deeplearning4j with Apache License 2.0
    @Test
    public void testACPolicy() throws Exception {
        ComputationGraph cg = new ComputationGraph(new NeuralNetConfiguration.Builder().seed(444).graphBuilder().addInputs("input")
                .addLayer("output", new OutputLayer.Builder().nOut(1).lossFunction(LossFunctions.LossFunction.XENT).activation(Activation.SIGMOID).build(), "input").setOutputs("output").build());
        MultiLayerNetwork mln = new MultiLayerNetwork(new NeuralNetConfiguration.Builder().seed(555).list()
                .layer(0, new OutputLayer.Builder().nOut(1).lossFunction(LossFunctions.LossFunction.XENT).activation(Activation.SIGMOID).build()).build());

        ACPolicy policy = new ACPolicy(new DummyAC(cg));
        assertNotNull(policy.rnd);

        policy = new ACPolicy(new DummyAC(mln));
        assertNotNull(policy.rnd);

        INDArray input = Nd4j.create(new double[] {1.0, 0.0}, new long[]{1,2});
        for (int i = 0; i < 100; i++) {
            assertEquals(0, (int)policy.nextAction(input));
        }

        input = Nd4j.create(new double[] {0.0, 1.0}, new long[]{1,2});
        for (int i = 0; i < 100; i++) {
            assertEquals(1, (int)policy.nextAction(input));
        }

        input = Nd4j.create(new double[] {0.1, 0.2, 0.3, 0.4}, new long[]{1, 4});
        int[] count = new int[4];
        for (int i = 0; i < 100; i++) {
            count[policy.nextAction(input)]++;
        }
//        System.out.println(count[0] + " " + count[1] + " " + count[2] + " " + count[3]);
        assertTrue(count[0] < 20);
        assertTrue(count[1] < 30);
        assertTrue(count[2] < 40);
        assertTrue(count[3] < 50);
    }
 
Example #14
Source File: AbstractZooModel.java    From wekaDeeplearning4j with GNU General Public License v3.0
/**
 * We need a layer with the correct number of outputs
 * @return Default output layer
 */
protected OutputLayer createOutputLayer() {
    return new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
            .nIn(m_numFExtractOutputs).nOut(numLabels)
            .weightInit(new NormalDistribution(0, 0.2 * (2.0 / (4096 + numLabels)))) //This weight init dist gave better results than Xavier
            .activation(Activation.SOFTMAX).build();
}
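As a hypothetical usage sketch (the names are assumptions, mirroring the VGG16 transfer-learning example further below), the returned layer could replace the classifier of a pretrained graph whose output layer is named "predictions" and is fed by a vertex "fc2":

protected ComputationGraph replaceClassifier(ComputationGraph pretrainedGraph) {
    return new TransferLearning.GraphBuilder(pretrainedGraph)
            .removeVertexKeepConnections("predictions")          // assumed layer name
            .addLayer("predictions", createOutputLayer(), "fc2") // assumed input vertex
            .setOutputs("predictions")
            .build();
}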
 
Example #15
Source File: ModelTupleStreamTest.java    From deeplearning4j with Apache License 2.0
protected Model buildMultiLayerNetworkModel(int numInputs, int numOutputs) throws Exception {

    final MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
        .list(
            new OutputLayer.Builder()
                           .nIn(numInputs)
                           .nOut(numOutputs)
                           .activation(Activation.IDENTITY)
                           .lossFunction(LossFunctions.LossFunction.MSE)
                           .build()
            )
        .build();

    final MultiLayerNetwork model = new MultiLayerNetwork(conf);
    model.init();

    final float[] floats = new float[(numInputs+1)*numOutputs];
    final float base0 = 0.01f;
    float base = base0;
    for (int ii=0; ii<floats.length; ++ii)
    {
      base *= 2;
      if (base > 1/base0) base = base0;
      floats[ii] = base;
    }

    final INDArray params = Nd4j.create(floats);
    model.setParams(params);

    return model;
  }
 
Example #16
Source File: TestGraphNodes.java    From deeplearning4j with Apache License 2.0
@Test
public void testStackVertexEmbedding() {
    Nd4j.getRandom().setSeed(12345);
    GraphVertex unstack = new StackVertex(null, "", -1, Nd4j.dataType());

    INDArray in1 = Nd4j.zeros(5, 1);
    INDArray in2 = Nd4j.zeros(5, 1);
    for (int i = 0; i < 5; i++) {
        in1.putScalar(i, 0, i);
        in2.putScalar(i, 0, i);
    }

    INDArray l = Nd4j.rand(5, 5);
    MultiDataSet ds = new org.nd4j.linalg.dataset.MultiDataSet(new INDArray[] {in1, in2}, new INDArray[] {l, l},
                    null, null);


    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().graphBuilder().addInputs("in1", "in2")
                    .addVertex("stack", new org.deeplearning4j.nn.conf.graph.StackVertex(), "in1", "in2")
                    .addLayer("1", new EmbeddingLayer.Builder().nIn(5).nOut(5).build(), "stack")
                    .addVertex("unstack1", new org.deeplearning4j.nn.conf.graph.UnstackVertex(0, 2), "1")
                    .addVertex("unstack2", new org.deeplearning4j.nn.conf.graph.UnstackVertex(0, 2), "1")
                    .addLayer("out1", new OutputLayer.Builder().activation(Activation.TANH)
                                    .lossFunction(LossFunctions.LossFunction.L2).nIn(5).nOut(5).build(), "unstack1")
                    .addLayer("out2", new OutputLayer.Builder().activation(Activation.TANH)
                                    .lossFunction(LossFunctions.LossFunction.L2).nIn(5).nOut(5).build(), "unstack2")
                    .setOutputs("out1", "out2").build();

    ComputationGraph g = new ComputationGraph(conf);
    g.init();

    g.feedForward(new INDArray[] {in1, in2}, false);

    g.fit(ds);

}
 
Example #17
Source File: ActivationLayerTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testActivationInheritanceCG() {

    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).seed(123)
            .weightInit(WeightInit.XAVIER)
            .activation(Activation.RATIONALTANH)
            .graphBuilder()
            .addInputs("in")
            .addLayer("0", new DenseLayer.Builder().nIn(10).nOut(10).build(), "in")
            .addLayer("1", new ActivationLayer(), "0")
            .addLayer("2", new ActivationLayer.Builder().build(), "1")
            .addLayer("3", new ActivationLayer.Builder().activation(Activation.ELU).build(), "2")
            .addLayer("4", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                    .activation(Activation.SOFTMAX).nIn(10).nOut(10).build(), "3")
            .setOutputs("4")
            .build();

    ComputationGraph network = new ComputationGraph(conf);
    network.init();

    assertNotNull(((ActivationLayer)network.getLayer("1").conf().getLayer()).getActivationFn());

    assertTrue(((DenseLayer)network.getLayer("0").conf().getLayer()).getActivationFn() instanceof ActivationRationalTanh);
    assertTrue(((ActivationLayer)network.getLayer("1").conf().getLayer()).getActivationFn() instanceof ActivationRationalTanh);
    assertTrue(((ActivationLayer)network.getLayer("2").conf().getLayer()).getActivationFn() instanceof ActivationRationalTanh);
    assertTrue(((ActivationLayer)network.getLayer("3").conf().getLayer()).getActivationFn() instanceof ActivationELU);
    assertTrue(((OutputLayer)network.getLayer("4").conf().getLayer()).getActivationFn() instanceof ActivationSoftmax);
}
 
Example #18
Source File: CDAEModel.java    From jstarcraft-rns with Apache License 2.0
@Override
protected MultiLayerConfiguration getNetworkConfiguration() {
    NeuralNetConfiguration.ListBuilder factory = new NeuralNetConfiguration.Builder().seed(6)
            // .gradientNormalization(GradientNormalization.ClipElementWiseAbsoluteValue)
            // .gradientNormalizationThreshold(1.0)
            .updater(new Nesterovs(learnRatio, momentum)).weightInit(WeightInit.XAVIER).optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).l2(weightRegularization).list();
    factory.layer(0, new CDAEConfiguration.Builder().nIn(inputDimension).nOut(hiddenDimension).activation(Activation.fromString(hiddenActivation)).setNumUsers(userSize).build());
    factory.layer(1, new OutputLayer.Builder().nIn(hiddenDimension).nOut(inputDimension).lossFunction(LossFunctions.LossFunction.SQUARED_LOSS).activation(Activation.fromString(outputActivation)).build());
    factory.pretrain(false).backprop(true);
    MultiLayerConfiguration configuration = factory.build();
    return configuration;
}
 
Example #19
Source File: ScoringModelTest.java    From deeplearning4j with Apache License 2.0
protected Model buildComputationGraphModel(int numFeatures) throws Exception {

    final ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
        .graphBuilder()
        .addInputs("inputLayer")
        .addLayer("outputLayer",
          new OutputLayer.Builder().nIn(numFeatures).nOut(1).lossFunction(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).build(),
          "inputLayer")
        .setOutputs("outputLayer")
        .build();

    final ComputationGraph model = new ComputationGraph(conf);
    model.init();

    final float[] floats = new float[numFeatures+1];
    float base = 1f;
    for (int ii=0; ii<floats.length; ++ii)
    {
      base *= 2;
      floats[ii] = base;
    }

    final INDArray params = Nd4j.create(floats);
    model.setParams(params);

    return model;
  }
 
Example #20
Source File: TestEarlyStoppingCompGraph.java    From deeplearning4j with Apache License 2.0
@Test
public void testBadTuning() {
    //Test poor tuning (high LR): should terminate on MaxScoreIterationTerminationCondition

    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .updater(new Sgd(5.0)) //Intentionally huge LR
                    .weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
                    .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX)
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
                    .setOutputs("0").build();
    ComputationGraph net = new ComputationGraph(conf);
    net.setListeners(new ScoreIterationListener(1));

    DataSetIterator irisIter = new IrisDataSetIterator(150, 150);
    EarlyStoppingModelSaver<ComputationGraph> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<ComputationGraph> esConf = new EarlyStoppingConfiguration.Builder<ComputationGraph>()
                    .epochTerminationConditions(new MaxEpochsTerminationCondition(5000))
                    .iterationTerminationConditions(new MaxTimeIterationTerminationCondition(1, TimeUnit.MINUTES),
                                    new MaxScoreIterationTerminationCondition(10)) //Initial score is ~2.5
                    .scoreCalculator(new DataSetLossCalculatorCG(irisIter, true)).modelSaver(saver).build();

    IEarlyStoppingTrainer trainer = new EarlyStoppingGraphTrainer(esConf, net, irisIter);
    EarlyStoppingResult result = trainer.fit();

    assertTrue(result.getTotalEpochs() < 5);
    assertEquals(EarlyStoppingResult.TerminationReason.IterationTerminationCondition,
                    result.getTerminationReason());
    String expDetails = new MaxScoreIterationTerminationCondition(10).toString();
    assertEquals(expDetails, result.getTerminationDetails());

    assertEquals(0, result.getBestModelEpoch());
    assertNotNull(result.getBestModel());
}
 
Example #21
Source File: TestListeners.java    From deeplearning4j with Apache License 2.0
@Test
public void testListeners() {
    TestListener.clearCounts();

    MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().list().layer(0,
                    new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(10).nOut(10)
                                    .activation(Activation.TANH).build());

    MultiLayerConfiguration conf = builder.build();
    MultiLayerNetwork model = new MultiLayerNetwork(conf);
    model.init();

    testListenersForModel(model, Collections.singletonList(new TestListener()));
}
 
Example #22
Source File: TestEarlyStoppingCompGraph.java    From deeplearning4j with Apache License 2.0
@Test
public void testEarlyStoppingListenersCG() {
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
            .updater(new Sgd(0.001)).weightInit(WeightInit.XAVIER)
            .graphBuilder()
            .addInputs("in")
            .layer("0", new OutputLayer.Builder().nIn(4).nOut(3)
                    .activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
            .setOutputs("0")
            .build();
    ComputationGraph net = new ComputationGraph(conf);

    TestEarlyStopping.TestListener tl = new TestEarlyStopping.TestListener();
    net.setListeners(tl);

    DataSetIterator irisIter = new IrisDataSetIterator(50, 150);
    EarlyStoppingModelSaver<ComputationGraph> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<ComputationGraph> esConf =
            new EarlyStoppingConfiguration.Builder<ComputationGraph>()
                    .epochTerminationConditions(new MaxEpochsTerminationCondition(5))
                    .iterationTerminationConditions(
                            new MaxTimeIterationTerminationCondition(1, TimeUnit.MINUTES))
                    .scoreCalculator(new DataSetLossCalculator(irisIter, true)).modelSaver(saver)
                    .build();

    IEarlyStoppingTrainer<ComputationGraph> trainer = new EarlyStoppingGraphTrainer(esConf, net, irisIter);

    trainer.fit();

    assertEquals(5, tl.getCountEpochStart());
    assertEquals(5, tl.getCountEpochEnd());
    assertEquals(5 * 150/50, tl.getIterCount());

    assertEquals(4, tl.getMaxEpochStart());
    assertEquals(4, tl.getMaxEpochEnd());
}
 
Example #23
Source File: TestSystemInfoPrintListener.java    From deeplearning4j with Apache License 2.0
    @Test
    public void testListener() throws Exception {
        SystemInfoPrintListener systemInfoPrintListener = SystemInfoPrintListener.builder()
                .printOnEpochStart(true).printOnEpochEnd(true)
                .build();

        File tmpFile = testDir.newFile("tmpfile-log.txt");
        assertEquals(0, tmpFile.length());

        SystemInfoFilePrintListener systemInfoFilePrintListener = SystemInfoFilePrintListener.builder()
                .printOnEpochStart(true).printOnEpochEnd(true).printFileTarget(tmpFile)
                .build();
        tmpFile.deleteOnExit();

        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .list()
                .layer(new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();
        net.setListeners(systemInfoFilePrintListener);

        DataSetIterator iter = new IrisDataSetIterator(10, 150);

        net.fit(iter, 3);

//        System.out.println(FileUtils.readFileToString(tmpFile));
    }
 
Example #24
Source File: TestVertxUI.java    From deeplearning4j with Apache License 2.0
@Test
public void testUIMultipleSessions() throws Exception {

    for (int session = 0; session < 3; session++) {

        StatsStorage ss = new InMemoryStatsStorage();

        UIServer uiServer = UIServer.getInstance();
        uiServer.attach(ss);

        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
                .layer(0, new DenseLayer.Builder().activation(Activation.TANH).nIn(4).nOut(4).build())
                .layer(1, new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
                        .activation(Activation.SOFTMAX).nIn(4).nOut(3).build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();
        net.setListeners(new StatsListener(ss, 1), new ScoreIterationListener(1));

        DataSetIterator iter = new IrisDataSetIterator(150, 150);

        for (int i = 0; i < 20; i++) {
            net.fit(iter);
            Thread.sleep(100);
        }
    }
}
 
Example #25
Source File: ModelSerializerTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testInvalidLoading2() throws Exception {
    int nIn = 5;
    int nOut = 6;

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345).l1(0.01)
            .l2(0.01).updater(new Sgd(0.1)).activation(Activation.TANH).weightInit(WeightInit.XAVIER).list()
            .layer(0, new DenseLayer.Builder().nIn(nIn).nOut(20).build())
            .layer(1, new DenseLayer.Builder().nIn(20).nOut(30).build())
            .layer(2, new OutputLayer.Builder()
                    .lossFunction(LossFunctions.LossFunction.MSE).nIn(30).nOut(nOut).build())
            .build();

    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();

    File tempFile = tempDir.newFile("testInvalidLoading2.bin");

    ModelSerializer.writeModel(net, tempFile, true);

    try {
        ModelSerializer.restoreComputationGraph(tempFile);
        fail();
    } catch (Exception e){
        String msg = e.getMessage();
        assertTrue(msg, msg.contains("JSON") && msg.contains("restoreMultiLayerNetwork"));
    }
}
 
Example #26
Source File: TransferLearningComplex.java    From deeplearning4j with Apache License 2.0
    @Test
    public void testAddOutput() {
        NeuralNetConfiguration.Builder overallConf = new NeuralNetConfiguration.Builder().updater(new Sgd(0.9))
                        .activation(Activation.IDENTITY);

        ComputationGraphConfiguration conf = overallConf.graphBuilder().addInputs("inCentre", "inRight")
                        .addLayer("denseCentre0", new DenseLayer.Builder().nIn(2).nOut(2).build(), "inCentre")
                        .addLayer("denseRight0", new DenseLayer.Builder().nIn(2).nOut(2).build(), "inRight")
                        .addVertex("mergeRight", new MergeVertex(), "denseCentre0", "denseRight0")
                        .addLayer("outRight",
                                        new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(4).nOut(2).build(),
                                        "mergeRight")
                        .setOutputs("outRight").build();
        ComputationGraph modelToTune = new ComputationGraph(conf);
        modelToTune.init();

        ComputationGraph modelNow =
                        new TransferLearning.GraphBuilder(modelToTune)
                                        .addLayer("outCentre",
                                                        new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(2)
                                                                        .nOut(3).build(),
                                                        "denseCentre0")
                                        .setOutputs("outRight", "outCentre").build();

        assertEquals(2, modelNow.getNumOutputArrays());
        MultiDataSet rand = new MultiDataSet(new INDArray[] {Nd4j.rand(2, 2), Nd4j.rand(2, 2)},
                        new INDArray[] {Nd4j.rand(2, 2), Nd4j.rand(2, 3)});
        modelNow.fit(rand);
//        log.info(modelNow.summary());
//        log.info(modelNow.summary(InputType.feedForward(2),InputType.feedForward(2)));
        modelNow.summary();
        modelNow.summary(InputType.feedForward(2),InputType.feedForward(2));
    }
 
Example #27
Source File: TestEarlyStoppingSparkCompGraph.java    From deeplearning4j with Apache License 2.0
@Test
public void testListeners() {
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .updater(new Sgd()).weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
                    .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
                    .setOutputs("0").build();
    ComputationGraph net = new ComputationGraph(conf);
    net.setListeners(new ScoreIterationListener(5));


    JavaRDD<DataSet> irisData = getIris();

    EarlyStoppingModelSaver<ComputationGraph> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<ComputationGraph> esConf = new EarlyStoppingConfiguration.Builder<ComputationGraph>()
                    .epochTerminationConditions(new MaxEpochsTerminationCondition(5))
                    .iterationTerminationConditions(new MaxTimeIterationTerminationCondition(2, TimeUnit.MINUTES))
                    .scoreCalculator(new SparkLossCalculatorComputationGraph(
                                    irisData.map(new DataSetToMultiDataSetFn()), true, sc.sc()))
                    .modelSaver(saver).build();

    LoggingEarlyStoppingListener listener = new LoggingEarlyStoppingListener();

    TrainingMaster tm = new ParameterAveragingTrainingMaster(true, numExecutors(), 1, 10, 1, 0);

    IEarlyStoppingTrainer<ComputationGraph> trainer = new SparkEarlyStoppingGraphTrainer(getContext().sc(), tm,
                    esConf, net, irisData.map(new DataSetToMultiDataSetFn()));
    trainer.setListener(listener);

    trainer.fit();

    assertEquals(1, listener.onStartCallCount);
    assertEquals(5, listener.onEpochCallCount);
    assertEquals(1, listener.onCompletionCallCount);
}
 
Example #28
Source File: TestPreProcessors.java    From deeplearning4j with Apache License 2.0
@Test
public void testCnnToDense() {
    MultiLayerConfiguration conf =
            new NeuralNetConfiguration.Builder()
                    .list().layer(0,
                    new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
                            4, 4) // 28*28*1 => 15*15*10
                            .nIn(1).nOut(10).padding(2, 2)
                            .stride(2, 2)
                            .weightInit(WeightInit.RELU)
                            .activation(Activation.RELU)
                            .build())
                    .layer(1, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder()
                            .activation(Activation.RELU).nOut(200).build())
                    .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(200)
                            .nOut(5).weightInit(WeightInit.RELU)
                            .activation(Activation.SOFTMAX).build())
                    .setInputType(InputType.convolutionalFlat(28, 28, 1))
                    .build();

    assertNotNull(conf.getInputPreProcess(0));
    assertNotNull(conf.getInputPreProcess(1));

    assertTrue(conf.getInputPreProcess(0) instanceof FeedForwardToCnnPreProcessor);
    assertTrue(conf.getInputPreProcess(1) instanceof CnnToFeedForwardPreProcessor);

    FeedForwardToCnnPreProcessor ffcnn = (FeedForwardToCnnPreProcessor) conf.getInputPreProcess(0);
    CnnToFeedForwardPreProcessor cnnff = (CnnToFeedForwardPreProcessor) conf.getInputPreProcess(1);

    assertEquals(28, ffcnn.getInputHeight());
    assertEquals(28, ffcnn.getInputWidth());
    assertEquals(1, ffcnn.getNumChannels());

    assertEquals(15, cnnff.getInputHeight());
    assertEquals(15, cnnff.getInputWidth());
    assertEquals(10, cnnff.getNumChannels());

    assertEquals(15 * 15 * 10, ((FeedForwardLayer) conf.getConf(1).getLayer()).getNIn());
}
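The expected preprocessor shapes follow from the standard convolution output formula: outSize = (inSize + 2*padding - kernel) / stride + 1 = (28 + 4 - 4) / 2 + 1 = 15, so the flattened 28x28x1 input becomes 15x15x10 after the convolution layer, and the CnnToFeedForwardPreProcessor hands the dense layer nIn = 15 * 15 * 10 = 2250, as the final assertion checks.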
 
Example #29
Source File: AutoEncoderTest.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void sanityCheckIssue5662(){
    int mergeSize = 50;
    int encdecSize = 25;
    int in1Size = 20;
    int in2Size = 15;
    int hiddenSize = 10;

    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
            .weightInit(WeightInit.XAVIER)
            .graphBuilder()
            .addInputs("in1", "in2")
            .addLayer("1", new DenseLayer.Builder().nOut(mergeSize).build(), "in1")
            .addLayer("2", new DenseLayer.Builder().nOut(mergeSize).build(), "in2")
            .addVertex("merge", new MergeVertex(), "1", "2")
            .addLayer("e",new AutoEncoder.Builder().nOut(encdecSize).corruptionLevel(0.2).build(),"merge")
            .addLayer("hidden",new AutoEncoder.Builder().nOut(hiddenSize).build(),"e")
            .addLayer("decoder",new AutoEncoder.Builder().nOut(encdecSize).corruptionLevel(0.2).build(),"hidden")
            .addLayer("L4", new DenseLayer.Builder().nOut(mergeSize).build(), "decoder")
            .addLayer("out1", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nOut(in1Size).build(),"L4")
            .addLayer("out2",new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nOut(in2Size).build(),"L4")
            .setOutputs("out1","out2")
            .setInputTypes(InputType.feedForward(in1Size), InputType.feedForward(in2Size))

            .build();

    ComputationGraph net = new ComputationGraph(conf);
    net.init();

    MultiDataSet mds = new org.nd4j.linalg.dataset.MultiDataSet(
            new INDArray[]{Nd4j.create(1, in1Size), Nd4j.create(1, in2Size)},
            new INDArray[]{Nd4j.create(1, in1Size), Nd4j.create(1, in2Size)});

    net.summary(InputType.feedForward(in1Size), InputType.feedForward(in2Size));
    net.fit(new SingletonMultiDataSetIterator(mds));
}
 
Example #30
Source File: MiscTests.java    From deeplearning4j with Apache License 2.0
    @Test
    public void testTransferVGG() throws Exception {
        //https://github.com/deeplearning4j/deeplearning4j/issues/5167
        DataSet ds = new DataSet();
        ds.setFeatures(Nd4j.create(1, 3, 224, 224));
        ds.setLabels(Nd4j.create(1, 2));

        ComputationGraph model = (ComputationGraph)(
                VGG16.builder().build()
                        .initPretrained(PretrainedType.IMAGENET));
//        System.out.println(model.summary());

        ComputationGraph transferModel = new TransferLearning.GraphBuilder(model)
                .setFeatureExtractor("fc2")
                .removeVertexKeepConnections("predictions")
                .addLayer("predictions",
                        new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                                .nIn(4096).nOut(2)
                                .weightInit(WeightInit.XAVIER)
                                .activation(Activation.SOFTMAX).build(), "fc2")
                .build();

//        System.out.println(transferModel.summary());
//        System.out.println("Fitting");
        transferModel.fit(ds);

        ComputationGraph g2 = TestUtils.testModelSerialization(transferModel);
        g2.fit(ds);
    }