Java Code Examples for org.deeplearning4j.nn.multilayer.MultiLayerNetwork#setListeners()

The following examples show how to use org.deeplearning4j.nn.multilayer.MultiLayerNetwork#setListeners(). Each example is taken from an open-source project; the source file, project, and license are noted in the header above each listing.
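Before the project examples, here is a minimal, self-contained sketch of the pattern they all share: build a MultiLayerConfiguration, call init() on the MultiLayerNetwork, then attach one or more training listeners via setListeners(). The layer sizes and the listener frequency below are arbitrary placeholder values chosen for illustration, not taken from any particular project.

import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.optimize.listeners.ScoreIterationListener;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class SetListenersSketch {

    public static void main(String[] args) {
        // Minimal network: a single softmax output layer sized for Iris (4 inputs, 3 classes).
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .list()
                .layer(new OutputLayer.Builder()
                        .nIn(4).nOut(3)
                        .activation(Activation.SOFTMAX)
                        .lossFunction(LossFunctions.LossFunction.MCXENT)
                        .build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        // setListeners(...) attaches the given listeners, replacing any set previously;
        // here the score is logged every 10 iterations. Several examples below pass more
        // than one listener, e.g. a StatsListener together with a ScoreIterationListener.
        net.setListeners(new ScoreIterationListener(10));
    }
}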
Example 1
Source File: TestFailureListener.java    From deeplearning4j with Apache License 2.0
    @Ignore
    @Test
    public void testFailureIter5() throws Exception {

        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .updater(new Adam(1e-4))
                .list()
                .layer(0, new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build())
                .build();
        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        net.setListeners(new FailureTestingListener(
//                FailureTestingListener.FailureMode.OOM,
                FailureTestingListener.FailureMode.SYSTEM_EXIT_1,
                new FailureTestingListener.IterationEpochTrigger(false, 10)));

        DataSetIterator iter = new IrisDataSetIterator(5,150);

        net.fit(iter);
    }
 
Example 2
Source File: TestEarlyStoppingSpark.java    From deeplearning4j with Apache License 2.0
@Test
public void testNoImprovementNEpochsTermination() {
    //Idea: terminate training if score (test set loss) does not improve for 5 consecutive epochs
    //Simulate this by setting LR = 0.0

    Nd4j.getRandom().setSeed(12345);
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .updater(new Sgd(0.0)).weightInit(WeightInit.XAVIER).list()
                    .layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                    .build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.setListeners(new ScoreIterationListener(5));

    JavaRDD<DataSet> irisData = getIris();

    EarlyStoppingModelSaver<MultiLayerNetwork> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<MultiLayerNetwork> esConf =
                    new EarlyStoppingConfiguration.Builder<MultiLayerNetwork>()
                                    .epochTerminationConditions(new MaxEpochsTerminationCondition(100),
                                                    new ScoreImprovementEpochTerminationCondition(5))
                                    .iterationTerminationConditions(new MaxScoreIterationTerminationCondition(7.5)) //Initial score is ~2.5
                                    .scoreCalculator(new SparkDataSetLossCalculator(irisData, true, sc.sc()))
                                    .modelSaver(saver).build();

    IEarlyStoppingTrainer<MultiLayerNetwork> trainer = new SparkEarlyStoppingTrainer(getContext().sc(),
                    new ParameterAveragingTrainingMaster(true, 4, 1, 150 / 10, 1, 0), esConf, net, irisData);
    EarlyStoppingResult result = trainer.fit();

    //Expect no score change due to 0 LR -> terminate after 6 total epochs
    assertTrue(result.getTotalEpochs() < 12); //Normally expect 6 epochs exactly; get a little more than that here due to rounding + order of operations
    assertEquals(EarlyStoppingResult.TerminationReason.EpochTerminationCondition, result.getTerminationReason());
    String expDetails = new ScoreImprovementEpochTerminationCondition(5).toString();
    assertEquals(expDetails, result.getTerminationDetails());
}
 
Example 3
Source File: OCNNOutputLayerTest.java    From deeplearning4j with Apache License 2.0
private MultiLayerNetwork getSingleLayer() {
        int numHidden = 2;

        MultiLayerConfiguration configuration = new NeuralNetConfiguration.Builder()
                .seed(12345)
                .weightInit(WeightInit.XAVIER)
                .miniBatch(true)
                .updater(new Adam(0.1))
//                .updater(Nesterovs.builder()
//                        .momentum(0.1)
//                        .learningRateSchedule(new StepSchedule(
//                                ScheduleType.EPOCH,
//                                1e-2,
//                                0.1,
//                                20)).build())
                .list(new DenseLayer.Builder().activation(new ActivationReLU())
                                .nIn(4).nOut(2).build(),
                        new  org.deeplearning4j.nn.conf.ocnn.OCNNOutputLayer.Builder()
                                .nIn(2).activation(new ActivationSigmoid()).initialRValue(0.1)
                                .nu(0.1)
                                .hiddenLayerSize(numHidden).build())
                .build();
        MultiLayerNetwork network = new MultiLayerNetwork(configuration);
        network.init();
        network.setListeners(new ScoreIterationListener(1));
        return network;
    }
 
Example 4
Source File: TestEarlyStopping.java    From deeplearning4j with Apache License 2.0
@Test
public void testNoImprovementNEpochsTermination() {
    //Idea: terminate training if score (test set loss) does not improve for 5 consecutive epochs
    //Simulate this by setting LR = 0.0

    Nd4j.getRandom().setSeed(12345);
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .updater(new Sgd(0.0)).weightInit(WeightInit.XAVIER).list()
                    .layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
                            .activation(Activation.SOFTMAX)
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                    .build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.setListeners(new ScoreIterationListener(1));

    DataSetIterator irisIter = new IrisDataSetIterator(150, 150);

    EarlyStoppingModelSaver<MultiLayerNetwork> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<MultiLayerNetwork> esConf =
                    new EarlyStoppingConfiguration.Builder<MultiLayerNetwork>()
                                    .epochTerminationConditions(new MaxEpochsTerminationCondition(100),
                                                    new ScoreImprovementEpochTerminationCondition(5))
                                    .iterationTerminationConditions(
                                                    new MaxTimeIterationTerminationCondition(1, TimeUnit.MINUTES),
                                                    new MaxScoreIterationTerminationCondition(50)) //Initial score is ~8
                                    .scoreCalculator(new DataSetLossCalculator(irisIter, true)).modelSaver(saver)
                                    .build();

    IEarlyStoppingTrainer trainer = new EarlyStoppingTrainer(esConf, net, irisIter);
    EarlyStoppingResult result = trainer.fit();

    //Expect no score change due to 0 LR -> terminate after 6 total epochs
    assertEquals(6, result.getTotalEpochs());
    assertEquals(0, result.getBestModelEpoch());
    assertEquals(EarlyStoppingResult.TerminationReason.EpochTerminationCondition, result.getTerminationReason());
    String expDetails = new ScoreImprovementEpochTerminationCondition(5).toString();
    assertEquals(expDetails, result.getTerminationDetails());
}
 
Example 5
Source File: EvalTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testEvaluativeListenerSimple(){
    //Sanity check: https://github.com/deeplearning4j/deeplearning4j/issues/5351

    // Network config
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()

            .optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT).seed(42)
            .updater(new Sgd(1e-6)).list()
            .layer(0, new DenseLayer.Builder().nIn(4).nOut(2).activation(Activation.TANH)
                    .weightInit(WeightInit.XAVIER).build())
            .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                    LossFunctions.LossFunction.MCXENT).nIn(2).nOut(3).weightInit(WeightInit.XAVIER)
                    .activation(Activation.SOFTMAX).build())
            .build();

    // Instantiate model
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();

    // Train-test split
    DataSetIterator iter = new IrisDataSetIterator(30, 150);
    DataSetIterator iterTest = new IrisDataSetIterator(30, 150);

    net.setListeners(new EvaluativeListener(iterTest, 3));

    for( int i=0; i<3; i++ ){
        net.fit(iter);
    }
}
 
Example 6
Source File: TestParallelEarlyStoppingUI.java    From deeplearning4j with Apache License 2.0
@Test
@Ignore //To be run manually
public void testParallelStatsListenerCompatibility() throws Exception {
    UIServer uiServer = UIServer.getInstance();

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .updater(new Sgd()).weightInit(WeightInit.XAVIER).list()
                    .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).build())
                    .layer(1, new OutputLayer.Builder().nIn(3).nOut(3)
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                    .build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);

    // it's important that the UI can report results from parallel training
    // there's potential for StatsListener to fail if certain properties aren't set in the model
    StatsStorage statsStorage = new InMemoryStatsStorage();
    net.setListeners(new StatsListener(statsStorage));
    uiServer.attach(statsStorage);

    DataSetIterator irisIter = new IrisDataSetIterator(50, 500);
    EarlyStoppingModelSaver<MultiLayerNetwork> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<MultiLayerNetwork> esConf =
                    new EarlyStoppingConfiguration.Builder<MultiLayerNetwork>()
                                    .epochTerminationConditions(new MaxEpochsTerminationCondition(500))
                                    .scoreCalculator(new DataSetLossCalculator(irisIter, true))
                                    .evaluateEveryNEpochs(2).modelSaver(saver).build();

    IEarlyStoppingTrainer<MultiLayerNetwork> trainer =
                    new EarlyStoppingParallelTrainer<>(esConf, net, irisIter, null, 3, 6, 2);

    EarlyStoppingResult<MultiLayerNetwork> result = trainer.fit();
    System.out.println(result);

    assertEquals(EarlyStoppingResult.TerminationReason.EpochTerminationCondition, result.getTerminationReason());
}
 
Example 7
Source File: TestVertxUI.java    From deeplearning4j with Apache License 2.0
@Test
public void testUIMultipleSessions() throws Exception {

    for (int session = 0; session < 3; session++) {

        StatsStorage ss = new InMemoryStatsStorage();

        UIServer uiServer = UIServer.getInstance();
        uiServer.attach(ss);

        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
                .layer(0, new DenseLayer.Builder().activation(Activation.TANH).nIn(4).nOut(4).build())
                .layer(1, new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
                        .activation(Activation.SOFTMAX).nIn(4).nOut(3).build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();
        net.setListeners(new StatsListener(ss, 1), new ScoreIterationListener(1));

        DataSetIterator iter = new IrisDataSetIterator(150, 150);

        for (int i = 0; i < 20; i++) {
            net.fit(iter);
            Thread.sleep(100);
        }
    }
}
 
Example 8
Source File: TestParallelEarlyStopping.java    From deeplearning4j with Apache License 2.0
@Test
public void testBadTuning() {
    //Test poor tuning (high LR): should terminate on MaxScoreIterationTerminationCondition

    Nd4j.getRandom().setSeed(12345);
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .updater(new Sgd(1.0)) //Intentionally huge LR
                    .weightInit(WeightInit.XAVIER).list()
                    .layer(0, new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX)
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                    .build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.setListeners(new ScoreIterationListener(1));

    DataSetIterator irisIter = new IrisDataSetIterator(10, 150);
    EarlyStoppingModelSaver<MultiLayerNetwork> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<MultiLayerNetwork> esConf =
                    new EarlyStoppingConfiguration.Builder<MultiLayerNetwork>()
                                    .epochTerminationConditions(new MaxEpochsTerminationCondition(5000))
                                    .iterationTerminationConditions(
                                                    new MaxTimeIterationTerminationCondition(1, TimeUnit.MINUTES),
                                                    new MaxScoreIterationTerminationCondition(10)) //Initial score is ~2.5
                                    .scoreCalculator(new DataSetLossCalculator(irisIter, true)).modelSaver(saver)
                                    .build();

    IEarlyStoppingTrainer<MultiLayerNetwork> trainer =
                    new EarlyStoppingParallelTrainer<>(esConf, net, irisIter, null, 2, 2, 1);
    EarlyStoppingResult result = trainer.fit();

    assertTrue(result.getTotalEpochs() < 5);
    assertEquals(EarlyStoppingResult.TerminationReason.IterationTerminationCondition,
                    result.getTerminationReason());
    String expDetails = new MaxScoreIterationTerminationCondition(10).toString();
    assertEquals(expDetails, result.getTerminationDetails());

    assertTrue(result.getBestModelEpoch() <= 0);
    assertNotNull(result.getBestModel());
}
 
Example 9
Source File: DataSetIteratorTest.java    From deeplearning4j with Apache License 2.0
public void runCifar(boolean preProcessCifar) throws Exception {
        final int height = 32;
        final int width = 32;
        int channels = 3;
        int outputNum = CifarLoader.NUM_LABELS;
        int batchSize = 5;
        int seed = 123;
        int listenerFreq = 1;

        Cifar10DataSetIterator cifar = new Cifar10DataSetIterator(batchSize);

        MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().seed(seed)
                        .gradientNormalization(GradientNormalization.RenormalizeL2PerLayer)
                        .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
                        .layer(0, new ConvolutionLayer.Builder(5, 5).nIn(channels).nOut(6).weightInit(WeightInit.XAVIER)
                                        .activation(Activation.RELU).build())
                        .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2})
                                        .build())
                        .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                                        .nOut(outputNum).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX)
                                        .build())

                        .setInputType(InputType.convolutionalFlat(height, width, channels));

        MultiLayerNetwork model = new MultiLayerNetwork(builder.build());
        model.init();

        //model.setListeners(Arrays.asList((TrainingListener) new ScoreIterationListener(listenerFreq)));

        CollectScoresIterationListener listener = new CollectScoresIterationListener(listenerFreq);
        model.setListeners(listener);

        model.fit(cifar);

        cifar = new Cifar10DataSetIterator(batchSize);
        Evaluation eval = new Evaluation(cifar.getLabels());
        while (cifar.hasNext()) {
            DataSet testDS = cifar.next(batchSize);
            INDArray output = model.output(testDS.getFeatures());
            eval.eval(testDS.getLabels(), output);
        }
//        System.out.println(eval.stats(true));
        listener.exportScores(System.out);
    }
 
Example 10
Source File: LearnIrisBackprop.java    From aifh with Apache License 2.0
/**
 * The main method.
 * @param args Not used.
 */
public static void main(String[] args) {
    try {
        int seed = 43;
        double learningRate = 0.1;
        int splitTrainNum = (int) (150 * .75);

        int numInputs = 4;
        int numOutputs = 3;
        int numHiddenNodes = 50;

        // Setup training data.
        final InputStream istream = LearnIrisBackprop.class.getResourceAsStream("/iris.csv");
        if( istream==null ) {
            System.out.println("Cannot access data set, make sure the resources are available.");
            System.exit(1);
        }
        final NormalizeDataSet ds = NormalizeDataSet.load(istream);
        final CategoryMap species = ds.encodeOneOfN(4); // species is column 4
        istream.close();

        DataSet next = ds.extractSupervised(0, 4, 4, 3);
        next.shuffle();

        // Training and validation data split
        SplitTestAndTrain testAndTrain = next.splitTestAndTrain(splitTrainNum, new Random(seed));
        DataSet trainSet = testAndTrain.getTrain();
        DataSet validationSet = testAndTrain.getTest();

        DataSetIterator trainSetIterator = new ListDataSetIterator(trainSet.asList(), trainSet.numExamples());

        DataSetIterator validationSetIterator = new ListDataSetIterator(validationSet.asList(), validationSet.numExamples());

        // Create neural network.
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .seed(seed)
                .iterations(1)
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                .learningRate(learningRate)
                .updater(Updater.NESTEROVS).momentum(0.9)
                .list(2)
                .layer(0, new DenseLayer.Builder().nIn(numInputs).nOut(numHiddenNodes)
                        .weightInit(WeightInit.XAVIER)
                        .activation("relu")
                        .build())
                .layer(1, new OutputLayer.Builder(LossFunction.NEGATIVELOGLIKELIHOOD)
                        .weightInit(WeightInit.XAVIER)
                        .activation("softmax")
                        .nIn(numHiddenNodes).nOut(numOutputs).build())
                .pretrain(false).backprop(true).build();


        MultiLayerNetwork model = new MultiLayerNetwork(conf);
        model.init();
        model.setListeners(new ScoreIterationListener(1));

        // Define when we want to stop training.
        EarlyStoppingModelSaver saver = new InMemoryModelSaver();
        EarlyStoppingConfiguration esConf = new EarlyStoppingConfiguration.Builder()
                .epochTerminationConditions(new MaxEpochsTerminationCondition(500)) //Max of 500 epochs
                .epochTerminationConditions(new ScoreImprovementEpochTerminationCondition(25))
                .evaluateEveryNEpochs(1)
                .scoreCalculator(new DataSetLossCalculator(validationSetIterator, true))     //Calculate test set score
                .modelSaver(saver)
                .build();
        EarlyStoppingTrainer trainer = new EarlyStoppingTrainer(esConf, conf, trainSetIterator);

        // Train and display result.
        EarlyStoppingResult result = trainer.fit();
        System.out.println("Termination reason: " + result.getTerminationReason());
        System.out.println("Termination details: " + result.getTerminationDetails());
        System.out.println("Total epochs: " + result.getTotalEpochs());
        System.out.println("Best epoch number: " + result.getBestModelEpoch());
        System.out.println("Score at best epoch: " + result.getBestModelScore());

        model = saver.getBestModel();

        // Evaluate
        Evaluation eval = new Evaluation(numOutputs);
        validationSetIterator.reset();

        for (int i = 0; i < validationSet.numExamples(); i++) {
            DataSet t = validationSet.get(i);
            INDArray features = t.getFeatureMatrix();
            INDArray labels = t.getLabels();
            INDArray predicted = model.output(features, false);
            System.out.println(features + ":Prediction("+findSpecies(labels,species)
                    +"):Actual("+findSpecies(predicted,species)+")" + predicted );
            eval.eval(labels, predicted);
        }

        //Print the evaluation statistics
        System.out.println(eval.stats());
    } catch(Exception ex) {
        ex.printStackTrace();
    }
}
 
Example 11
Source File: LearnXORBackprop.java    From aifh with Apache License 2.0
/**
 * The main method.
 * @param args Not used.
 */
public static void main(String[] args) {
    int seed = 43;
    double learningRate = 0.4;
    int nEpochs = 100;

    int numInputs = XOR_INPUT[0].length;
    int numOutputs = XOR_IDEAL[0].length;
    int numHiddenNodes = 4;

    // Setup training data.
    INDArray xorInput = Nd4j.create(XOR_INPUT);
    INDArray xorIdeal = Nd4j.create(XOR_IDEAL);
    DataSet xorDataSet = new DataSet(xorInput,xorIdeal);

    // Create neural network.
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .seed(seed)
            .iterations(1)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .learningRate(learningRate)
            .updater(Updater.NESTEROVS).momentum(0.9)
            .list(2)
            .layer(0, new DenseLayer.Builder().nIn(numInputs).nOut(numHiddenNodes)
                    .weightInit(WeightInit.XAVIER)
                    .activation("relu")
                    .build())
            .layer(1, new OutputLayer.Builder(LossFunction.MSE)
                    .weightInit(WeightInit.XAVIER)
                    .activation("identity")
                    .nIn(numHiddenNodes).nOut(numOutputs).build())
            .pretrain(false).backprop(true).build();


    MultiLayerNetwork model = new MultiLayerNetwork(conf);
    model.init();
    model.setListeners(new ScoreIterationListener(1));


    // Train
    for ( int n = 0; n < nEpochs; n++) {
        model.fit( xorDataSet );
    }


    // Evaluate
    System.out.println("Evaluating neural network.");
    for(int i=0;i<4;i++) {
        INDArray input = xorInput.getRow(i);
        INDArray output = model.output(input);
        System.out.println( input + " : " + output);
    }
}
 
Example 12
Source File: TestListeners.java    From deeplearning4j with Apache License 2.0
@Test
public void testListenerCalls(){

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .list()
            .layer(new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build())
            .build();

    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();

    TestListener tl = new TestListener();
    net.setListeners(tl);

    DataSetIterator irisIter = new IrisDataSetIterator(50, 150);

    net.fit(irisIter, 2);

    List<Triple<Call,Integer,Integer>> exp = new ArrayList<>();
    exp.add(new Triple<>(Call.EPOCH_START, 0, 0));
    exp.add(new Triple<>(Call.ON_FWD, 0, 0));
    exp.add(new Triple<>(Call.ON_BWD, 0, 0));
    exp.add(new Triple<>(Call.ON_GRAD, 0, 0));
    exp.add(new Triple<>(Call.ITER_DONE, 0, 0));
    exp.add(new Triple<>(Call.ON_FWD, 1, 0));
    exp.add(new Triple<>(Call.ON_BWD, 1, 0));
    exp.add(new Triple<>(Call.ON_GRAD, 1, 0));
    exp.add(new Triple<>(Call.ITER_DONE, 1, 0));
    exp.add(new Triple<>(Call.ON_FWD, 2, 0));
    exp.add(new Triple<>(Call.ON_BWD, 2, 0));
    exp.add(new Triple<>(Call.ON_GRAD, 2, 0));
    exp.add(new Triple<>(Call.ITER_DONE, 2, 0));
    exp.add(new Triple<>(Call.EPOCH_END, 3, 0));    //Post updating iter count, pre update epoch count

    exp.add(new Triple<>(Call.EPOCH_START, 3, 1));
    exp.add(new Triple<>(Call.ON_FWD, 3, 1));
    exp.add(new Triple<>(Call.ON_BWD, 3, 1));
    exp.add(new Triple<>(Call.ON_GRAD, 3, 1));
    exp.add(new Triple<>(Call.ITER_DONE, 3, 1));
    exp.add(new Triple<>(Call.ON_FWD, 4, 1));
    exp.add(new Triple<>(Call.ON_BWD, 4, 1));
    exp.add(new Triple<>(Call.ON_GRAD, 4, 1));
    exp.add(new Triple<>(Call.ITER_DONE, 4, 1));
    exp.add(new Triple<>(Call.ON_FWD, 5, 1));
    exp.add(new Triple<>(Call.ON_BWD, 5, 1));
    exp.add(new Triple<>(Call.ON_GRAD, 5, 1));
    exp.add(new Triple<>(Call.ITER_DONE, 5, 1));
    exp.add(new Triple<>(Call.EPOCH_END, 6, 1));


    assertEquals(exp, tl.getCalls());


    tl = new TestListener();

    ComputationGraph cg = net.toComputationGraph();
    cg.setListeners(tl);

    cg.fit(irisIter, 2);

    assertEquals(exp, tl.getCalls());
}
 
Example 13
Source File: CustomerRetentionPredictionExample.java    From Java-Deep-Learning-Cookbook with MIT License
public static void main(String[] args) throws IOException, InterruptedException {

       final int labelIndex=11;
       final int batchSize=8;
       final int numClasses=2;
       final INDArray weightsArray = Nd4j.create(new double[]{0.57, 0.75});

       final RecordReader recordReader = generateReader(new ClassPathResource("Churn_Modelling.csv").getFile());
       final DataSetIterator dataSetIterator = new RecordReaderDataSetIterator.Builder(recordReader,batchSize)
                                                                .classification(labelIndex,numClasses)
                                                                .build();
       final DataNormalization dataNormalization = new NormalizerStandardize();
       dataNormalization.fit(dataSetIterator);
       dataSetIterator.setPreProcessor(dataNormalization);
       final DataSetIteratorSplitter dataSetIteratorSplitter = new DataSetIteratorSplitter(dataSetIterator,1250,0.8);

       log.info("Building Model------------------->>>>>>>>>");

        final MultiLayerConfiguration configuration = new NeuralNetConfiguration.Builder()
                                                                    .weightInit(WeightInit.RELU_UNIFORM)
                                                                    .updater(new Adam(0.015D))
                                                                    .list()
                                                                    .layer(new DenseLayer.Builder().nIn(11).nOut(6).activation(Activation.RELU).dropOut(0.9).build())
                                                                    .layer(new DenseLayer.Builder().nIn(6).nOut(6).activation(Activation.RELU).dropOut(0.9).build())
                                                                    .layer(new DenseLayer.Builder().nIn(6).nOut(4).activation(Activation.RELU).dropOut(0.9).build())
                                                                    .layer(new OutputLayer.Builder(new LossMCXENT(weightsArray)).nIn(4).nOut(2).activation(Activation.SOFTMAX).build())
                                                                    .build();

        final UIServer uiServer = UIServer.getInstance();
        final StatsStorage statsStorage = new InMemoryStatsStorage();

        final MultiLayerNetwork multiLayerNetwork = new MultiLayerNetwork(configuration);
        multiLayerNetwork.init();
        multiLayerNetwork.setListeners(new ScoreIterationListener(100),
                                       new StatsListener(statsStorage));
        uiServer.attach(statsStorage);
        multiLayerNetwork.fit(dataSetIteratorSplitter.getTrainIterator(),100);

        final Evaluation evaluation =  multiLayerNetwork.evaluate(dataSetIteratorSplitter.getTestIterator(),Arrays.asList("0","1"));
        System.out.println(evaluation.stats());

        final File file = new File("model.zip");
        ModelSerializer.writeModel(multiLayerNetwork,file,true);
        ModelSerializer.addNormalizerToModel(file,dataNormalization);


    }
 
Example 14
Source File: TestConvolutionalListener.java    From deeplearning4j with Apache License 2.0
@Test
@Ignore //Should be run manually
public void testUI() throws Exception {

    int nChannels = 1; // Number of input channels
    int outputNum = 10; // The number of possible outcomes
    int batchSize = 64; // Test batch size

    DataSetIterator mnistTrain = new MnistDataSetIterator(batchSize, true, 12345);

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345) // Training iterations as above
                    .l2(0.0005).weightInit(WeightInit.XAVIER)
                    .updater(new Nesterovs(0.01, 0.9)).list()
                    .layer(0, new ConvolutionLayer.Builder(5, 5)
                                    //nIn and nOut specify depth. nIn here is the nChannels and nOut is the number of filters to be applied
                                    .nIn(nChannels).stride(1, 1).nOut(20).activation(Activation.IDENTITY).build())
                    .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX).kernelSize(2, 2)
                                    .stride(2, 2).build())
                    .layer(2, new ConvolutionLayer.Builder(5, 5)
                                    //Note that nIn need not be specified in later layers
                                    .stride(1, 1).nOut(50).activation(Activation.IDENTITY).build())
                    .layer(3, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX).kernelSize(2, 2)
                                    .stride(2, 2).build())
                    .layer(4, new DenseLayer.Builder().activation(Activation.RELU).nOut(500).build())
                    .layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                                    .nOut(outputNum).activation(Activation.SOFTMAX).build())
                    .setInputType(InputType.convolutionalFlat(28, 28, 1)) //See note below
                    .build();

    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
    net.setListeners(new ConvolutionalIterationListener(1), new ScoreIterationListener(1));

    for (int i = 0; i < 10; i++) {
        net.fit(mnistTrain.next());
        Thread.sleep(1000);
    }

    ComputationGraph cg = net.toComputationGraph();
    cg.setListeners(new ConvolutionalIterationListener(1), new ScoreIterationListener(1));
    for (int i = 0; i < 10; i++) {
        cg.fit(mnistTrain.next());
        Thread.sleep(1000);
    }



    Thread.sleep(100000);
}
 
Example 15
Source File: TestStatsListener.java    From deeplearning4j with Apache License 2.0
@Test
public void testListenerBasic() {

    for (boolean useJ7 : new boolean[] {false, true}) {

        DataSet ds = new IrisDataSetIterator(150, 150).next();

        MultiLayerConfiguration conf =
                        new NeuralNetConfiguration.Builder()
                                        .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                                        .list().layer(0,
                                                        new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                                                .activation(Activation.SOFTMAX).nIn(4).nOut(3).build())
                                        .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        StatsStorage ss = new MapDBStatsStorage(); //in-memory

        if (useJ7) {
            net.setListeners(new J7StatsListener(ss, 1));
        } else {
            net.setListeners(new StatsListener(ss, 1));
        }


        for (int i = 0; i < 3; i++) {
            net.fit(ds);
        }

        List<String> sids = ss.listSessionIDs();
        assertEquals(1, sids.size());
        String sessionID = ss.listSessionIDs().get(0);
        assertEquals(1, ss.listTypeIDsForSession(sessionID).size());
        String typeID = ss.listTypeIDsForSession(sessionID).get(0);
        assertEquals(1, ss.listWorkerIDsForSession(sessionID).size());
        String workerID = ss.listWorkerIDsForSession(sessionID).get(0);

        Persistable staticInfo = ss.getStaticInfo(sessionID, typeID, workerID);
        assertNotNull(staticInfo);
        System.out.println(staticInfo);

        List<Persistable> updates = ss.getAllUpdatesAfter(sessionID, typeID, workerID, 0);
        assertEquals(3, updates.size());
        for (Persistable p : updates) {
            System.out.println(p);
        }

    }

}
 
Example 16
Source File: TestVertxUIMultiSession.java    From deeplearning4j with Apache License 2.0
@Test
public void testUIAutoAttach() throws Exception {
    HashMap<String, StatsStorage> statsStorageForSession = new HashMap<>();

    Function<String, StatsStorage> statsStorageProvider = statsStorageForSession::get;
    UIServer uIServer = UIServer.getInstance(true, statsStorageProvider);

    for (int session = 0; session < 3; session++) {
        int layerSize = session + 4;

        InMemoryStatsStorage ss = new InMemoryStatsStorage();
        String sessionId = Integer.toString(session);
        statsStorageForSession.put(sessionId, ss);
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).list()
                .layer(0, new DenseLayer.Builder().activation(Activation.TANH).nIn(4).nOut(layerSize).build())
                .layer(1, new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
                        .activation(Activation.SOFTMAX).nIn(layerSize).nOut(3).build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        StatsListener statsListener = new StatsListener(ss, 1);
        statsListener.setSessionID(sessionId);
        net.setListeners(statsListener, new ScoreIterationListener(1));
        uIServer.attach(ss);

        DataSetIterator iter = new IrisDataSetIterator(150, 150);

        for (int i = 0; i < 20; i++) {
            net.fit(iter);
        }

        assertTrue(uIServer.isAttached(statsStorageForSession.get(sessionId)));
        uIServer.detach(ss);
        assertFalse(uIServer.isAttached(statsStorageForSession.get(sessionId)));

        /*
         * Visiting /train/:sessionId to auto-attach StatsStorage
         */
        String sessionUrl = trainingSessionUrl(uIServer.getAddress(), sessionId);
        HttpURLConnection conn = (HttpURLConnection) new URL(sessionUrl).openConnection();
        conn.connect();

        assertEquals(HttpResponseStatus.OK.code(), conn.getResponseCode());
        assertTrue(uIServer.isAttached(statsStorageForSession.get(sessionId)));
    }
}
 
Example 17
Source File: ActorCriticFactorySeparateStdDense.java    From deeplearning4j with Apache License 2.0
public ActorCriticSeparate buildActorCritic(int[] numInputs, int numOutputs) {
    int nIn = 1;
    for (int i : numInputs) {
        nIn *= i;
    }
    NeuralNetConfiguration.ListBuilder confB = new NeuralNetConfiguration.Builder().seed(Constants.NEURAL_NET_SEED)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(conf.getUpdater() != null ? conf.getUpdater() : new Adam())
            .weightInit(WeightInit.XAVIER)
            .l2(conf.getL2())
            .list().layer(0, new DenseLayer.Builder().nIn(nIn).nOut(conf.getNumHiddenNodes())
                    .activation(Activation.RELU).build());


    for (int i = 1; i < conf.getNumLayers(); i++) {
        confB.layer(i, new DenseLayer.Builder().nIn(conf.getNumHiddenNodes()).nOut(conf.getNumHiddenNodes())
                .activation(Activation.RELU).build());
    }

    if (conf.isUseLSTM()) {
        confB.layer(conf.getNumLayers(), new LSTM.Builder().nOut(conf.getNumHiddenNodes()).activation(Activation.TANH).build());

        confB.layer(conf.getNumLayers() + 1, new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
                .nIn(conf.getNumHiddenNodes()).nOut(1).build());
    } else {
        confB.layer(conf.getNumLayers(), new OutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY)
                .nIn(conf.getNumHiddenNodes()).nOut(1).build());
    }

    confB.setInputType(conf.isUseLSTM() ? InputType.recurrent(nIn) : InputType.feedForward(nIn));
    MultiLayerConfiguration mlnconf2 = confB.build();
    MultiLayerNetwork model = new MultiLayerNetwork(mlnconf2);
    model.init();
    if (conf.getListeners() != null) {
        model.setListeners(conf.getListeners());
    } else {
        model.setListeners(new ScoreIterationListener(Constants.NEURAL_NET_ITERATION_LISTENER));
    }

    NeuralNetConfiguration.ListBuilder confB2 = new NeuralNetConfiguration.Builder().seed(Constants.NEURAL_NET_SEED)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(conf.getUpdater() != null ? conf.getUpdater() : new Adam())
            .weightInit(WeightInit.XAVIER)
            //.regularization(true)
            //.l2(conf.getL2())
            .list().layer(0, new DenseLayer.Builder().nIn(nIn).nOut(conf.getNumHiddenNodes())
                    .activation(Activation.RELU).build());


    for (int i = 1; i < conf.getNumLayers(); i++) {
        confB2.layer(i, new DenseLayer.Builder().nIn(conf.getNumHiddenNodes()).nOut(conf.getNumHiddenNodes())
                .activation(Activation.RELU).build());
    }

    if (conf.isUseLSTM()) {
        confB2.layer(conf.getNumLayers(), new LSTM.Builder().nOut(conf.getNumHiddenNodes()).activation(Activation.TANH).build());

        confB2.layer(conf.getNumLayers() + 1, new RnnOutputLayer.Builder(new ActorCriticLoss())
                .activation(Activation.SOFTMAX).nIn(conf.getNumHiddenNodes()).nOut(numOutputs).build());
    } else {
        confB2.layer(conf.getNumLayers(), new OutputLayer.Builder(new ActorCriticLoss())
                .activation(Activation.SOFTMAX).nIn(conf.getNumHiddenNodes()).nOut(numOutputs).build());
    }

    confB2.setInputType(conf.isUseLSTM() ? InputType.recurrent(nIn) : InputType.feedForward(nIn));
    MultiLayerConfiguration mlnconf = confB2.build();
    MultiLayerNetwork model2 = new MultiLayerNetwork(mlnconf);
    model2.init();
    if (conf.getListeners() != null) {
        model2.setListeners(conf.getListeners());
    } else {
        model2.setListeners(new ScoreIterationListener(Constants.NEURAL_NET_ITERATION_LISTENER));
    }


    return new ActorCriticSeparate(model, model2);
}
 
Example 18
Source File: ManualTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testCNNActivations2() throws Exception {

    int nChannels = 1;
    int outputNum = 10;
    int batchSize = 64;
    int nEpochs = 10;
    int seed = 123;

    log.info("Load data....");
    DataSetIterator mnistTrain = new MnistDataSetIterator(batchSize, true, 12345);
    DataSetIterator mnistTest = new MnistDataSetIterator(batchSize, false, 12345);

    log.info("Build model....");
    MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().seed(seed)
                    .l2(0.0005)
                    .weightInit(WeightInit.XAVIER)
                    .updater(new Nesterovs(0.01, 0.9)).list()
                    .layer(0, new ConvolutionLayer.Builder(5, 5)
                                    //nIn and nOut specify depth. nIn here is the nChannels and nOut is the number of filters to be applied
                                    .nIn(nChannels).stride(1, 1).nOut(20).activation(Activation.IDENTITY).build())
                    .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX).kernelSize(2, 2)
                                    .stride(2, 2).build())
                    .layer(2, new ConvolutionLayer.Builder(5, 5)
                                    //Note that nIn need not be specified in later layers
                                    .stride(1, 1).nOut(50).activation(Activation.IDENTITY).build())
                    .layer(3, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX).kernelSize(2, 2)
                                    .stride(2, 2).build())
                    .layer(4, new DenseLayer.Builder().activation(Activation.RELU).nOut(500).build())
                    .layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                                    .nOut(outputNum).activation(Activation.SOFTMAX).build())
                    .setInputType(InputType.convolutional(28, 28, nChannels));

    MultiLayerConfiguration conf = builder.build();
    MultiLayerNetwork model = new MultiLayerNetwork(conf);
    model.init();
    /*
    ParallelWrapper wrapper = new ParallelWrapper.Builder(model)
        .averagingFrequency(1)
        .prefetchBuffer(12)
        .workers(2)
        .reportScoreAfterAveraging(false)
        .useLegacyAveraging(false)
        .build();
    */

    log.info("Train model....");
    model.setListeners(new ConvolutionalIterationListener(1));

    //((NativeOpExecutioner) Nd4j.getExecutioner()).getLoop().setOmpNumThreads(8);

    long timeX = System.currentTimeMillis();
    //        nEpochs = 2;
    for (int i = 0; i < nEpochs; i++) {
        long time1 = System.currentTimeMillis();
        model.fit(mnistTrain);
        //wrapper.fit(mnistTrain);
        long time2 = System.currentTimeMillis();
        log.info("*** Completed epoch {}, Time elapsed: {} ***", i, (time2 - time1));
    }
    long timeY = System.currentTimeMillis();

    log.info("Evaluate model....");
    Evaluation eval = new Evaluation(outputNum);
    while (mnistTest.hasNext()) {
        DataSet ds = mnistTest.next();
        INDArray output = model.output(ds.getFeatures(), false);
        eval.eval(ds.getLabels(), output);
    }
    log.info(eval.stats());
    mnistTest.reset();

    log.info("****************Example finished********************");
}
 
Example 19
Source File: DQNFactoryStdDense.java    From deeplearning4j with Apache License 2.0
public DQN buildDQN(int[] numInputs, int numOutputs) {
    int nIn = 1;

    for (int i : numInputs) {
        nIn *= i;
    }

    NeuralNetConfiguration.ListBuilder confB = new NeuralNetConfiguration.Builder().seed(Constants.NEURAL_NET_SEED)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(conf.getUpdater() != null ? conf.getUpdater() : new Adam())
            .weightInit(WeightInit.XAVIER)
            .l2(conf.getL2())
            .list()
            .layer(0,
                    new DenseLayer.Builder()
                            .nIn(nIn)
                            .nOut(conf.getNumHiddenNodes())
                            .activation(Activation.RELU).build()
            );


    for (int i = 1; i < conf.getNumLayers(); i++) {
        confB.layer(i, new DenseLayer.Builder().nIn(conf.getNumHiddenNodes()).nOut(conf.getNumHiddenNodes())
                .activation(Activation.RELU).build());
    }

    confB.layer(conf.getNumLayers(),
            new OutputLayer.Builder(LossFunctions.LossFunction.MSE)
                    .activation(Activation.IDENTITY)
                    .nIn(conf.getNumHiddenNodes())
                    .nOut(numOutputs)
                    .build()
    );


    MultiLayerConfiguration mlnconf = confB.build();
    MultiLayerNetwork model = new MultiLayerNetwork(mlnconf);
    model.init();
    if (conf.getListeners() != null) {
        model.setListeners(conf.getListeners());
    } else {
        model.setListeners(new ScoreIterationListener(Constants.NEURAL_NET_ITERATION_LISTENER));
    }
    return new DQN(model);
}
 
Example 20
Source File: LearnDigitsDropout.java    From aifh with Apache License 2.0
/**
 * The main method.
 * @param args Not used.
 */
public static void main(String[] args) {
    try {
        int seed = 43;
        double learningRate = 1e-2;
        int nEpochs = 50;
        int batchSize = 500;

        // Setup training data.
        System.out.println("Please wait, reading MNIST training data.");
        String dir = System.getProperty("user.dir");
        MNISTReader trainingReader = MNIST.loadMNIST(dir, true);
        MNISTReader validationReader = MNIST.loadMNIST(dir, false);

        DataSet trainingSet = trainingReader.getData();
        DataSet validationSet = validationReader.getData();

        DataSetIterator trainSetIterator = new ListDataSetIterator(trainingSet.asList(), batchSize);
        DataSetIterator validationSetIterator = new ListDataSetIterator(validationSet.asList(), validationReader.getNumRows());

        System.out.println("Training set size: " + trainingReader.getNumImages());
        System.out.println("Validation set size: " + validationReader.getNumImages());

        System.out.println(trainingSet.get(0).getFeatures().size(1));
        System.out.println(validationSet.get(0).getFeatures().size(1));

        int numInputs = trainingReader.getNumCols()*trainingReader.getNumRows();
        int numOutputs = 10;
        int numHiddenNodes = 100;

        // Create neural network.
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .seed(seed)
                .iterations(1)
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                .learningRate(learningRate)
                .updater(Updater.NESTEROVS).momentum(0.9)
                .list(2)
                .layer(0, new DenseLayer.Builder().nIn(numInputs).nOut(numHiddenNodes)
                        .weightInit(WeightInit.XAVIER)
                        .activation("relu")
                        .build())
                .layer(1, new OutputLayer.Builder(LossFunction.NEGATIVELOGLIKELIHOOD)
                        .weightInit(WeightInit.XAVIER)
                        .activation("softmax")
                        .nIn(numHiddenNodes).nOut(numOutputs).build())
                .pretrain(false).backprop(true).build();


        MultiLayerNetwork model = new MultiLayerNetwork(conf);
        model.init();
        model.setListeners(new ScoreIterationListener(1));

        // Define when we want to stop training.
        EarlyStoppingModelSaver saver = new InMemoryModelSaver();
        EarlyStoppingConfiguration esConf = new EarlyStoppingConfiguration.Builder()
                //.epochTerminationConditions(new MaxEpochsTerminationCondition(10))
                .epochTerminationConditions(new ScoreImprovementEpochTerminationCondition(5))
                .evaluateEveryNEpochs(1)
                .scoreCalculator(new DataSetLossCalculator(validationSetIterator, true))     //Calculate test set score
                .modelSaver(saver)
                .build();
        EarlyStoppingTrainer trainer = new EarlyStoppingTrainer(esConf, conf, trainSetIterator);

        // Train and display result.
        EarlyStoppingResult result = trainer.fit();
        System.out.println("Termination reason: " + result.getTerminationReason());
        System.out.println("Termination details: " + result.getTerminationDetails());
        System.out.println("Total epochs: " + result.getTotalEpochs());
        System.out.println("Best epoch number: " + result.getBestModelEpoch());
        System.out.println("Score at best epoch: " + result.getBestModelScore());

        model = saver.getBestModel();

        // Evaluate
        Evaluation eval = new Evaluation(numOutputs);
        validationSetIterator.reset();

        for (int i = 0; i < validationSet.numExamples(); i++) {
            DataSet t = validationSet.get(i);
            INDArray features = t.getFeatureMatrix();
            INDArray labels = t.getLabels();
            INDArray predicted = model.output(features, false);
            eval.eval(labels, predicted);
        }

        //Print the evaluation statistics
        System.out.println(eval.stats());
    } catch(Exception ex) {
        ex.printStackTrace();
    }

}