Java Code Examples for org.deeplearning4j.nn.multilayer.MultiLayerNetwork#evaluate()

The following examples show how to use org.deeplearning4j.nn.multilayer.MultiLayerNetwork#evaluate(). Each example notes the project and license it was taken from.
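Before the project examples, here is a minimal, self-contained sketch of the typical call pattern, written for this page rather than taken from any project below; the layer sizes and hyperparameters are illustrative only.

// A minimal sketch: fit a small softmax classifier on Iris, then score it with evaluate().
DataSetIterator iter = new IrisDataSetIterator(50, 150);
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
        .seed(123)
        .updater(new Adam(0.01))
        .list()
        .layer(new DenseLayer.Builder().nIn(4).nOut(10).activation(Activation.RELU).build())
        .layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                .nIn(10).nOut(3).activation(Activation.SOFTMAX).build())
        .build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
net.fit(iter, 30);        // 30 epochs; fit(DataSetIterator, int) resets the iterator between epochs
iter.reset();             // evaluate() should see the data from the start
Evaluation eval = net.evaluate(iter);
System.out.println(eval.stats());   // accuracy, precision, recall, F1 and the confusion matrix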
Example 1
Source File: TestComputationGraphNetwork.java    From deeplearning4j with Apache License 2.0
@Test
public void testCGEvaluation() {

    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration configuration = getIrisGraphConfiguration();
    ComputationGraph graph = new ComputationGraph(configuration);
    graph.init();

    Nd4j.getRandom().setSeed(12345);
    MultiLayerConfiguration mlnConfig = getIrisMLNConfiguration();
    MultiLayerNetwork net = new MultiLayerNetwork(mlnConfig);
    net.init();

    DataSetIterator iris = new IrisDataSetIterator(75, 150);

    net.fit(iris);
    iris.reset();
    graph.fit(iris);

    iris.reset();
    Evaluation evalExpected = net.evaluate(iris);
    iris.reset();
    Evaluation evalActual = graph.evaluate(iris);

    assertEquals(evalExpected.accuracy(), evalActual.accuracy(), 0.0); // identical seeds and data, so the accuracies must match exactly
}
 
Example 2
Source File: AccuracyCalculator.java    From Java-Machine-Learning-for-Computer-Vision with MIT License
@Override
public double calculateScore(MultiLayerNetwork network) {
    Evaluation evaluate = network.evaluate(dataSetIterator);
    double accuracy = evaluate.accuracy();
    log.info("Accuracy at iteration " + i++ + ": " + accuracy);
    return 1 - accuracy;
}
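The score is returned as 1 - accuracy because DL4J's early stopping minimizes the calculator's score. A sketch of how such a calculator is typically wired in (the AccuracyCalculator constructor arguments are not shown in this excerpt, so they are left as a placeholder):

EarlyStoppingConfiguration<MultiLayerNetwork> esConf =
        new EarlyStoppingConfiguration.Builder<MultiLayerNetwork>()
                .epochTerminationConditions(new MaxEpochsTerminationCondition(30))
                .scoreCalculator(new AccuracyCalculator(/* validation iterator */))  // lower score = higher accuracy
                .modelSaver(new InMemoryModelSaver<>())
                .build();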
 
Example 3
Source File: DataSetAccuracyLossCalculator.java    From dl4j-tutorials with MIT License
@Override
public double calculateScore(MultiLayerNetwork network) {

    double sum = 0;
    for (DataSetIterator dataSetIterator : dataSetIterators) {
        Evaluation eval = network.evaluate(dataSetIterator);
        sum += eval.accuracy();
    }

    return sum / dataSetIterators.length;
}
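Note the contrast with Example 2: this calculator returns the raw average accuracy across several test sets, so whichever component consumes it must treat higher scores as better; feeding it to a minimizing early-stopping loop would require flipping the return value to 1 - average, as in Example 2.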
 
Example 4
Source File: EvalTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testEvalSplitting2(){
    List<List<Writable>> seqFeatures = new ArrayList<>();
    List<Writable> step = Arrays.<Writable>asList(new FloatWritable(0), new FloatWritable(0), new FloatWritable(0));
    for( int i=0; i<30; i++ ){
        seqFeatures.add(step);
    }
    List<List<Writable>> seqLabels = Collections.singletonList(Collections.<Writable>singletonList(new FloatWritable(0)));

    SequenceRecordReader fsr = new CollectionSequenceRecordReader(Collections.singletonList(seqFeatures));
    SequenceRecordReader lsr = new CollectionSequenceRecordReader(Collections.singletonList(seqLabels));


    DataSetIterator testData = new SequenceRecordReaderDataSetIterator(fsr, lsr, 1, -1, true,
            SequenceRecordReaderDataSetIterator.AlignmentMode.ALIGN_END);

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(123)
            .list()
            .layer(0, new LSTM.Builder().activation(Activation.TANH).nIn(3).nOut(3).build())
            .layer(1, new RnnOutputLayer.Builder().activation(Activation.SIGMOID).lossFunction(LossFunctions.LossFunction.XENT)
                    .nIn(3).nOut(1).build())
            .backpropType(BackpropType.TruncatedBPTT).tBPTTForwardLength(10).tBPTTBackwardLength(10)
            .build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();

    net.evaluate(testData);
}
 
Example 5
Source File: CustomerRetentionPredictionExample.java    From Java-Deep-Learning-Cookbook with MIT License
public static void main(String[] args) throws IOException, InterruptedException {

    final int labelIndex = 11;
    final int batchSize = 8;
    final int numClasses = 2;
    final INDArray weightsArray = Nd4j.create(new double[]{0.57, 0.75});

    final RecordReader recordReader = generateReader(new ClassPathResource("Churn_Modelling.csv").getFile());
    final DataSetIterator dataSetIterator = new RecordReaderDataSetIterator.Builder(recordReader, batchSize)
            .classification(labelIndex, numClasses)
            .build();
    final DataNormalization dataNormalization = new NormalizerStandardize();
    dataNormalization.fit(dataSetIterator);
    dataSetIterator.setPreProcessor(dataNormalization);
    final DataSetIteratorSplitter dataSetIteratorSplitter = new DataSetIteratorSplitter(dataSetIterator, 1250, 0.8);

    log.info("Building Model------------------->>>>>>>>>");

    final MultiLayerConfiguration configuration = new NeuralNetConfiguration.Builder()
            .weightInit(WeightInit.RELU_UNIFORM)
            .updater(new Adam(0.015D))
            .list()
            .layer(new DenseLayer.Builder().nIn(11).nOut(6).activation(Activation.RELU).dropOut(0.9).build())
            .layer(new DenseLayer.Builder().nIn(6).nOut(6).activation(Activation.RELU).dropOut(0.9).build())
            .layer(new DenseLayer.Builder().nIn(6).nOut(4).activation(Activation.RELU).dropOut(0.9).build())
            .layer(new OutputLayer.Builder(new LossMCXENT(weightsArray)).nIn(4).nOut(2).activation(Activation.SOFTMAX).build())
            .build();

    final UIServer uiServer = UIServer.getInstance();
    final StatsStorage statsStorage = new InMemoryStatsStorage();

    final MultiLayerNetwork multiLayerNetwork = new MultiLayerNetwork(configuration);
    multiLayerNetwork.init();
    multiLayerNetwork.setListeners(new ScoreIterationListener(100),
            new StatsListener(statsStorage));
    uiServer.attach(statsStorage);
    multiLayerNetwork.fit(dataSetIteratorSplitter.getTrainIterator(), 100);

    final Evaluation evaluation = multiLayerNetwork.evaluate(dataSetIteratorSplitter.getTestIterator(), Arrays.asList("0", "1"));
    System.out.println(evaluation.stats());

    final File file = new File("model.zip");
    ModelSerializer.writeModel(multiLayerNetwork, file, true);
    ModelSerializer.addNormalizerToModel(file, dataNormalization);
}
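Once saved this way, the model and its normalizer can be restored together for later inference or evaluation; a short sketch using the standard ModelSerializer accessors:

final MultiLayerNetwork restored = ModelSerializer.restoreMultiLayerNetwork(new File("model.zip"));
final DataNormalization normalizer = ModelSerializer.restoreNormalizerFromFile(new File("model.zip"));
// apply the same normalization to new data before calling restored.output(...) or restored.evaluate(...)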
 
Example 6
Source File: Trainer.java    From dl4j-quickstart with Apache License 2.0
public static void main(String... args) throws java.io.IOException {
    // create the data iterators for emnist
    DataSetIterator emnistTrain = new EmnistDataSetIterator(emnistSet, batchSize, true);
    DataSetIterator emnistTest = new EmnistDataSetIterator(emnistSet, batchSize, false);

    int outputNum = EmnistDataSetIterator.numLabels(emnistSet);

    // network configuration (not yet initialized)
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .seed(rngSeed)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(new Adam())
            .l2(1e-4)
            .list()
            .layer(new DenseLayer.Builder()
                    .nIn(numRows * numColumns) // Number of input datapoints.
                    .nOut(1000) // Number of output datapoints.
                    .activation(Activation.RELU) // Activation function.
                    .weightInit(WeightInit.XAVIER) // Weight initialization.
                    .build())
            .layer(new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                    .nIn(1000)
                    .nOut(outputNum)
                    .activation(Activation.SOFTMAX)
                    .weightInit(WeightInit.XAVIER)
                    .build())
            .pretrain(false).backprop(true)
            .build();

    // create the MLN
    MultiLayerNetwork network = new MultiLayerNetwork(conf);
    network.init();

    // pass a training listener that reports score every N iterations
    network.addListeners(new ScoreIterationListener(reportingInterval));

    // here we set up an early stopping trainer
    // early stopping is useful when your trainer runs for
    // a long time or you need to programmatically stop training
    EarlyStoppingConfiguration esConf = new EarlyStoppingConfiguration.Builder()
            .epochTerminationConditions(new MaxEpochsTerminationCondition(5))
            .iterationTerminationConditions(new MaxTimeIterationTerminationCondition(20, TimeUnit.MINUTES))
            .scoreCalculator(new DataSetLossCalculator(emnistTest, true))
            .evaluateEveryNEpochs(1)
            .modelSaver(new LocalFileModelSaver(System.getProperty("user.dir")))
            .build();

    // training
    EarlyStoppingTrainer trainer = new EarlyStoppingTrainer(esConf, network, emnistTrain);
    EarlyStoppingResult result = trainer.fit();

    // print out early stopping results
    System.out.println("Termination reason: " + result.getTerminationReason());
    System.out.println("Termination details: " + result.getTerminationDetails());
    System.out.println("Total epochs: " + result.getTotalEpochs());
    System.out.println("Best epoch number: " + result.getBestModelEpoch());
    System.out.println("Score at best epoch: " + result.getBestModelScore());

    // evaluate basic performance
    Evaluation eval = network.evaluate(emnistTest);
    System.out.println(eval.accuracy());
    System.out.println(eval.precision());
    System.out.println(eval.recall());

    // evaluate ROC and calculate the Area Under Curve
    ROCMultiClass roc = network.evaluateROCMultiClass(emnistTest);
    System.out.println(roc.calculateAverageAUC());

    // calculate AUC for a single class
    int classIndex = 0;
    System.out.println(roc.calculateAUC(classIndex));

    // optionally, you can print all stats from the evaluations
    System.out.println(eval.stats());
    System.out.println(roc.stats());
}
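A caveat on the final evaluation above: after early stopping, network holds the parameters from the last epoch run, which is not necessarily the best epoch. To score the best checkpoint instead, pull it from the result (EarlyStoppingResult#getBestModel is the standard accessor; the raw-typed result above requires a cast):

MultiLayerNetwork best = (MultiLayerNetwork) result.getBestModel();
Evaluation bestEval = best.evaluate(emnistTest);
System.out.println("Best-epoch accuracy: " + bestEval.accuracy());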
 
Example 7
Source File: DL4JXORHelloWorld.java    From neo4j-ml-procedures with Apache License 2.0
/**
     * The main method.
     * @param args No arguments are used.
     */
    public static void main(final String args[]) throws IOException, InterruptedException {

        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .seed(123)
                .iterations(1)
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                .learningRate(0.1)
//                 .useDropConnect(false)
//                .biasInit(0)
                .miniBatch(false)
//                .updater(Updater.SGD)
                .list()
                .layer(0, new DenseLayer.Builder().nIn(2).nOut(2)
//                        .weightInit(WeightInit.DISTRIBUTION).dist(new UniformDistribution(0,1))
                        .activation(Activation.SIGMOID)
                        .build())
                .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MSE)
//                        .weightInit(WeightInit.DISTRIBUTION).dist(new UniformDistribution(0,1))
                        .activation(Activation.SIGMOID)
                        .nIn(2).nOut(1).build())
                .pretrain(false).backprop(true).build();


        MultiLayerNetwork model = new MultiLayerNetwork(conf);
        model.init();
        model.setListeners(new ScoreIterationListener(1));  // Print the score every parameter update


        DoublesDataSetIterator iterator = new DoublesDataSetIterator(Arrays.asList(
                makePair(XOR_INPUT[0],XOR_IDEAL[0]),
                makePair(XOR_INPUT[1],XOR_IDEAL[1]),
                makePair(XOR_INPUT[2],XOR_IDEAL[2]),
                makePair(XOR_INPUT[3],XOR_IDEAL[3]))
                ,1);

        for (int n = 0; n < 10000; n++) {
            model.fit(iterator);
        }

        Evaluation eval = model.evaluate(iterator);
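        // getPredictionErrors() is only populated when the evaluation was fed RecordMetaData;
        // with a plain DoublesDataSetIterator it is typically null, hence the guard below.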
        List<Prediction> predictionErrors = eval.getPredictionErrors();
        System.out.println("\n\n+++++ Prediction Errors +++++");
        if (predictionErrors != null) {
            for (Prediction p : predictionErrors) {
                System.out.printf("Predicted class: %d, Actual class: %d\t%s%n", p.getPredictedClass(), p.getActualClass(), p.getRecordMetaData(RecordMetaData.class));
            }
        }
        //Print the evaluation statistics
        System.out.println(eval.stats());

        INDArray data = Nd4j.zeros(2, 2);
        data.putScalar(0,0,0d);
        data.putScalar(0,1,1d);
        data.putScalar(1,0,1d);
        data.putScalar(1,1,1d);
        INDArray output = model.output(data);

        for (int i=0;i<data.rows();i++) {
            System.out.println(data.getRow(i) +" -> "+ output.getRow(i));
        }
    }
 
Example 8
Source File: EvalTest.java    From deeplearning4j with Apache License 2.0
@Test
    public void testIris() {

        // Network config
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()

                        .optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT).seed(42)
                        .updater(new Sgd(1e-6)).list()
                        .layer(0, new DenseLayer.Builder().nIn(4).nOut(2).activation(Activation.TANH)
                                        .weightInit(WeightInit.XAVIER).build())
                        .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                        LossFunctions.LossFunction.MCXENT).nIn(2).nOut(3).weightInit(WeightInit.XAVIER)
                                                        .activation(Activation.SOFTMAX).build())

                        .build();

        // Instantiate model
        MultiLayerNetwork model = new MultiLayerNetwork(conf);
        model.init();
        model.addListeners(new ScoreIterationListener(1));

        // Train-test split
        DataSetIterator iter = new IrisDataSetIterator(150, 150);
        DataSet next = iter.next();
        next.shuffle();
        SplitTestAndTrain trainTest = next.splitTestAndTrain(5, new Random(42));

        // Train
        DataSet train = trainTest.getTrain();
        train.normalizeZeroMeanZeroUnitVariance();

        // Test
        DataSet test = trainTest.getTest();
        test.normalizeZeroMeanZeroUnitVariance();
        INDArray testFeature = test.getFeatures();
        INDArray testLabel = test.getLabels();

        // Fitting model
        model.fit(train);
        // Get predictions from test feature
        INDArray testPredictedLabel = model.output(testFeature);

        // Eval with class number
        org.nd4j.evaluation.classification.Evaluation eval = new org.nd4j.evaluation.classification.Evaluation(3); // class count specified explicitly
        eval.eval(testLabel, testPredictedLabel);
        double eval1F1 = eval.f1();
        double eval1Acc = eval.accuracy();

        // Eval without class number
        org.nd4j.evaluation.classification.Evaluation eval2 = new org.nd4j.evaluation.classification.Evaluation(); // class count inferred from the data
        eval2.eval(testLabel, testPredictedLabel);
        double eval2F1 = eval2.f1();
        double eval2Acc = eval2.accuracy();

        //Assert the two implementations give same f1 and accuracy (since one batch)
        assertTrue(eval1F1 == eval2F1 && eval1Acc == eval2Acc);

        org.nd4j.evaluation.classification.Evaluation evalViaMethod = model.evaluate(new ListDataSetIterator<>(Collections.singletonList(test)));
        checkEvaluationEquality(eval, evalViaMethod);

        // Exercise the confusion-matrix export methods; this only checks that none of them throw
        eval.getConfusionMatrix().toString();
        eval.getConfusionMatrix().toCSV();
        eval.getConfusionMatrix().toHTML();
        eval.confusionToString();
    }
 
Example 9
Source File: CapsNetMNISTTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testCapsNetOnMNIST(){
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .seed(123)
            .updater(new Adam())
            .list()
            .layer(new ConvolutionLayer.Builder()
                    .nOut(16)
                    .kernelSize(9, 9)
                    .stride(3, 3)
                    .build())
            .layer(new PrimaryCapsules.Builder(8, 8)
                    .kernelSize(7, 7)
                    .stride(2, 2)
                    .build())
            .layer(new CapsuleLayer.Builder(10, 16, 3).build())
            .layer(new CapsuleStrengthLayer.Builder().build())
            .layer(new ActivationLayer.Builder(new ActivationSoftmax()).build())
            .layer(new LossLayer.Builder(new LossNegativeLogLikelihood()).build())
            .setInputType(InputType.convolutionalFlat(28, 28, 1))
            .build();

    MultiLayerNetwork model = new MultiLayerNetwork(conf);
    model.init();

    int rngSeed = 12345;
    try {
        MnistDataSetIterator mnistTrain = new MnistDataSetIterator(64, true, rngSeed);
        MnistDataSetIterator mnistTest = new MnistDataSetIterator(64, false, rngSeed);

        for (int i = 0; i < 2; i++) {
            model.fit(mnistTrain);
        }

        Evaluation eval = model.evaluate(mnistTest);

        assertTrue("Accuracy not over 95%", eval.accuracy() > 0.95);
        assertTrue("Precision not over 95%", eval.precision() > 0.95);
        assertTrue("Recall not over 95%", eval.recall() > 0.95);
        assertTrue("F1-score not over 95%", eval.f1() > 0.95);

    } catch (IOException e){
        System.out.println("Could not load MNIST.");
    }
}
 
Example 10
Source File: ParallelWrapperTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testParallelWrapperRun() throws Exception {

    int nChannels = 1;
    int outputNum = 10;

    // for GPU you usually want to have higher batchSize
    int batchSize = 128;
    int nEpochs = 5;
    int seed = 123;

    log.info("Load data....");
    DataSetIterator mnistTrain = new EarlyTerminationDataSetIterator(new MnistDataSetIterator(batchSize, true, 12345), 15);
    DataSetIterator mnistTest = new EarlyTerminationDataSetIterator(new MnistDataSetIterator(batchSize, false, 12345), 4);

    assertTrue(mnistTrain.hasNext());
    val t0 = mnistTrain.next();

    log.info("F: {}; L: {};", t0.getFeatures().shape(), t0.getLabels().shape());

    log.info("Build model....");
    MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().seed(seed)
                    .l2(0.0005)
                    //.learningRateDecayPolicy(LearningRatePolicy.Inverse).lrPolicyDecayRate(0.001).lrPolicyPower(0.75)
                    .weightInit(WeightInit.XAVIER)
                    .updater(new Nesterovs(0.01, 0.9)).list()
                    .layer(0, new ConvolutionLayer.Builder(5, 5)
                                    //nIn and nOut specify depth. nIn here is the nChannels and nOut is the number of filters to be applied
                                    .nIn(nChannels).stride(1, 1).nOut(20).activation(Activation.IDENTITY).build())
                    .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX).kernelSize(2, 2)
                                    .stride(2, 2).build())
                    .layer(2, new ConvolutionLayer.Builder(5, 5)
                                    //Note that nIn need not be specified in later layers
                                    .stride(1, 1).nOut(50).activation(Activation.IDENTITY).build())
                    .layer(3, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX).kernelSize(2, 2)
                                    .stride(2, 2).build())
                    .layer(4, new DenseLayer.Builder().activation(Activation.RELU).nOut(500).build())
                    .layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                                    .nOut(outputNum).activation(Activation.SOFTMAX).build())
                    .setInputType(InputType.convolutionalFlat(28, 28, nChannels));

    MultiLayerConfiguration conf = builder.build();
    MultiLayerNetwork model = new MultiLayerNetwork(conf);
    model.init();

    // ParallelWrapper will take care of load balancing between GPUs.
    ParallelWrapper wrapper = new ParallelWrapper.Builder(model)
                    // DataSets prefetching options. Set this value with respect to number of actual devices
                    .prefetchBuffer(24)

                    // set number of workers equal to or higher than the number of available devices; 1x-2x the device count is a good starting point
                    .workers(2)

                    // less frequent averaging improves performance, but may reduce model accuracy
                    .averagingFrequency(3)

                    // if set to TRUE, the model score will be reported after every averaging
                    .reportScoreAfterAveraging(true)

                    // optional parameter; set to false ONLY if your system supports P2P memory access across PCIe (hint: AWS does not support P2P)
                    .build();

    log.info("Train model....");
    model.setListeners(new ScoreIterationListener(100));
    long timeX = System.currentTimeMillis();

    // optionally you might want to use MultipleEpochsIterator instead of manually iterating/resetting over your iterator
    //MultipleEpochsIterator mnistMultiEpochIterator = new MultipleEpochsIterator(nEpochs, mnistTrain);

    for (int i = 0; i < nEpochs; i++) {
        long time1 = System.currentTimeMillis();

        // Please note: we're feeding ParallelWrapper with iterator, not model directly
        //            wrapper.fit(mnistMultiEpochIterator);
        wrapper.fit(mnistTrain);
        long time2 = System.currentTimeMillis();
        log.info("*** Completed epoch {}, time: {} ***", i, (time2 - time1));
    }
    long timeY = System.currentTimeMillis();
    log.info("*** Training complete, time: {} ***", (timeY - timeX));

    Evaluation eval = model.evaluate(mnistTest);
    log.info(eval.stats());
    mnistTest.reset();

    double acc = eval.accuracy();
    assertTrue(String.valueOf(acc), acc > 0.5);

    wrapper.shutdown();
}
 
Example 11
Source File: EvaluationScoreFunction.java    From deeplearning4j with Apache License 2.0
@Override
public double score(MultiLayerNetwork net, DataSetIterator iterator) {
    Evaluation e = net.evaluate(iterator);
    return e.scoreForMetric(metric);
}
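Here metric is an org.nd4j.evaluation.classification.Evaluation.Metric enum value supplied when the score function is constructed, so the same one-liner covers accuracy, F1, precision, and so on. A quick sketch of the underlying call (net and iterator assumed):

Evaluation e = net.evaluate(iterator);
double acc = e.scoreForMetric(Evaluation.Metric.ACCURACY);  // equivalent to e.accuracy()
double f1 = e.scoreForMetric(Evaluation.Metric.F1);         // equivalent to e.f1()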
 
Example 12
Source File: TestSetAccuracyScoreFunction.java    From deeplearning4j with Apache License 2.0
@Override
public double score(MultiLayerNetwork net, DataSetIterator iterator) {
    Evaluation e = net.evaluate(iterator);
    return e.accuracy();
}
 
Example 13
Source File: TestSetF1ScoreFunction.java    From deeplearning4j with Apache License 2.0
@Override
public double score(MultiLayerNetwork net, DataSetIterator iterator) {
    Evaluation e = net.evaluate(iterator);
    return e.f1();
}
 
Example 14
Source File: ScoreUtil.java    From deeplearning4j with Apache License 2.0
/**
 * Evaluate a {@link MultiLayerNetwork} on a test set.
 *
 * @param model    the trained network to evaluate
 * @param testData iterator over the test data
 * @return the resulting {@link Evaluation}
 */
public static Evaluation getEvaluation(MultiLayerNetwork model, DataSetIterator testData) {
    return model.evaluate(testData);
}