org.nd4j.evaluation.classification.Evaluation Java Examples

The following examples show how to use org.nd4j.evaluation.classification.Evaluation. Each example is drawn from an open source project; the source file, project, and license are noted above each listing.
Example #1
Source File: MultiLayerNetwork.java    From deeplearning4j with Apache License 2.0
/**
 * Evaluate the network (for classification) on the provided data set, with top N accuracy in addition to standard accuracy.
 * For 'standard' accuracy evaluation only, use topN = 1
 *
 * @param iterator   Iterator (data) to evaluate on
 * @param labelsList List of labels. May be null.
 * @param topN       N value for top N accuracy evaluation
 * @return Evaluation object, summarizing the results of the evaluation on the provided DataSetIterator
 */
public Evaluation evaluate(DataSetIterator iterator, List<String> labelsList, int topN) {
    if (layers == null || !(getOutputLayer() instanceof IOutputLayer)) {
        throw new IllegalStateException("Cannot evaluate network with no output layer");
    }
    if (labelsList == null) {
        try {
            labelsList = iterator.getLabels();
        } catch (Throwable t){ }    //Ignore, maybe UnsupportedOperationException etc
    }

    Layer outputLayer = getOutputLayer();
    if(getLayerWiseConfigurations().isValidateOutputLayerConfig()){
        OutputLayerUtil.validateOutputLayerForClassifierEvaluation(outputLayer.conf().getLayer(), Evaluation.class);
    }

    Evaluation e = new org.deeplearning4j.eval.Evaluation(labelsList, topN);
    doEvaluation(iterator, e);

    return e;
}
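
A minimal usage sketch of this method follows. The trained network trainedNet, the test iterator testIter, and the topN value of 5 are illustrative assumptions, not part of the example above.

// Sketch: evaluate a trained MultiLayerNetwork with top-5 accuracy in addition to standard accuracy.
// 'trainedNet' and 'testIter' are assumed to already exist; the labels list may be null.
Evaluation eval = trainedNet.evaluate(testIter, null, 5);
System.out.println(eval.stats());                              // stats include top-N accuracy when topN > 1
System.out.println("Top-5 accuracy: " + eval.topNAccuracy());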
 
Example #2
Source File: UIListener.java    From deeplearning4j with Apache License 2.0
public Builder trainEvaluationMetrics(String name, int labelIdx, Evaluation.Metric... metrics){
    if(trainEvalMetrics == null){
        trainEvalMetrics = new LinkedHashMap<>();
    }
    Pair<String,Integer> p = new Pair<>(name, labelIdx);
    if(!trainEvalMetrics.containsKey(p)){
        trainEvalMetrics.put(p, new ArrayList<Evaluation.Metric>());
    }
    List<Evaluation.Metric> l = trainEvalMetrics.get(p);
    for(Evaluation.Metric m : metrics){
        if(!l.contains(m)){
            l.add(m);
        }
    }
    return this;
}
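
Example #22 below shows this builder in a complete training setup; in isolation the call pattern is simply the sketch below (the log file name and the output variable name "softmax" are illustrative assumptions).

// Sketch: record ACCURACY and F1 for the output variable "softmax" (label index 0) during training.
UIListener listener = UIListener.builder(new File("logFile.bin"))
        .trainEvaluationMetrics("softmax", 0, Evaluation.Metric.ACCURACY, Evaluation.Metric.F1)
        .build();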
 
Example #3
Source File: EvalTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testConfusionMatrixStats() {

    Evaluation e = new Evaluation();

    INDArray c0 = Nd4j.create(new double[] {1, 0, 0}, new long[]{1, 3});
    INDArray c1 = Nd4j.create(new double[] {0, 1, 0}, new long[]{1, 3});
    INDArray c2 = Nd4j.create(new double[] {0, 0, 1}, new long[]{1, 3});

    apply(e, 3, c2, c0); //Predicted class 2 when actually class 0, 3 times
    apply(e, 2, c0, c1); //Predicted class 0 when actually class 1, 2 times

    String s1 = " 0 0 3 | 0 = 0";   //First row: predicted 2, actual 0 - 3 times
    String s2 = " 2 0 0 | 1 = 1";   //Second row: predicted 0, actual 1 - 2 times

    String stats = e.stats();
    assertTrue(stats, stats.contains(s1));
    assertTrue(stats, stats.contains(s2));
}
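
The apply(...) helper used above is not part of this excerpt. A minimal sketch of what it presumably does (recording the same predicted/actual pair a given number of times) is shown below; note that Evaluation.eval takes the actual labels first and the predictions second, as in the other examples in this listing.

// Hypothetical reconstruction of the apply(...) helper used in the test above:
// record the (actual, predicted) pair nTimes into the Evaluation instance.
private static void apply(Evaluation e, int nTimes, INDArray predicted, INDArray actual) {
    for (int i = 0; i < nTimes; i++) {
        e.eval(actual, predicted);
    }
}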
 
Example #4
Source File: TestSparkMultiLayerParameterAveraging.java    From deeplearning4j with Apache License 2.0
@Test
public void testEvaluation() {

    SparkDl4jMultiLayer sparkNet = getBasicNetwork();
    MultiLayerNetwork netCopy = sparkNet.getNetwork().clone();

    Evaluation evalExpected = new Evaluation();
    INDArray outLocal = netCopy.output(input, Layer.TrainingMode.TEST);
    evalExpected.eval(labels, outLocal);

    Evaluation evalActual = sparkNet.evaluate(sparkData);

    assertEquals(evalExpected.accuracy(), evalActual.accuracy(), 1e-3);
    assertEquals(evalExpected.f1(), evalActual.f1(), 1e-3);
    assertEquals(evalExpected.getNumRowCounter(), evalActual.getNumRowCounter(), 1e-3);
    assertMapEquals(evalExpected.falseNegatives(), evalActual.falseNegatives());
    assertMapEquals(evalExpected.falsePositives(), evalActual.falsePositives());
    assertMapEquals(evalExpected.trueNegatives(), evalActual.trueNegatives());
    assertMapEquals(evalExpected.truePositives(), evalActual.truePositives());
    assertEquals(evalExpected.precision(), evalActual.precision(), 1e-3);
    assertEquals(evalExpected.recall(), evalActual.recall(), 1e-3);
    assertEquals(evalExpected.getConfusionMatrix(), evalActual.getConfusionMatrix());
}
 
Example #5
Source File: EvalTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testStringHashLabels() {
    INDArray trueOutcome = FeatureUtil.toOutcomeVector(0, 2);
    INDArray predictedOutcome = FeatureUtil.toOutcomeVector(0, 2);

    Map<Integer, String> labelsMap = new HashMap<>();
    labelsMap.put(0, "hobbs");
    labelsMap.put(1, "cal");

    Evaluation eval = new Evaluation(labelsMap);

    eval.eval(trueOutcome, predictedOutcome);
    assertEquals(1, eval.classCount(0));
    assertEquals(labelsMap.get(0), eval.getClassLabel(0));

}
 
Example #6
Source File: EvalTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testStringListLabels() {
    INDArray trueOutcome = FeatureUtil.toOutcomeVector(0, 2);
    INDArray predictedOutcome = FeatureUtil.toOutcomeVector(0, 2);

    List<String> labelsList = new ArrayList<>();
    labelsList.add("hobbs");
    labelsList.add("cal");

    Evaluation eval = new Evaluation(labelsList);

    eval.eval(trueOutcome, predictedOutcome);
    assertEquals(1, eval.classCount(0));
    assertEquals(labelsList.get(0), eval.getClassLabel(0));

}
 
Example #7
Source File: TestLegacyJsonLoading.java    From deeplearning4j with Apache License 2.0
@Test
public void testEvalLegacyFormat() throws Exception {

    File f = new ClassPathResource("regression_testing/eval_100b/evaluation.json").getFile();
    String s = FileUtils.readFileToString(f, StandardCharsets.UTF_8);
//    System.out.println(s);

    Evaluation e = Evaluation.fromJson(s);

    assertEquals(0.78, e.accuracy(), 1e-4);
    assertEquals(0.80, e.precision(), 1e-4);
    assertEquals(0.7753, e.f1(), 1e-3);

    f = new ClassPathResource("regression_testing/eval_100b/regressionEvaluation.json").getFile();
    s = FileUtils.readFileToString(f, StandardCharsets.UTF_8);
    RegressionEvaluation re = RegressionEvaluation.fromJson(s);
    assertEquals(6.53809e-02, re.meanSquaredError(0), 1e-4);
    assertEquals(3.46236e-01, re.meanAbsoluteError(1), 1e-4);

    f = new ClassPathResource("regression_testing/eval_100b/rocMultiClass.json").getFile();
    s = FileUtils.readFileToString(f, StandardCharsets.UTF_8);
    ROCMultiClass r = ROCMultiClass.fromJson(s);

    assertEquals(0.9838, r.calculateAUC(0), 1e-4);
    assertEquals(0.7934, r.calculateAUC(1), 1e-4);
}
 
Example #8
Source File: EvalTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testLabelReset(){

    Map<Integer,String> m = new HashMap<>();
    m.put(0, "False");
    m.put(1, "True");

    Evaluation e1 = new Evaluation(m);
    INDArray zero = Nd4j.create(new double[]{1,0}).reshape(1,2);
    INDArray one = Nd4j.create(new double[]{0,1}).reshape(1,2);

    e1.eval(zero, zero);
    e1.eval(zero, zero);
    e1.eval(one, zero);
    e1.eval(one, one);
    e1.eval(one, one);
    e1.eval(one, one);

    String s1 = e1.stats();
//    System.out.println(s1);

    e1.reset();
    e1.eval(zero, zero);
    e1.eval(zero, zero);
    e1.eval(one, zero);
    e1.eval(one, one);
    e1.eval(one, one);
    e1.eval(one, one);

    String s2 = e1.stats();
    assertEquals(s1, s2);
}
 
Example #9
Source File: RNNTestCases.java    From deeplearning4j with Apache License 2.0
@Override
public IEvaluation[] getNewEvaluations(){
    return new IEvaluation[]{
            new Evaluation(),
            new ROCMultiClass(),
            new EvaluationCalibration()
    };
}
 
Example #10
Source File: MultiLayerNetwork.java    From deeplearning4j with Apache License 2.0
/**
 * Perform inference and then calculate the F1 score of the network's output for the given input vs. the provided labels.
 *
 * @param input  the input to perform inference with
 * @param labels the true labels
 * @return the F1 score for the given input/label pairs
 */
@Override
public double f1Score(INDArray input, INDArray labels) {
    feedForward(input);
    setLabels(labels);
    Evaluation eval = new Evaluation();
    eval.eval(labels, output(input));
    return eval.f1();
}
 
Example #11
Source File: EvalTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testEvalInvalid() {
    Evaluation e = new Evaluation(5);
    e.eval(0, 1);
    e.eval(1, 0);
    e.eval(1, 1);

//    System.out.println(e.stats());
    e.stats();

    assertFalse(e.stats().contains("\uFFFD"));
}
 
Example #12
Source File: EvalTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testSingleClassBinaryClassification() {

    Evaluation eval = new Evaluation(1);

    for (int xe = 0; xe < 3; xe++) {
        INDArray zero = Nd4j.create(1,1);
        INDArray one = Nd4j.ones(1,1);

        //One incorrect, three correct
        eval.eval(one, zero);
        eval.eval(one, one);
        eval.eval(one, one);
        eval.eval(zero, zero);

//        System.out.println(eval.stats());
        eval.stats();

        assertEquals(0.75, eval.accuracy(), 1e-6);
        assertEquals(4, eval.getNumRowCounter());

        assertEquals(1, (int) eval.truePositives().get(0));
        assertEquals(2, (int) eval.truePositives().get(1));
        assertEquals(1, (int) eval.falseNegatives().get(1));

        eval.reset();
    }
}
 
Example #13
Source File: EvalTest.java    From deeplearning4j with Apache License 2.0
private static void checkEvaluationEquality(Evaluation evalExpected, Evaluation evalActual) {
    assertEquals(evalExpected.accuracy(), evalActual.accuracy(), 1e-3);
    assertEquals(evalExpected.f1(), evalActual.f1(), 1e-3);
    assertEquals(evalExpected.getNumRowCounter(), evalActual.getNumRowCounter(), 1e-3);
    assertMapEquals(evalExpected.falseNegatives(), evalActual.falseNegatives());
    assertMapEquals(evalExpected.falsePositives(), evalActual.falsePositives());
    assertMapEquals(evalExpected.trueNegatives(), evalActual.trueNegatives());
    assertMapEquals(evalExpected.truePositives(), evalActual.truePositives());
    assertEquals(evalExpected.precision(), evalActual.precision(), 1e-3);
    assertEquals(evalExpected.recall(), evalActual.recall(), 1e-3);
    assertEquals(evalExpected.falsePositiveRate(), evalActual.falsePositiveRate(), 1e-3);
    assertEquals(evalExpected.falseNegativeRate(), evalActual.falseNegativeRate(), 1e-3);
    assertEquals(evalExpected.falseAlarmRate(), evalActual.falseAlarmRate(), 1e-3);
    assertEquals(evalExpected.getConfusionMatrix(), evalActual.getConfusionMatrix());
}
 
Example #14
Source File: EvalTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testBinaryCase() {
    INDArray ones10 = Nd4j.ones(10, 1);
    INDArray ones4 = Nd4j.ones(4, 1);
    INDArray zeros4 = Nd4j.zeros(4, 1);
    INDArray ones3 = Nd4j.ones(3, 1);
    INDArray zeros3 = Nd4j.zeros(3, 1);
    INDArray zeros2 = Nd4j.zeros(2, 1);

    Evaluation e = new Evaluation();
    e.eval(ones10, ones10); //10 true positives
    e.eval(ones3, zeros3); //3 false negatives
    e.eval(zeros4, ones4); //4 false positives
    e.eval(zeros2, zeros2); //2 true negatives


    assertEquals((10 + 2) / (double) (10 + 3 + 4 + 2), e.accuracy(), 1e-6);
    assertEquals(10, (int) e.truePositives().get(1));
    assertEquals(3, (int) e.falseNegatives().get(1));
    assertEquals(4, (int) e.falsePositives().get(1));
    assertEquals(2, (int) e.trueNegatives().get(1));

    //If we switch the label around: tp becomes tn, fp becomes fn, etc
    assertEquals(10, (int) e.trueNegatives().get(0));
    assertEquals(3, (int) e.falsePositives().get(0));
    assertEquals(4, (int) e.falseNegatives().get(0));
    assertEquals(2, (int) e.truePositives().get(0));
}
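
From the same counts, per-class precision and recall follow directly. The checks below are a sketch using the per-class precision(int) and recall(int) accessors, consistent with the totals asserted above.

// Sketch: for class 1, precision = TP / (TP + FP) = 10 / (10 + 4) and recall = TP / (TP + FN) = 10 / (10 + 3).
assertEquals(10.0 / (10 + 4), e.precision(1), 1e-6);
assertEquals(10.0 / (10 + 3), e.recall(1), 1e-6);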
 
Example #15
Source File: EvalTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testFalsePerfectRecall() {
    int testSize = 100;
    int numClasses = 5;
    int winner = 1;
    int seed = 241;

    INDArray labels = Nd4j.zeros(testSize, numClasses);
    INDArray predicted = Nd4j.zeros(testSize, numClasses);

    Nd4j.getRandom().setSeed(seed);
    Random r = new Random(seed);

    //Modelling the situation where the system predicts the same class every time
    for (int i = 0; i < testSize; i++) {
        //Generating random prediction but with a guaranteed winner
        INDArray rand = Nd4j.rand(1, numClasses);
        rand.put(0, winner, rand.sumNumber());
        rand.divi(rand.sumNumber());
        predicted.put(new INDArrayIndex[] {NDArrayIndex.point(i), all()}, rand);
        //Generating random label
        int label = r.nextInt(numClasses);
        labels.putScalar(new int[] {i, label}, 1.0);
    }

    //Explicitly specify the number of classes
    Evaluation eval = new Evaluation(numClasses);
    eval.eval(labels, predicted);

    //For sure we shouldn't arrive at 100% recall unless we guessed everything right for every class
    assertNotEquals(1.0, eval.recall());
}
 
Example #16
Source File: EvalTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testEvaluationNaNs(){

    Evaluation e = new Evaluation();
    INDArray predictions = Nd4j.create(new double[]{0.1, Double.NaN, 0.3}, new long[]{1,3});
    INDArray labels = Nd4j.create(new double[]{0, 0, 1}, new long[]{1,3});

    try {
        e.eval(labels, predictions);
    } catch (IllegalStateException ex){
        assertTrue(ex.getMessage().contains("NaN"));
    }

}
 
Example #17
Source File: SparkComputationGraph.java    From deeplearning4j with Apache License 2.0
/**
 * Evaluate the single-output network on a directory containing a set of MultiDataSet objects to be loaded with a {@link MultiDataSetLoader}.
 * Uses default batch size of {@link #DEFAULT_EVAL_SCORE_BATCH_SIZE}
 * @param path Path/URI to the directory containing the datasets to load
 * @return Evaluation
 */
public Evaluation evaluate(String path, MultiDataSetLoader loader){
    JavaRDD<String> data;
    try {
        data = SparkUtils.listPaths(sc, path);
    } catch (IOException e){
        throw new RuntimeException("Error listing files for evaluation of files at path: " + path, e);
    }
    return (Evaluation) doEvaluation(data, DEFAULT_EVAL_WORKERS, DEFAULT_EVAL_SCORE_BATCH_SIZE, null, loader, new Evaluation())[0];
}
 
Example #18
Source File: SparkComputationGraph.java    From deeplearning4j with Apache License 2.0
/**
 * Evaluate the single-output network on a directory containing a set of DataSet objects to be loaded with a {@link DataSetLoader}.
 * Uses default batch size of {@link #DEFAULT_EVAL_SCORE_BATCH_SIZE}
 * @param path Path/URI to the directory containing the datasets to load
 * @return Evaluation
 */
public Evaluation evaluate(String path, DataSetLoader loader){
    JavaRDD<String> data;
    try {
        data = SparkUtils.listPaths(sc, path);
    } catch (IOException e){
        throw new RuntimeException("Error listing files for evaluation of files at path: " + path, e);
    }
    return (Evaluation) doEvaluation(data, DEFAULT_EVAL_WORKERS, DEFAULT_EVAL_SCORE_BATCH_SIZE, loader, (MultiDataSetLoader)null, new Evaluation())[0];
}
 
Example #19
Source File: SparkDl4jMultiLayer.java    From deeplearning4j with Apache License 2.0
/**
 * Evaluate on a directory containing a set of DataSet objects to be loaded with a {@link DataSetLoader}.
 * @param path      Path/URI to the directory containing the datasets to load
 * @param batchSize Evaluation minibatch size
 * @param loader    Loader used to deserialize each DataSet
 * @return Evaluation
 */
public <T extends Evaluation> T evaluate(String path, int batchSize, DataSetLoader loader){
    JavaRDD<String> paths;
    try {
        paths = SparkUtils.listPaths(sc, path);
    } catch (IOException e) {
        throw new RuntimeException("Error listing paths in directory", e);
    }

    JavaRDD<DataSet> rdd = paths.map(new LoadDataSetFunction(loader, new RemoteFileSourceFactory(BroadcastHadoopConfigHolder.get(sc))));
    return (T)doEvaluation(rdd, batchSize, new org.deeplearning4j.eval.Evaluation())[0];
}
 
Example #20
Source File: SameDiffTrainingTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testTrainingEvalVarNotReqForLoss(){
    //If a variable is not required for the loss - normally it won't be calculated
    //But we want to make sure it IS calculated here - so we can perform evaluation on it

    SameDiff sd = SameDiff.create();
    SDVariable in = sd.placeHolder("in", DataType.FLOAT, -1, 4);
    SDVariable label = sd.placeHolder("label", DataType.FLOAT, -1, 3);
    SDVariable w = sd.var("w", Nd4j.rand(DataType.FLOAT, 4, 3));
    SDVariable z = in.mmul(w);
    SDVariable out = sd.nn.softmax("softmax", z);
    SDVariable loss = sd.loss.logLoss("loss", label, out);
    SDVariable notRequiredForLoss = sd.nn.softmax("notRequiredForLoss", z);

    sd.setTrainingConfig(TrainingConfig.builder()
            .updater(new Adam(0.001))
            .dataSetFeatureMapping("in")
            .dataSetLabelMapping("label")
            .trainEvaluation("notRequiredForLoss", 0, new Evaluation())
            .build());

//    sd.setListeners(new ScoreListener(1));

    DataSet ds = new DataSet(Nd4j.rand(DataType.FLOAT, 3, 4), Nd4j.createFromArray(new float[][]{{1,0,0}, {0,1,0}, {0,0,1}}));

    History h = sd.fit()
            .train(new SingletonDataSetIterator(ds), 4)
            .exec();

    List<Double> l = h.trainingEval(Evaluation.Metric.ACCURACY);
    assertEquals(4, l.size());
}
 
Example #21
Source File: ListenerTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testCustomListener() {
    SameDiff sd = SameDiff.create();
    SDVariable in = sd.placeHolder("input", DataType.FLOAT, -1, 4);
    SDVariable label = sd.placeHolder("label", DataType.FLOAT, -1, 3);
    SDVariable w = sd.var("w", Nd4j.rand(DataType.FLOAT, 4, 3));
    SDVariable b = sd.var("b", Nd4j.rand(DataType.FLOAT, 3));
    SDVariable z = sd.nn().linear("z", in, w, b);
    SDVariable out = sd.nn().softmax("out", z, 1);
    SDVariable loss = sd.loss().softmaxCrossEntropy("loss", label, out, null);

    //Create and set the training configuration
    double learningRate = 1e-3;
    TrainingConfig config = new TrainingConfig.Builder()
            .l2(1e-4)                               //L2 regularization
            .updater(new Adam(learningRate))        //Adam optimizer with specified learning rate
            .dataSetFeatureMapping("input")         //DataSet features array should be associated with variable "input"
            .dataSetLabelMapping("label")           //DataSet label array should be associated with variable "label
            .addEvaluations(false,"out",0,new Evaluation())
            .build();
    sd.setTrainingConfig(config);

    CustomListener listener = new CustomListener();
    Map<String,INDArray> m = sd.output()
            .data(new IrisDataSetIterator(150, 150))
            .output("out")
            .listeners(listener)
            .exec();

    assertEquals(1, m.size());
    assertTrue(m.containsKey("out"));
    assertNotNull(listener.z);
    assertNotNull(listener.out);

}
 
Example #22
Source File: UIListenerTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testUIListenerBasic() throws Exception {
    Nd4j.getRandom().setSeed(12345);

    IrisDataSetIterator iter = new IrisDataSetIterator(150, 150);

    SameDiff sd = getSimpleNet();

    File dir = testDir.newFolder();
    File f = new File(dir, "logFile.bin");
    UIListener l = UIListener.builder(f)
            .plotLosses(1)
            .trainEvaluationMetrics("softmax", 0, Evaluation.Metric.ACCURACY, Evaluation.Metric.F1)
            .updateRatios(1)
            .build();

    sd.setListeners(l);

    sd.setTrainingConfig(TrainingConfig.builder()
            .dataSetFeatureMapping("in")
            .dataSetLabelMapping("label")
            .updater(new Adam(1e-1))
            .weightDecay(1e-3, true)
            .build());

    sd.fit(iter, 20);

    //Test inference after training with UI Listener still around
    Map<String, INDArray> m = new HashMap<>();
    iter.reset();
    m.put("in", iter.next().getFeatures());
    INDArray out = sd.outputSingle(m, "softmax");
    assertNotNull(out);
    assertArrayEquals(new long[]{150, 3}, out.shape());
}
 
Example #23
Source File: UIListener.java    From deeplearning4j with Apache License 2.0
@Override
public ListenerResponse epochEnd(SameDiff sd, At at, LossCurve lossCurve, long epochTimeMillis) {

    //If any training evaluation, report it here:
    if(epochTrainEval != null){
        long time = System.currentTimeMillis();
        for(Map.Entry<Pair<String,Integer>,Evaluation> e : epochTrainEval.entrySet()){
            String n = "evaluation/" + e.getKey().getFirst();   //TODO what if user does same eval with multiple labels? Doesn't make sense... add validation to ensure this?

            List<Evaluation.Metric> l = trainEvalMetrics.get(e.getKey());
            for(Evaluation.Metric m : l) {
                String mName = n + "/train/" + m.toString().toLowerCase();
                if (!wroteEvalNames) {
                    if(!writer.registeredEventName(mName)) {    //Might have been registered if continuing training
                        writer.registerEventNameQuiet(mName);
                    }
                }

                double score = e.getValue().scoreForMetric(m);
                try{
                    writer.writeScalarEvent(mName, LogFileWriter.EventSubtype.EVALUATION, time, at.iteration(), at.epoch(), score);
                } catch (IOException ex){
                    throw new RuntimeException("Error writing to log file", ex);
                }
            }

            wroteEvalNames = true;
        }
    }

    epochTrainEval = null;
    return ListenerResponse.CONTINUE;
}
 
Example #24
Source File: SameDiffRNNTestCases.java    From deeplearning4j with Apache License 2.0
@Override
public IEvaluation[] getNewEvaluations() {
    return new IEvaluation[]{
            new Evaluation(),
            new ROCMultiClass(),
            new EvaluationCalibration()
    };
}
 
Example #25
Source File: EvaluativeListener.java    From deeplearning4j with Apache License 2.0
public EvaluativeListener(@NonNull MultiDataSet multiDataSet, int frequency, @NonNull InvocationType type) {
    this(multiDataSet, frequency, type, new Evaluation());
}
 
Example #26
Source File: CustomerRetentionPredictionExample.java    From Java-Deep-Learning-Cookbook with MIT License
public static void main(String[] args) throws IOException, InterruptedException {

       final int labelIndex=11;
       final int batchSize=8;
       final int numClasses=2;
       final INDArray weightsArray = Nd4j.create(new double[]{0.57, 0.75});

       final RecordReader recordReader = generateReader(new ClassPathResource("Churn_Modelling.csv").getFile());
       final DataSetIterator dataSetIterator = new RecordReaderDataSetIterator.Builder(recordReader,batchSize)
                                                                .classification(labelIndex,numClasses)
                                                                .build();
       final DataNormalization dataNormalization = new NormalizerStandardize();
       dataNormalization.fit(dataSetIterator);
       dataSetIterator.setPreProcessor(dataNormalization);
       final DataSetIteratorSplitter dataSetIteratorSplitter = new DataSetIteratorSplitter(dataSetIterator,1250,0.8);

       log.info("Building Model------------------->>>>>>>>>");

        final MultiLayerConfiguration configuration = new NeuralNetConfiguration.Builder()
                                                                    .weightInit(WeightInit.RELU_UNIFORM)
                                                                    .updater(new Adam(0.015D))
                                                                    .list()
                                                                    .layer(new DenseLayer.Builder().nIn(11).nOut(6).activation(Activation.RELU).dropOut(0.9).build())
                                                                    .layer(new DenseLayer.Builder().nIn(6).nOut(6).activation(Activation.RELU).dropOut(0.9).build())
                                                                    .layer(new DenseLayer.Builder().nIn(6).nOut(4).activation(Activation.RELU).dropOut(0.9).build())
                                                                    .layer(new OutputLayer.Builder(new LossMCXENT(weightsArray)).nIn(4).nOut(2).activation(Activation.SOFTMAX).build())
                                                                    .build();

        final UIServer uiServer = UIServer.getInstance();
        final StatsStorage statsStorage = new InMemoryStatsStorage();

        final MultiLayerNetwork multiLayerNetwork = new MultiLayerNetwork(configuration);
        multiLayerNetwork.init();
        multiLayerNetwork.setListeners(new ScoreIterationListener(100),
                                       new StatsListener(statsStorage));
        uiServer.attach(statsStorage);
        multiLayerNetwork.fit(dataSetIteratorSplitter.getTrainIterator(),100);

        final Evaluation evaluation =  multiLayerNetwork.evaluate(dataSetIteratorSplitter.getTestIterator(),Arrays.asList("0","1"));
        System.out.println(evaluation.stats());

        final File file = new File("model.zip");
        ModelSerializer.writeModel(multiLayerNetwork,file,true);
        ModelSerializer.addNormalizerToModel(file,dataNormalization);
}
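
The generateReader(...) helper is not shown in this excerpt. One possible minimal implementation is sketched below; it assumes the CSV has a single header row and only numeric columns, whereas the real Churn_Modelling.csv also contains categorical columns that would need an additional DataVec transform step before training.

// Hypothetical sketch of generateReader(...): a plain CSV reader that skips the header row.
// Categorical columns would require an additional transform step, omitted here.
private static RecordReader generateReader(File file) throws IOException, InterruptedException {
    final CSVRecordReader recordReader = new CSVRecordReader(1);
    recordReader.initialize(new FileSplit(file));
    return recordReader;
}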
 
Example #27
Source File: EvaluativeListener.java    From deeplearning4j with Apache License 2.0
public EvaluativeListener(@NonNull DataSet dataSet, int frequency, @NonNull InvocationType type) {
    this(dataSet, frequency, type, new Evaluation());
}
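
A typical way to attach such a listener is sketched below; the network variable net, the held-out DataSet testData, and the InvocationType.EPOCH_END constant are assumptions for illustration.

// Sketch: run an Evaluation on 'testData' at the end of every epoch while net.fit(...) is running.
net.setListeners(new EvaluativeListener(testData, 1, InvocationType.EPOCH_END));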
 
Example #28
Source File: TestEarlyStoppingCompGraph.java    From deeplearning4j with Apache License 2.0
@Test
public void testClassificationScoreFunctionSimple() throws Exception {

    for(Evaluation.Metric metric : Evaluation.Metric.values()) {
        log.info("Metric: " + metric);

        ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
                .graphBuilder()
                .addInputs("in")
                .layer("0", new DenseLayer.Builder().nIn(784).nOut(32).build(), "in")
                .layer("1", new OutputLayer.Builder().nIn(32).nOut(10).activation(Activation.SOFTMAX).build(), "0")
                .setOutputs("1")
                .build();

        ComputationGraph net = new ComputationGraph(conf);
        net.init();

        DataSetIterator iter = new MnistDataSetIterator(32, false, 12345);

        List<DataSet> l = new ArrayList<>();
        for( int i=0; i<10; i++ ){
            DataSet ds = iter.next();
            l.add(ds);
        }

        iter = new ExistingDataSetIterator(l);

        EarlyStoppingModelSaver<ComputationGraph> saver = new InMemoryModelSaver<>();
        EarlyStoppingConfiguration<ComputationGraph> esConf =
                new EarlyStoppingConfiguration.Builder<ComputationGraph>()
                        .epochTerminationConditions(new MaxEpochsTerminationCondition(5))
                        .iterationTerminationConditions(
                                new MaxTimeIterationTerminationCondition(1, TimeUnit.MINUTES))
                        .scoreCalculator(new ClassificationScoreCalculator(metric, iter)).modelSaver(saver)
                        .build();

        EarlyStoppingGraphTrainer trainer = new EarlyStoppingGraphTrainer(esConf, net, iter);
        EarlyStoppingResult<ComputationGraph> result = trainer.fit();

        assertNotNull(result.getBestModel());
    }
}
 
Example #29
Source File: EvaluationScoreFunction.java    From deeplearning4j with Apache License 2.0
@Override
public double score(ComputationGraph graph, MultiDataSetIterator iterator) {
    Evaluation e = graph.evaluate(iterator);
    return e.scoreForMetric(metric);
}
 
Example #30
Source File: TestEarlyStopping.java    From deeplearning4j with Apache License 2.0
@Test
public void testEarlyStoppingMaximizeScore() throws Exception {
    Nd4j.getRandom().setSeed(12345);

    int outputs = 2;

    DataSet ds = new DataSet(
            Nd4j.rand(new int[]{3, 10, 50}),
            TestUtils.randomOneHotTimeSeries(3, outputs, 50, 12345));
    DataSetIterator train = new ExistingDataSetIterator(
            Arrays.asList(ds, ds, ds, ds, ds, ds, ds, ds, ds, ds));
    DataSetIterator test = new SingletonDataSetIterator(ds);


    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .seed(123)
            .weightInit(WeightInit.XAVIER)
            .updater(new Adam(0.1))
            .activation(Activation.ELU)
            .l2(1e-5)
            .gradientNormalization(GradientNormalization
                    .ClipElementWiseAbsoluteValue)
            .gradientNormalizationThreshold(1.0)
            .list()
            .layer(0, new LSTM.Builder()
                    .nIn(10)
                    .nOut(10)
                    .activation(Activation.TANH)
                    .gateActivationFunction(Activation.SIGMOID)
                    .dropOut(0.5)
                    .build())
            .layer(1, new RnnOutputLayer.Builder()
                    .nIn(10)
                    .nOut(outputs)
                    .activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT)
                    .build())
            .build();

    File f = testDir.newFolder();
    EarlyStoppingModelSaver<MultiLayerNetwork> saver = new LocalFileModelSaver(f.getAbsolutePath());
    EarlyStoppingConfiguration<MultiLayerNetwork> esConf =
            new EarlyStoppingConfiguration.Builder<MultiLayerNetwork>()
                    .epochTerminationConditions(
                            new MaxEpochsTerminationCondition(10),
                            new ScoreImprovementEpochTerminationCondition(1))
                    .iterationTerminationConditions(
                            new MaxTimeIterationTerminationCondition(10, TimeUnit.MINUTES))
                    .scoreCalculator(new ClassificationScoreCalculator(Evaluation.Metric.F1, test))
                    .modelSaver(saver)
                    .saveLastModel(true)
                    .build();

    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();

    EarlyStoppingTrainer t = new EarlyStoppingTrainer(esConf, net, train);
    EarlyStoppingResult<MultiLayerNetwork> result = t.fit();

    Map<Integer,Double> map = result.getScoreVsEpoch();
    for( int i=1; i<map.size(); i++ ){
        if(i == map.size() - 1){
            assertTrue(map.get(i) <= map.get(i-1));
        } else {
            assertTrue(map.get(i) > map.get(i-1));
        }
    }
}