Java Code Examples for org.deeplearning4j.eval.Evaluation#f1()

The following examples show how to use org.deeplearning4j.eval.Evaluation#f1() . You can vote up the examples you like and vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage examples in the sidebar.
Example 1
Source File: DenseTest.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testMLPMultiLayerBackprop() {
    // Train two identically-configured backprop networks on the same data;
    // their parameters and F1 scores should match.
    MultiLayerNetwork netA = getDenseMLNConfig(true, false);
    netA.fit(iter);

    MultiLayerNetwork netB = getDenseMLNConfig(true, false);
    netB.fit(iter);
    iter.reset();

    DataSet testData = iter.next();

    assertEquals(netA.params(), netB.params());

    Evaluation evalA = new Evaluation();
    evalA.eval(testData.getLabels(), netA.output(testData.getFeatures()));
    double scoreA = evalA.f1();

    Evaluation evalB = new Evaluation();
    evalB.eval(testData.getLabels(), netB.output(testData.getFeatures()));
    double scoreB = evalB.f1();

    assertEquals(scoreA, scoreB, 1e-4);
}
 
Example 2
Source File: RnnLossLayer.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
/**
 * {@inheritDoc}
 */
@Override
public double f1Score(INDArray examples, INDArray labels) {
    // Evaluate the time-series activations against the labels and report F1.
    INDArray predictions = activate(examples, false, null);
    Evaluation evaluation = new Evaluation();
    evaluation.evalTimeSeries(labels, predictions, maskArray);
    return evaluation.f1();
}
 
Example 3
Source File: CnnLossLayer.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
/**
 * {@inheritDoc}
 */
@Override
public double f1Score(INDArray examples, INDArray labels) {
    INDArray out = activate(examples, false, null); //TODO
    Evaluation eval = new Evaluation();
    eval.evalTimeSeries(labels, out, maskArray);
    return eval.f1();
}
 
Example 4
Source File: Cnn3DLossLayer.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
/**
 * {@inheritDoc}
 */
@Override
public double f1Score(INDArray examples, INDArray labels) {
    INDArray out = activate(examples, false, null); //TODO
    Evaluation eval = new Evaluation();
    eval.evalTimeSeries(labels, out, maskArray);
    return eval.f1();
}
 
Example 5
Source File: ConvolutionLayerTest.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testCNNMLNPretrain() throws Exception {
    // Note: CNNs do not actually perform pretraining; this checks that
    // two identically-configured nets end up with matching F1 scores.
    int numSamples = 10;
    int batchSize = 10;
    DataSetIterator mnistIter = new MnistDataSetIterator(batchSize, numSamples, true);

    MultiLayerNetwork netA = getCNNMLNConfig(false, true);
    netA.fit(mnistIter);

    mnistIter.reset();

    MultiLayerNetwork netB = getCNNMLNConfig(false, true);
    netB.fit(mnistIter);
    mnistIter.reset();

    DataSet testData = mnistIter.next();

    Evaluation evalA = new Evaluation();
    evalA.eval(testData.getLabels(), netA.output(testData.getFeatures()));
    double scoreA = evalA.f1();

    Evaluation evalB = new Evaluation();
    evalB.eval(testData.getLabels(), netB.output(testData.getFeatures()));
    double scoreB = evalB.f1();

    assertEquals(scoreA, scoreB, 1e-4);
}
 
Example 6
Source File: ConvolutionLayerTest.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testCNNMLNBackprop() throws Exception {
    // Two identically-configured backprop CNNs trained on the same MNIST
    // subset should produce matching F1 scores.
    int numSamples = 10;
    int batchSize = 10;
    DataSetIterator mnistIter = new MnistDataSetIterator(batchSize, numSamples, true);

    MultiLayerNetwork netA = getCNNMLNConfig(true, false);
    netA.fit(mnistIter);

    MultiLayerNetwork netB = getCNNMLNConfig(true, false);
    netB.fit(mnistIter);

    mnistIter.reset();
    DataSet testData = mnistIter.next();

    Evaluation evalA = new Evaluation();
    evalA.eval(testData.getLabels(), netA.output(testData.getFeatures()));
    double scoreA = evalA.f1();

    Evaluation evalB = new Evaluation();
    evalB.eval(testData.getLabels(), netB.output(testData.getFeatures()));
    double scoreB = evalB.f1();

    assertEquals(scoreA, scoreB, 1e-4);
}
 
Example 7
Source File: DenseTest.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testMLPMultiLayerPretrain() {
    // Note: CNN does not do pretrain. Two identically-configured networks
    // should converge to identical parameters and F1 scores.
    MultiLayerNetwork netA = getDenseMLNConfig(false, true);
    netA.fit(iter);

    MultiLayerNetwork netB = getDenseMLNConfig(false, true);
    netB.fit(iter);
    iter.reset();

    DataSet testData = iter.next();

    assertEquals(netA.params(), netB.params());

    Evaluation evalA = new Evaluation();
    evalA.eval(testData.getLabels(), netA.output(testData.getFeatures()));
    double scoreA = evalA.f1();

    Evaluation evalB = new Evaluation();
    evalB.eval(testData.getLabels(), netB.output(testData.getFeatures()));
    double scoreB = evalB.f1();

    assertEquals(scoreA, scoreB, 1e-4);
}
 
Example 8
Source File: Vasttext.java    From scava with Eclipse Public License 2.0 4 votes vote down vote up
/**
 * Evaluates the trained model on the supplied data and collects the metrics
 * into a map keyed by metric name (per-label precision/recall/F-score plus
 * micro/macro aggregates, or multi-label losses when {@code multiLabel} is set).
 *
 * @param vasttextMemoryDataContrustor supplies the test-data iterator
 * @return map of metric name to metric value
 * @throws UnsupportedOperationException if no model has been trained or loaded
 */
private HashMap<String, Object> evaluate(DataIteratorConstructor vasttextMemoryDataContrustor)
{
	if (vasttextTextAndNumeric == null && vasttextText == null)
		throw new UnsupportedOperationException("Train or load a model before evaluate");

	HashMap<String, Object> results = new HashMap<String, Object>();
	MultiDataSetIterator testIterator = vasttextMemoryDataContrustor.getDataIterator();

	if (multiLabel)
	{
		System.err.println("Evaluating multi-label model:");
		EvaluationMultiLabel mlEvaluation = new EvaluationMultiLabel(labelsSize, multiLabelActivation);
		// doEvaluation expects an array of evaluations, so we pass a one-element array.
		// We deliberately skip net.evaluate(): it would use Evaluation instead of EvaluationMultiLabel.
		if (vasttextTextAndNumeric != null)
			mlEvaluation = vasttextTextAndNumeric.doEvaluation(testIterator, new EvaluationMultiLabel[] {mlEvaluation})[0];
		else if (vasttextText != null)
			mlEvaluation = vasttextText.doEvaluation(testIterator, new EvaluationMultiLabel[] {mlEvaluation})[0];

		results.put("HammingLoss", mlEvaluation.HammingLoss());
		results.put("SubsetLoss", mlEvaluation.subset01Loss());
		results.put("MicroFscore", mlEvaluation.MicroF());
		results.put("MacroFscore", mlEvaluation.MacroF());
		results.put("VectorsSummary", mlEvaluation.ActualVsPrediction());
		results.put("Labels", labels);
	}
	else
	{
		Evaluation evaluation = null;
		if (vasttextTextAndNumeric != null)
			evaluation = vasttextTextAndNumeric.evaluate(testIterator);
		else
			// The text-only net trains on a MultiDataSet but cannot evaluate one,
			// so the iterator has to be wrapped first.
			evaluation = vasttextText.evaluate(new MultiDataSetWrapperIterator(testIterator));

		results.put("ConfusionMatrix", evaluation.confusionMatrix());
		results.put("Labels", labels);

		double macroSum = 0.0;
		double diagonalTotal = 0.0;   // micro numerator: correctly classified counts
		double overallTotal = 0.0;    // micro denominator: all confusion-matrix entries
		ConfusionMatrix<Integer> confusion = evaluation.getConfusion();
		for (String label : labels)
		{
			int row = labels.indexOf(label);
			results.put("Precision" + label, evaluation.precision(row));
			results.put("Recall" + label, evaluation.recall(row));
			results.put("Fscore" + label, evaluation.f1(row));
			macroSum += evaluation.f1(row);
			for (String other : labels)
			{
				int cellCount = confusion.getCount(row, labels.indexOf(other));
				if (label.equals(other))
					diagonalTotal += cellCount;
				overallTotal += cellCount;
			}
		}
		results.put("MacroFscore", macroSum / labels.size());
		if (overallTotal > 0.0)
			results.put("MicroFscore", diagonalTotal / overallTotal);
		else
			results.put("MicroFscore", 0.0);
	}

	return results;
}
 
Example 9
Source File: TestSetF1ScoreFunction.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Override
public double score(MultiLayerNetwork net, DataSetIterator iterator) {
    // F1 of the network's predictions over the whole iterator.
    return net.evaluate(iterator).f1();
}
 
Example 10
Source File: TestSetF1ScoreFunction.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Override
public double score(ComputationGraph graph, DataSetIterator iterator) {
    // F1 of the graph's predictions over the whole iterator.
    return graph.evaluate(iterator).f1();
}
 
Example 11
Source File: TestSetF1ScoreFunction.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Override
public double score(ComputationGraph graph, MultiDataSetIterator iterator) {
    // F1 of the graph's predictions over the whole multi-dataset iterator.
    return graph.evaluate(iterator).f1();
}
 
Example 12
Source File: LossLayer.java    From deeplearning4j with Apache License 2.0 3 votes vote down vote up
/**
 * Computes the F1 score of this layer's predictions for the given examples.
 * Think of it as a measure of how much the layer got right, on a scale
 * from 0 (worst) to 1 (best).
 *
 * @param examples the examples to classify (one example in each row)
 * @param labels   the true labels
 * @return the F1 score over all examples
 */
@Override
public double f1Score(INDArray examples, INDArray labels) {
    INDArray predictions = activate(examples, false, LayerWorkspaceMgr.noWorkspacesImmutable());
    Evaluation evaluation = new Evaluation();
    evaluation.eval(labels, predictions);
    return evaluation.f1();
}