Java Code Examples for weka.classifiers.Evaluation#errorRate()

The following examples show how to use weka.classifiers.Evaluation#errorRate(). The source project and file for each example are noted above its code.
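Before the project-specific examples, here is a minimal, self-contained sketch of the basic pattern: build an Evaluation on a data set, run a cross-validation, and read errorRate(). The ARFF path, the J48 base classifier, the 10 folds and the fixed random seed are illustrative assumptions, not taken from the examples below.

import java.util.Random;

import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class ErrorRateSketch {
    public static void main(String[] args) throws Exception {
        // Load a data set and mark the last attribute as the class (hypothetical file).
        Instances data = DataSource.read("data.arff");
        data.setClassIndex(data.numAttributes() - 1);

        // Cross-validate a classifier and query the aggregate error rate.
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(new J48(), data, 10, new Random(1));

        // errorRate() returns the fraction of incorrectly classified instances.
        System.out.println("CV error rate: " + eval.errorRate());
        System.out.println("CV accuracy:   " + (1 - eval.errorRate()));
    }
}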
Example 1
Source File: TransformEnsembles.java    From tsml with GNU General Public License v3.0
public void findCVWeights() throws Exception {
    cvWeights = new double[nosTransforms];
    // Use leave-one-out CV for small data sets, 10-fold CV otherwise.
    int folds = numInstances;
    if (folds > THRESHOLD1) {
        folds = 10;
    }
    System.out.print("\n Finding CV Accuracy: ");
    for (int i = 0; i < nosTransforms; i++) {
        Evaluation evaluation = new Evaluation(train.get(i));
        // The first transform (i == 0) is cross-validated with baseTime, the rest with base.
        if (i == 0) {
            evaluation.crossValidateModel(AbstractClassifier.makeCopy(baseTime), train.get(i), folds, new Random());
        } else {
            evaluation.crossValidateModel(AbstractClassifier.makeCopy(base), train.get(i), folds, new Random());
        }
        // Weight each transform by its cross-validated accuracy.
        cvWeights[i] = 1 - evaluation.errorRate();
        System.out.print("," + cvWeights[i]);
    }
    System.out.print("\n");
}
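In this example the cross-validated error rate is turned into an accuracy, cvWeights[i] = 1 - errorRate(), so each transform's weight reflects how well the base classifier performs on that transformed view of the training data. Leave-one-out cross-validation (folds = numInstances) is used unless the data set has more than THRESHOLD1 instances, in which case 10 folds are used.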
 
Example 2
Source File: CVParameterSelection.java    From tsml with GNU General Public License v3.0
/**
 * Finds the best parameter combination (recursive for each parameter
 * being optimised).
 *
 * @param depth the index of the parameter to be optimised at this level
 * @param trainData the data the search is based on
 * @param random a random number generator
 * @throws Exception if an error occurs
 */
protected void findParamsByCrossValidation(int depth, Instances trainData,
                                           Random random) throws Exception {

  if (depth < m_CVParams.size()) {
    CVParameter cvParam = (CVParameter) m_CVParams.elementAt(depth);

    // When the given upper bound lies below the lower bound, special codes
    // substitute the number of attributes (case 1) or the training fold
    // size (case 2) as the upper bound.
    double upper;
    switch ((int) (cvParam.m_Lower - cvParam.m_Upper + 0.5)) {
    case 1:
      upper = m_NumAttributes;
      break;
    case 2:
      upper = m_TrainFoldSize;
      break;
    default:
      upper = cvParam.m_Upper;
      break;
    }
    double increment = (upper - cvParam.m_Lower) / (cvParam.m_Steps - 1);
    for (cvParam.m_ParamValue = cvParam.m_Lower;
         cvParam.m_ParamValue <= upper;
         cvParam.m_ParamValue += increment) {
      findParamsByCrossValidation(depth + 1, trainData, random);
    }
  } else {

    Evaluation evaluation = new Evaluation(trainData);

    // Set the classifier options
    String[] options = createOptions();
    if (m_Debug) {
      System.err.print("Setting options for "
          + m_Classifier.getClass().getName() + ":");
      for (int i = 0; i < options.length; i++) {
        System.err.print(" " + options[i]);
      }
      System.err.println("");
    }
    ((OptionHandler) m_Classifier).setOptions(options);
    for (int j = 0; j < m_NumFolds; j++) {

      // We want to randomize the data the same way for every
      // learning scheme.
      Instances train = trainData.trainCV(m_NumFolds, j, new Random(1));
      Instances test = trainData.testCV(m_NumFolds, j);
      m_Classifier.buildClassifier(train);
      evaluation.setPriors(train);
      evaluation.evaluateModel(m_Classifier, test);
    }
    double error = evaluation.errorRate();
    if (m_Debug) {
      System.err.println("Cross-validated error rate: "
          + Utils.doubleToString(error, 6, 4));
    }
    // Keep the options with the lowest cross-validated error (-99 marks "no result yet").
    if ((m_BestPerformance == -99) || (error < m_BestPerformance)) {
      m_BestPerformance = error;
      m_BestClassifierOptions = createOptions();
    }
  }
}
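Here findParamsByCrossValidation recursively enumerates every combination of the parameters in m_CVParams. At the deepest level it builds the candidate option string, runs an m_NumFolds cross-validation with it, reads evaluation.errorRate(), and remembers the options that produced the lowest error.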
 
Example 3
Source File: LogisticBase.java    From tsml with GNU General Public License v3.0
/**
 * Returns the misclassification error of the current model on a set of instances.
 *
 * @param data the set of instances
 * @return the error rate
 * @throws Exception if something goes wrong
 */
protected double getErrorRate(Instances data) throws Exception {
    Evaluation eval = new Evaluation(data);
    eval.evaluateModel(this, data);
    return eval.errorRate();
}
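This helper evaluates the current model (this) on the supplied instances and returns the fraction it misclassifies; when data is the training set, the result is a resubstitution error rather than a cross-validated estimate.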