Java Code Examples for weka.classifiers.evaluation.Evaluation#evaluateModel()

The following examples show how to use weka.classifiers.evaluation.Evaluation#evaluateModel(). Each example is taken from an open-source project; the source file and license are noted above each listing.
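
Before the project examples, here is a minimal, self-contained sketch of the typical evaluateModel() workflow: build a classifier on a training set, then pass it together with a separate test set to Evaluation#evaluateModel() and read the accumulated statistics. The dataset path, the J48 classifier, and the 66/34 split below are illustrative placeholders, not taken from any of the projects.

import java.io.FileReader;
import java.util.Random;

import weka.classifiers.evaluation.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;

public class EvaluateModelSketch {

	public static void main(String[] args) throws Exception {
		// load a dataset and declare the last attribute as the class (placeholder path)
		Instances data = new Instances(new FileReader("data/iris.arff"));
		data.setClassIndex(data.numAttributes() - 1);

		// simple train/test split: roughly two thirds for training, the rest for testing
		data.randomize(new Random(0));
		int trainSize = (int) Math.round(data.numInstances() * 0.66);
		Instances train = new Instances(data, 0, trainSize);
		Instances test = new Instances(data, trainSize, data.numInstances() - trainSize);

		// build the classifier on the training data only
		J48 classifier = new J48();
		classifier.buildClassifier(train);

		// evaluateModel() classifies every test instance and accumulates the statistics
		Evaluation eval = new Evaluation(train);
		eval.evaluateModel(classifier, test);
		System.out.println(eval.toSummaryString());
		System.out.println("Accuracy: " + eval.pctCorrect() + "%");
	}
}
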
Example 1
Source File: WekaUtilTester.java    From AILibs with GNU Affero General Public License v3.0
@Test
public void checkSplit() throws Exception {

	Instances inst = new Instances(new BufferedReader(new FileReader(VOWEL_ARFF)));
	inst.setClassIndex(inst.numAttributes() - 1);
	for (Classifier c : this.portfolio) {

	/* build a stratified split and cross-validate on the full dataset */
		inst.stratify(10);
		Instances train = inst.trainCV(10, 0);
		Instances test = inst.testCV(10, 0);
		Assert.assertEquals(train.size() + test.size(), inst.size());
		Evaluation eval = new Evaluation(train);
		eval.crossValidateModel(c, inst, 10, new Random(0));

	/* train on the split and evaluate on the held-out fold */
	c.buildClassifier(train);
	eval.evaluateModel(c, test);
		System.out.println(eval.pctCorrect());
	}
}
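
Note that Example 1 accumulates the cross-validation predictions and the subsequent test-set predictions in the same Evaluation object, so the printed pctCorrect() mixes the two measurements. Below is a small sketch that keeps them separate, written as a standalone helper method; the method name is illustrative, and the imports (Instances, Classifier, Evaluation, Random) are the same as in Example 1.

// Illustrative helper: report cross-validation accuracy and hold-out accuracy with
// two separate Evaluation objects. Assumes the class index is already set and the
// class attribute is nominal, as in Example 1.
static void reportAccuracies(Classifier c, Instances data) throws Exception {
	data.stratify(10);
	Instances train = data.trainCV(10, 0);
	Instances test = data.testCV(10, 0);

	Evaluation cvEval = new Evaluation(data);
	cvEval.crossValidateModel(c, data, 10, new Random(0));
	System.out.println("10-fold CV accuracy: " + cvEval.pctCorrect());

	// a fresh Evaluation object, so the hold-out figure is not pooled with the CV results
	Evaluation holdOutEval = new Evaluation(train);
	c.buildClassifier(train);
	holdOutEval.evaluateModel(c, test);
	System.out.println("Hold-out accuracy: " + holdOutEval.pctCorrect());
}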
 
Example 2
Source File: BestConf.java    From bestconf with Apache License 2.0
public static void testCOMT2() throws Exception{
	BestConf bestconf = new BestConf();
	Instances trainingSet = DataIOFile.loadDataFromArffFile("data/trainingBestConf0.arff");
	trainingSet.setClassIndex(trainingSet.numAttributes()-1);
	
	Instances samplePoints = LHSInitializer.getMultiDimContinuous(bestconf.getAttributes(), InitialSampleSetSize, false);
	samplePoints.insertAttributeAt(trainingSet.classAttribute(), samplePoints.numAttributes());
	samplePoints.setClassIndex(samplePoints.numAttributes()-1);
	
	COMT2 comt = new COMT2(samplePoints, COMT2Iteration);
	
	comt.buildClassifier(trainingSet);
	
	// evaluate the model on its own training data and print the summary statistics
	Evaluation eval = new Evaluation(trainingSet);
	eval.evaluateModel(comt, trainingSet);
	System.err.println(eval.toSummaryString());
	
	Instance best = comt.getInstanceWithPossibleMaxY(samplePoints.firstInstance());
	Instances bestInstances = new Instances(trainingSet,2);
	bestInstances.add(best);
	DataIOFile.saveDataToXrffFile("data/trainingBestConf_COMT2.arff", bestInstances);
	
	//now we output the training set with the class value updated as the predicted value
	Instances output = new Instances(trainingSet, trainingSet.numInstances());
	Enumeration<Instance> enu = trainingSet.enumerateInstances();
	while(enu.hasMoreElements()){
		Instance ins = enu.nextElement();
		double[] values = ins.toDoubleArray();
		values[values.length-1] = comt.classifyInstance(ins);
		output.add((Instance) ins.copy(values));
	}
	DataIOFile.saveDataToXrffFile("data/trainingBestConf0_predict.xrff", output);
}
 
Example 3
Source File: MLPlanARFFExample.java    From AILibs with GNU Affero General Public License v3.0
public static void main(final String[] args) throws Exception {

		/* load data for segment dataset and create a train-test-split */
		long start = System.currentTimeMillis();
		File file = new File("testrsc/waveform.arff");
		Instances data = new Instances(new FileReader(file));
		LOGGER.info("Data read. Time to create dataset object was {}ms", System.currentTimeMillis() - start);
		data.setClassIndex(data.numAttributes() - 1);
		List<IWekaInstances> split = WekaUtil.getStratifiedSplit(new WekaInstances(data), 0, .7f);

		/* initialize mlplan with a tiny search space, and let it run for 30 seconds */
		MLPlanWekaBuilder builder = new MLPlanWekaBuilder();
		builder.withNodeEvaluationTimeOut(new Timeout(30, TimeUnit.SECONDS));
		builder.withCandidateEvaluationTimeOut(new Timeout(10, TimeUnit.SECONDS));
		builder.withTimeOut(new Timeout(30, TimeUnit.SECONDS));
		builder.withNumCpus(4);

		MLPlan<IWekaClassifier> mlplan = builder.withDataset(split.get(0)).build();
		mlplan.setPortionOfDataForPhase2(0f);
		mlplan.setLoggerName("testedalgorithm");

		try {
			start = System.currentTimeMillis();
			Classifier optimizedClassifier = mlplan.call().getClassifier();
			long trainTime = (System.currentTimeMillis() - start) / 1000;
			LOGGER.info("Finished build of the classifier.");
			if (LOGGER.isInfoEnabled()) {
				LOGGER.info("Chosen model is: {}", (mlplan.getSelectedClassifier()));
			}
			LOGGER.info("Training time was {}s.", trainTime);

			/* evaluate solution produced by mlplan */
			Evaluation eval = new Evaluation(split.get(0).getInstances());
			eval.evaluateModel(optimizedClassifier, split.get(1).getInstances());
			LOGGER.info("Error Rate of the solution produced by ML-Plan: {}. Internally believed error was {}", ((100 - eval.pctCorrect()) / 100f), mlplan.getInternalValidationErrorOfSelectedClassifier());
		} catch (NoSuchElementException e) {
			LOGGER.error("Building the classifier failed.", e);
		}
	}
 
Example 4
Source File: MLPlan4BigFileInputTester.java    From AILibs with GNU Affero General Public License v3.0
@Test
public void test() throws Exception {
	// MLPlan4BigFileInput mlplan = new MLPlan4BigFileInput(new File("testrsc/openml/41103.arff"));

	String origDataSrcName = "testrsc/openml/1240.arff";

	/* one-off preparation step: write stratified train/test splits of the original ARFF file and exit */
	if (true) {
		Instances data = new Instances(new FileReader(new File(origDataSrcName)));
		data.setClassIndex(data.numAttributes() - 1);
		List<Instances> split = WekaUtil.getStratifiedSplit(data, 0, .7f);
		ArffSaver saver = new ArffSaver();
		saver.setInstances(split.get(0));
		saver.setFile(new File(origDataSrcName + ".train"));
		saver.writeBatch();
		saver.setInstances(split.get(1));
		saver.setFile(new File(origDataSrcName + ".test"));
		saver.writeBatch();
		System.exit(0);
	}

	MLPlan4BigFileInput mlplan = new MLPlan4BigFileInput(new File(origDataSrcName + ".train"));
	mlplan.setTimeout(new Timeout(5, TimeUnit.MINUTES));
	mlplan.setLoggerName("testedalgorithm");
	long start = System.currentTimeMillis();
	Classifier c = mlplan.call();
	System.out.println("Observed output: " + c + " after " + (System.currentTimeMillis() - start) + "ms. Now validating the model");

	/* check quality */
	Instances testData = new Instances(new FileReader(new File(origDataSrcName + ".test")));
	testData.setClassIndex(testData.numAttributes() - 1);
	Evaluation eval = new Evaluation(testData);
	eval.evaluateModel(c, testData);
	System.out.println(eval.toSummaryString());

	assertNotNull(c);
}
 
Example 5
Source File: MajorityConfidenceVote.java    From AILibs with GNU Affero General Public License v3.0
/**
 * Builds the ensemble by assessing the classifier weights using a cross
 * validation of each classifier of the ensemble and then training the
 * classifiers using the complete <code>data</code>.
 *
 * @param data
 *            Training instances
 */
@Override
public void buildClassifier(final Instances data) throws Exception {

	this.classifierWeights = new double[this.m_Classifiers.length];

	// remove instances with missing class
	Instances newData = new Instances(data);
	newData.deleteWithMissingClass();
	this.m_structure = new Instances(newData, 0);

	// can classifier handle the data?
	this.getCapabilities().testWithFail(data);

	for (int i = 0; i < this.m_Classifiers.length; i++) {
		if (Thread.currentThread().isInterrupted()) {
			throw new InterruptedException();
		}

		// Perform cross validation to determine the classifier weights
		for (int n = 0; n < this.numFolds; n++) {
			Instances train = data.trainCV(this.numFolds, n, new Random(this.seed));
			Instances test = data.testCV(this.numFolds, n);

			this.getClassifier(i).buildClassifier(train);
			Evaluation eval = new Evaluation(train);
			eval.evaluateModel(this.getClassifier(i), test);
			this.classifierWeights[i] += eval.pctCorrect() / 100d;
		}

		this.classifierWeights[i] = Math.pow(this.classifierWeights[i], 2);
		this.classifierWeights[i] /= this.numFolds;

		this.getClassifier(i).buildClassifier(newData);
	}

	// If no classifier predicted something correctly, assume uniform distribution
	if (Arrays.stream(this.classifierWeights).allMatch(d -> d < 0.000001d)) {
		for (int i = 0; i < this.classifierWeights.length; i++) {
			this.classifierWeights[i] = 1d / this.classifierWeights.length;
		}
	}
}
 
Example 6
Source File: FiveWayMNBTrainer.java    From weka-mnb-sentiment-analysis-template-project with Apache License 2.0
public void testModel() throws Exception {
    Evaluation eTest = new Evaluation(dataRaw);
    eTest.evaluateModel(classifier, dataRaw);
    String strSummary = eTest.toSummaryString();
    System.out.println(strSummary);
}
 
Example 7
Source File: ThreeWayMNBTrainer.java    From weka-mnb-sentiment-analysis-template-project with Apache License 2.0
public void testModel() throws Exception {
    Evaluation eTest = new Evaluation(dataRaw);
    eTest.evaluateModel(classifier, dataRaw);
    String strSummary = eTest.toSummaryString();
    System.out.println(strSummary);
}