weka.classifiers.evaluation.Evaluation Java Examples

The following examples show how to use weka.classifiers.evaluation.Evaluation. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: WekaUtilTester.java — from AILibs (GNU Affero General Public License v3.0); 6 votes
@Test
public void checkSplit() throws Exception {

	// Load the vowel dataset; try-with-resources releases the file handle even on a parse error.
	Instances inst;
	try (BufferedReader reader = new BufferedReader(new FileReader(VOWEL_ARFF))) {
		inst = new Instances(reader);
	}
	inst.setClassIndex(inst.numAttributes() - 1);
	for (Classifier c : this.portfolio) {

		/* eval for CV: the train/test folds must partition the full dataset */
		inst.stratify(10);
		Instances train = inst.trainCV(10, 0);
		Instances test = inst.testCV(10, 0);
		Assert.assertEquals(train.size() + test.size(), inst.size());
		Evaluation cvEval = new Evaluation(train);
		cvEval.crossValidateModel(c, inst, 10, new Random(0));

		/* eval on the explicit split: use a FRESH Evaluation object here — reusing the
		 * one above would accumulate the cross-validation predictions into the holdout
		 * statistics and pctCorrect() would report a mixture of both experiments */
		c.buildClassifier(train);
		Evaluation holdoutEval = new Evaluation(train);
		holdoutEval.evaluateModel(c, test);
		System.out.println(holdoutEval.pctCorrect());
	}
}
 
Example #2
Source File: BestConf.java — from bestconf (Apache License 2.0); 5 votes
/**
 * Smoke-tests the COMT2 co-training model: trains it on a stored ARFF training set,
 * prints a resubstitution-evaluation summary, saves the sample point with the
 * predicted-best class value, and writes the training set re-labelled with the
 * model's predictions.
 *
 * @throws Exception if data loading, training, evaluation, or saving fails
 */
public static void testCOMT2() throws Exception{
	BestConf bestconf = new BestConf();
	Instances trainingSet = DataIOFile.loadDataFromArffFile("data/trainingBestConf0.arff");
	trainingSet.setClassIndex(trainingSet.numAttributes()-1);
	
	// Generate Latin-hypercube sample points over the attribute space and append
	// the class attribute so they are schema-compatible with the training set.
	Instances samplePoints = LHSInitializer.getMultiDimContinuous(bestconf.getAttributes(), InitialSampleSetSize, false);
	samplePoints.insertAttributeAt(trainingSet.classAttribute(), samplePoints.numAttributes());
	samplePoints.setClassIndex(samplePoints.numAttributes()-1);
	
	COMT2 comt = new COMT2(samplePoints, COMT2Iteration);
	
	comt.buildClassifier(trainingSet);
	
	// Resubstitution evaluation: the model is evaluated on its own training data,
	// so this summary is optimistic and only a sanity check, not a generalization estimate.
	Evaluation eval = new Evaluation(trainingSet);
	eval.evaluateModel(comt, trainingSet);
	System.err.println(eval.toSummaryString());
	
	// Save the instance the model believes maximizes Y.
	// NOTE(review): saveDataToXrffFile writes XRFF but the target name ends in ".arff" —
	// confirm whether the extension mismatch is intentional.
	Instance best = comt.getInstanceWithPossibleMaxY(samplePoints.firstInstance());
	Instances bestInstances = new Instances(trainingSet,2);
	bestInstances.add(best);
	DataIOFile.saveDataToXrffFile("data/trainingBestConf_COMT2.arff", bestInstances);
	
	//now we output the training set with the class value updated as the predicted value
	Instances output = new Instances(trainingSet, trainingSet.numInstances());
	Enumeration<Instance> enu = trainingSet.enumerateInstances();
	while(enu.hasMoreElements()){
		Instance ins = enu.nextElement();
		// Overwrite the last value (the class) with the model's prediction.
		double[] values = ins.toDoubleArray();
		values[values.length-1] = comt.classifyInstance(ins);
		output.add(ins.copy(values));
	}
	DataIOFile.saveDataToXrffFile("data/trainingBestConf0_predict.xrff", output);
}
 
Example #3
Source File: MLPlanCLI.java — from AILibs (GNU Affero General Public License v3.0); 5 votes
/**
 * Writes a single-label evaluation report (believed error, chosen model, Weka
 * evaluation summary/details/matrix, and optionally the classifier representation)
 * to the configured results file.
 */
private static void writeSingleLabelEvaluationFile(final Evaluation eval, final double internalError, final CommandLine commandLine, final Classifier bestModel) throws Exception {
	final String nl = System.lineSeparator();
	final StringBuilder report = new StringBuilder();

	report.append("Internally believed error: ").append(internalError).append(nl).append(nl);
	report.append("Best Model: ").append(nl).append(bestModel.toString()).append(nl).append(nl);
	report.append(eval.toSummaryString("Summary", true)).append(nl);
	report.append(eval.toClassDetailsString("Class Details")).append(nl);
	report.append("Evaluation Overview").append(nl);
	report.append(eval.toCumulativeMarginDistributionString()).append(nl);
	report.append(eval.toMatrixString("Matrix"));

	if (commandLine.hasOption(printModelOption)) {
		report.append("Classifier Representation: ").append(nl).append(nl);
		// For an ML pipeline, print the wrapped base classifier instead of the pipeline shell.
		if (bestModel instanceof ai.libs.jaicore.ml.weka.classification.pipeline.MLPipeline) {
			report.append(((ai.libs.jaicore.ml.weka.classification.pipeline.MLPipeline) bestModel).getBaseClassifier().toString());
		} else {
			report.append(bestModel.toString());
		}
	}

	writeFile(commandLine.getOptionValue(resultsFileOption, resultsFile), report.toString());
}
 
Example #4
Source File: MLPlanARFFExample.java — from AILibs (GNU Affero General Public License v3.0); 5 votes
public static void main(final String[] args) throws Exception {

		/* load data for segment dataset and create a train-test-split */
		long start = System.currentTimeMillis();
		File file = new File("testrsc/waveform.arff");
		Instances data;
		// try-with-resources: the original leaked the FileReader; close it even if parsing fails
		try (FileReader reader = new FileReader(file)) {
			data = new Instances(reader);
		}
		LOGGER.info("Data read. Time to create dataset object was {}ms", System.currentTimeMillis() - start);
		data.setClassIndex(data.numAttributes() - 1);
		List<IWekaInstances> split = WekaUtil.getStratifiedSplit(new WekaInstances(data), 0, .7f);

		/* initialize mlplan with a tiny search space, and let it run for 30 seconds */
		MLPlanWekaBuilder builder = new MLPlanWekaBuilder();
		builder.withNodeEvaluationTimeOut(new Timeout(30, TimeUnit.SECONDS));
		builder.withCandidateEvaluationTimeOut(new Timeout(10, TimeUnit.SECONDS));
		builder.withTimeOut(new Timeout(30, TimeUnit.SECONDS));
		builder.withNumCpus(4);

		MLPlan<IWekaClassifier> mlplan = builder.withDataset(split.get(0)).build();
		mlplan.setPortionOfDataForPhase2(0f);
		mlplan.setLoggerName("testedalgorithm");

		try {
			start = System.currentTimeMillis();
			Classifier optimizedClassifier = mlplan.call().getClassifier();
			// keep the arithmetic in long; the original narrowed to int before dividing, which
			// was both unnecessary and an overflow hazard
			long trainTime = (System.currentTimeMillis() - start) / 1000;
			LOGGER.info("Finished build of the classifier.");
			if (LOGGER.isInfoEnabled()) {
				LOGGER.info("Chosen model is: {}", (mlplan.getSelectedClassifier()));
			}
			LOGGER.info("Training time was {}s.", trainTime);

			/* evaluate solution produced by mlplan on the held-out 30% split */
			Evaluation eval = new Evaluation(split.get(0).getInstances());
			eval.evaluateModel(optimizedClassifier, split.get(1).getInstances());
			LOGGER.info("Error Rate of the solution produced by ML-Plan: {}. Internally believed error was {}", ((100 - eval.pctCorrect()) / 100f), mlplan.getInternalValidationErrorOfSelectedClassifier());
		} catch (NoSuchElementException e) {
			LOGGER.error("Building the classifier failed.", e);
		}
	}
 
Example #5
Source File: MLPlan4BigFileInputTester.java — from AILibs (GNU Affero General Public License v3.0); 5 votes
@Test
public void test() throws Exception {
	// MLPlan4BigFileInput mlplan = new MLPlan4BigFileInput(new File("testrsc/openml/41103.arff"));

	String origDataSrcName = "testrsc/openml/1240.arff";

	/* Data-preparation mode: with this hard-coded `true`, the test only writes the
	 * stratified .train/.test split files and terminates the JVM. Flip to false to
	 * actually run ML-Plan on the prepared files below. */
	if (true) {
		Instances data;
		// close the reader explicitly; the original leaked the file handle
		try (FileReader reader = new FileReader(new File(origDataSrcName))) {
			data = new Instances(reader);
		}
		data.setClassIndex(data.numAttributes() - 1);
		List<Instances> split = WekaUtil.getStratifiedSplit(data, 0, .7f);
		ArffSaver saver = new ArffSaver();
		saver.setInstances(split.get(0));
		saver.setFile(new File(origDataSrcName + ".train"));
		saver.writeBatch();
		saver.setInstances(split.get(1));
		saver.setFile(new File(origDataSrcName + ".test"));
		saver.writeBatch();
		System.exit(0);
	}

	MLPlan4BigFileInput mlplan = new MLPlan4BigFileInput(new File(origDataSrcName + ".train"));
	mlplan.setTimeout(new Timeout(5, TimeUnit.MINUTES));
	mlplan.setLoggerName("testedalgorithm");
	long start = System.currentTimeMillis();
	Classifier c = mlplan.call();
	System.out.println("Observed output: " + c + " after " + (System.currentTimeMillis() - start) + "ms. Now validating the model");

	/* check quality on the held-out test file */
	Instances testData;
	try (FileReader reader = new FileReader(new File(origDataSrcName + ".test"))) {
		testData = new Instances(reader);
	}
	testData.setClassIndex(testData.numAttributes() - 1);
	Evaluation eval = new Evaluation(testData);
	eval.evaluateModel(c, testData);
	System.out.println(eval.toSummaryString());

	assertNotNull(c);
}
 
Example #6
Source File: ActivityRecognition.java — from Machine-Learning-in-Java (MIT License); 5 votes
public static void main(String[] args) throws Exception{
	
	String databasePath = "data/features.arff";
	
	// Load the data in arff format; close the reader when done (the original leaked it)
	Instances data;
	try (BufferedReader reader = new BufferedReader(new FileReader(databasePath))) {
		data = new Instances(reader);
	}
	
	// Set the last attribute as the class
	data.setClassIndex(data.numAttributes() - 1);

	// Build a basic decision tree model with default options
	String[] options = new String[]{};
	J48 model = new J48();
	model.setOptions(options);
	model.buildClassifier(data);
	
	// Output decision tree
	System.out.println("Decision tree model:\n"+model);
	
	// Output source code implementing the decision tree
	System.out.println("Source code:\n"+model.toSource("ActivityRecognitionEngine"));
	
	// Check accuracy of model using 10-fold cross-validation
	Evaluation eval = new Evaluation(data);
	eval.crossValidateModel(model, data, 10, new Random(1), new String[] {});
	System.out.println("Model performance:\n"+eval.toSummaryString());
	
	// Demonstrate the low-pass filter smoothing a noisy activity sequence
	String[] activities = new String[]{"Walk", "Walk", "Walk", "Run", "Walk", "Run", "Run", "Sit", "Sit", "Sit"};
	DiscreteLowPass dlpFilter = new DiscreteLowPass(3);
	for(String str : activities){
		System.out.println(str +" -> "+ dlpFilter.filter(str));
	}
	
}
 
Example #7
Source File: MajorityConfidenceVote.java — from AILibs (GNU Affero General Public License v3.0); 4 votes
/**
 * Builds the ensemble by assessing the classifier weights using a cross
 * validation of each classifier of the ensemble and then training the
 * classifiers using the complete <code>data</code>.
 *
 * @param data
 *            Training instances
 */
@Override
public void buildClassifier(final Instances data) throws Exception {

	// One weight per ensemble member, accumulated over the CV folds below.
	this.classifierWeights = new double[this.m_Classifiers.length];

	// remove instances with missing class
	Instances newData = new Instances(data);
	newData.deleteWithMissingClass();
	this.m_structure = new Instances(newData, 0);

	// can classifier handle the data?
	this.getCapabilities().testWithFail(data);

	for (int i = 0; i < this.m_Classifiers.length; i++) {
		// Honor interruption promptly: this loop trains classifiers repeatedly and can be long-running.
		if (Thread.currentThread().isInterrupted()) {
			throw new InterruptedException();
		}

		// Perform cross validation to determine the classifier weights
		// NOTE(review): folds are drawn from the original `data` (which may still contain
		// missing-class instances), while the final model below is trained on `newData` —
		// confirm this asymmetry is intentional.
		for (int n = 0; n < this.numFolds; n++) {
			Instances train = data.trainCV(this.numFolds, n, new Random(this.seed));
			Instances test = data.testCV(this.numFolds, n);

			this.getClassifier(i).buildClassifier(train);
			Evaluation eval = new Evaluation(train);
			eval.evaluateModel(this.getClassifier(i), test);
			// Accumulate per-fold accuracy in [0, 1].
			this.classifierWeights[i] += eval.pctCorrect() / 100d;
		}

		// Square the summed accuracy before averaging, emphasizing stronger classifiers.
		this.classifierWeights[i] = Math.pow(this.classifierWeights[i], 2);
		this.classifierWeights[i] /= this.numFolds;

		// Final model for this slot is trained on the full (missing-class-free) data.
		this.getClassifier(i).buildClassifier(newData);
	}

	// If no classifier predicted something correctly, assume uniform distribution
	if (Arrays.stream(this.classifierWeights).allMatch(d -> d < 0.000001d)) {
		for (int i = 0; i < this.classifierWeights.length; i++) {
			this.classifierWeights[i] = 1d / this.classifierWeights.length;
		}
	}
}
 
Example #8
Source File: RegressionTask.java — from Machine-Learning-in-Java (MIT License); 4 votes
public static void main(String[] args) throws Exception {

		/*
		 * Load data
		 */
		CSVLoader loader = new CSVLoader();
		loader.setFieldSeparator(",");
		loader.setSource(new File("data/ENB2012_data.csv"));
		Instances data = loader.getDataSet();

		// System.out.println(data);

		/*
		 * Build regression models
		 */
		// set class index to Y1 (heating load)
		data.setClassIndex(data.numAttributes() - 2);
		// remove last attribute Y2 so only one target remains
		Remove remove = new Remove();
		remove.setOptions(new String[] { "-R", data.numAttributes() + "" });
		remove.setInputFormat(data);
		data = Filter.useFilter(data, remove);

		// build a regression model
		LinearRegression model = new LinearRegression();
		model.buildClassifier(data);
		System.out.println(model);

		// 10-fold cross-validation of the linear model
		Evaluation eval = new Evaluation(data);
		eval.crossValidateModel(model, data, 10, new Random(1), new String[] {});
		System.out.println(eval.toSummaryString());
		System.out.println();

		// build a regression tree model

		M5P md5 = new M5P();
		md5.setOptions(new String[] { "" });
		md5.buildClassifier(data);
		System.out.println(md5);

		// 10-fold cross-validation of the tree model — use a FRESH Evaluation object;
		// reusing the one above would accumulate the linear-regression predictions
		// into the tree's reported statistics
		Evaluation evalTree = new Evaluation(data);
		evalTree.crossValidateModel(md5, data, 10, new Random(1), new String[] {});
		System.out.println(evalTree.toSummaryString());
		System.out.println();
		
		
		
		
		/*
		 * Bonus: Build additional models 
		 */
		
		// ZeroR modelZero = new ZeroR();
		//
		//
		//
		//
		//
		// REPTree modelTree = new REPTree();
		// modelTree.buildClassifier(data);
		// System.out.println(modelTree);
		// eval = new Evaluation(data);
		// eval.crossValidateModel(modelTree, data, 10, new Random(1), new
		// String[]{});
		// System.out.println(eval.toSummaryString());
		//
		// SMOreg modelSVM = new SMOreg();
		//
		// MultilayerPerceptron modelPerc = new MultilayerPerceptron();
		//
		// GaussianProcesses modelGP = new GaussianProcesses();
		// modelGP.buildClassifier(data);
		// System.out.println(modelGP);
		// eval = new Evaluation(data);
		// eval.crossValidateModel(modelGP, data, 10, new Random(1), new
		// String[]{});
		// System.out.println(eval.toSummaryString());

		/*
		 * Bonus: Save ARFF
		 */
		// ArffSaver saver = new ArffSaver();
		// saver.setInstances(data);
		// saver.setFile(new File(args[1]));
		// saver.setDestination(new File(args[1]));
		// saver.writeBatch();

	}
 
Example #9
Source File: FiveWayMNBTrainer.java — from weka-mnb-sentiment-analysis-template-project (Apache License 2.0); 4 votes
/**
 * Evaluates the trained classifier on the raw training data and prints
 * the Weka evaluation summary to standard output.
 */
public void testModel() throws Exception {
    final Evaluation evaluation = new Evaluation(dataRaw);
    evaluation.evaluateModel(classifier, dataRaw);
    System.out.println(evaluation.toSummaryString());
}
 
Example #10
Source File: ThreeWayMNBTrainer.java — from weka-mnb-sentiment-analysis-template-project (Apache License 2.0); 4 votes
/**
 * Evaluates the trained classifier on the raw training data and prints
 * the Weka evaluation summary to standard output.
 */
public void testModel() throws Exception {
    final Evaluation evaluation = new Evaluation(dataRaw);
    evaluation.evaluateModel(classifier, dataRaw);
    System.out.println(evaluation.toSummaryString());
}