meka.core.Result Java Examples

The following examples show how to use meka.core.Result. They are drawn from the meka and AILibs projects; the source file and license are noted above each example.
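Before the individual examples, here is a minimal sketch of the workflow most of them share: load a multi-label ARFF file, cross-validate a classifier, and read measurements back from the returned Result. The dataset path is a placeholder and the import locations are assumed from recent MEKA releases, so treat this as an illustration rather than canonical API documentation.

import meka.classifiers.multilabel.BR;
import meka.classifiers.multilabel.Evaluation;
import meka.core.MLUtils;
import meka.core.Result;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class ResultSketch {
  public static void main(String[] args) throws Exception {
    // Load a multi-label dataset and move the class attributes into place.
    // ("Music.arff" is a placeholder; use any MEKA-formatted ARFF file.)
    Instances data = DataSource.read("Music.arff");
    MLUtils.prepareData(data);

    // Cross-validate a Binary Relevance classifier; "PCut1" is the threshold
    // option and "3" the verbosity option, as in the examples below.
    BR classifier = new BR();
    Result result = Evaluation.cvModel(classifier, data, 10, "PCut1", "3");

    // A Result bundles raw predictions with aggregated measurements.
    System.out.println(result);                             // full textual report
    System.out.println(result.getMeasurement("Accuracy"));  // one named measurement
  }
}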
Example #1
Source File: AbstractMekaClassifierTest.java    From meka with GNU General Public License v3.0
/**
 * Turns the results from the regression test into a string.
 *
 * @param result        the result to process
 * @return              the generated string
 */
protected String postProcessRegressionResults(Result result) {
	StringBuilder   processed;
	String[]        lines;

	processed = new StringBuilder();
	lines     = result.toString().split("\n");
	for (String line: lines) {
		if (line.toLowerCase().contains("time"))
			continue;
		if (processed.length() > 0)
			processed.append("\n");
		processed.append(line);
	}

	return processed.toString();
}
 
Example #2
Source File: EvaluationTests.java    From meka with GNU General Public License v3.0
public void testRepeatable() {
	// Load Music
	Instances D = loadInstances("Music.arff");
	Instances D_train = new Instances(D,0,400);
	Instances D_test = new Instances(D,400,D.numInstances()-400);
	// Train ECC
	MultiLabelClassifier h = makeECC();
	// Eval
	try {
		Result r1 = Evaluation.evaluateModel(h, D_train, D_test, "PCut1");
		Result r2 = Evaluation.evaluateModel(h, D_train, D_test, "PCut1");
		assertTrue("Experiments are Repeatable (with same result)", r1.getMeasurement("Accuracy").equals(r2.getMeasurement("Accuracy")));
	} catch(Exception e) {
		e.printStackTrace();
	}
}
 
Example #3
Source File: DeepMethodsTests.java    From meka with GNU General Public License v3.0
public void testDBPNN() {
	
	System.out.println("Test Back Prop Neural Network with pre-trained weights (via RBM)");
	DBPNN dbn = new DBPNN();
	dbn.setClassifier(new BPNN());
	dbn.setDebug(true);

	try {
		dbn.setOptions(Utils.splitOptions("-H 30 -E 500 -r 0.1 -m 0.2"));
	} catch(Exception e) {
		System.err.println("Fatal Error");
		e.printStackTrace();
		System.exit(1);
	}
	Result r = EvaluationTests.cvEvaluateClassifier(dbn);
	String s = (String)r.getMeasurement("Accuracy");
	System.out.println("DBPNN + _" + r.getMeasurement("Accuracy"));
	assertTrue("DBPNN Accuracy Correct", s.equals("0.556 +/- 0.038"));
}
 
Example #4
Source File: DeepMethodsTests.java    From meka with GNU General Public License v3.0
public void testDeepML() {
	System.out.println("Test Stacked Boltzmann Machines with an off-the-shelf multi-label classifier");
	DeepML dbn = new DeepML();

	MCC h = new MCC();
	SMO smo = new SMO();
	smo.setBuildCalibrationModels(true);
	h.setClassifier(smo);

	dbn.setClassifier(h);
	dbn.setE(100);
	dbn.setH(30);

	Result r = EvaluationTests.cvEvaluateClassifier(dbn);
	System.out.println("DeepML + MCC" + r.getMeasurement("Accuracy"));
	String s = (String)r.getMeasurement("Accuracy");
	assertTrue("DeepML+MCC Accuracy Correct", s.startsWith("0.53")); // Good enough 
}
 
Example #5
Source File: ShowMacroCurve.java    From meka with GNU General Public License v3.0
/**
 * Returns the action listener to use in the menu.
 *
 * @param history   the current history
 * @param index     the selected history item
 * @return          the listener
 */
@Override
public ActionListener getActionListener(final ResultHistoryList history, final int index) {
	final Result result = history.getResultAt(index);

	return new ActionListener() {
		@Override
		public void actionPerformed(ActionEvent e) {
			JDialog dialog = new JDialog((Frame) null, history.getSuffixAt(index), false);
			dialog.setDefaultCloseOperation(JDialog.DISPOSE_ON_CLOSE);
			dialog.getContentPane().setLayout(new BorderLayout());
			Instances performance = (Instances) result.getMeasurement(CURVE_DATA_MACRO);
			try {
				VisualizePanel panel = createPanel(performance);
				dialog.getContentPane().add(panel, BorderLayout.CENTER);
			}
			catch (Exception ex) {
				System.err.println("Failed to create plot!");
				ex.printStackTrace();
			}
			dialog.setSize(800, 600);
			dialog.setLocationRelativeTo(null);
			dialog.setVisible(true);
		}
	};
}
 
Example #6
Source File: IncrementalPerformance.java    From meka with GNU General Public License v3.0
/**
 * Returns the action listener to use in the menu.
 *
 * @param history   the current history
 * @param index     the selected history item
 * @return          the listener
 */
@Override
public ActionListener getActionListener(final ResultHistoryList history, final int index) {
	final Result result = history.getResultAt(index);

	return new ActionListener() {
		@Override
		public void actionPerformed(ActionEvent e) {
			JDialog dialog = new JDialog((Frame) null, history.getSuffixAt(index), false);
			dialog.setDefaultCloseOperation(JDialog.DISPOSE_ON_CLOSE);
			dialog.getContentPane().setLayout(new BorderLayout());
			Instances performance = (Instances) result.getMeasurement(RESULTS_SAMPLED_OVER_TIME);
			try {
				VisualizePanel panel = createPanel(performance);
				dialog.getContentPane().add(panel, BorderLayout.CENTER);
			}
			catch (Exception ex) {
				System.err.println("Failed to create plot!");
				ex.printStackTrace();
			}
			dialog.setSize(800, 600);
			dialog.setLocationRelativeTo(null);
			dialog.setVisible(true);
		}
	};
}
 
Example #7
Source File: MiscMethodsTests.java    From meka with GNU General Public License v3.0
public void testMULAN() {
	// Test MULAN
	System.out.println("Test MULAN");
	MULAN mulan = new MULAN();
	// ... RAkEL
	mulan.setMethod("RAkEL2");
	mulan.setClassifier(new SMO());
	Result r;
	r = EvaluationTests.cvEvaluateClassifier(mulan);
	System.out.println("MULAN (RAkEL): "+r.output.get("Accuracy"));
	assertTrue("MULAN (RAkEL) Accuracy Correct", ((String)r.output.get("Accuracy")).startsWith("0.58") );
	// ... MLkNN
	mulan.setMethod("MLkNN");
	r = EvaluationTests.cvEvaluateClassifier(mulan);
	System.out.println("MULAN (MLkNN): "+r.output.get("Accuracy"));
	assertTrue("MULAN (MLkNN) Accuracy Correct", r.output.get("Accuracy").equals("0.561 +/- 0.035") );
	// ... BR (and compare with MEKA's own BR)
	mulan.setMethod("BR");
	r = EvaluationTests.cvEvaluateClassifier(mulan);
	System.out.println("MULAN (BR): "+r.output.get("Accuracy"));
	assertTrue("MULAN (BR) Accuracy Correct", r.output.get("Accuracy").equals("0.493 +/- 0.036") );
	BR br = new BR();
	br.setClassifier(new SMO());
	Result r_other = EvaluationTests.cvEvaluateClassifier(br);
	assertTrue("MULAN BR Equal to MEKA BR", r.output.get("Accuracy").equals(r_other.output.get("Accuracy")) );
}
 
Example #8
Source File: ShowMicroCurve.java    From meka with GNU General Public License v3.0
/**
 * Returns the action listener to use in the menu.
 *
 * @param history   the current history
 * @param index     the selected history item
 * @return          the listener
 */
@Override
public ActionListener getActionListener(final ResultHistoryList history, final int index) {
	final Result result = history.getResultAt(index);

	return new ActionListener() {
		@Override
		public void actionPerformed(ActionEvent e) {
			JDialog dialog = new JDialog((Frame) null, history.getSuffixAt(index), false);
			dialog.setDefaultCloseOperation(JDialog.DISPOSE_ON_CLOSE);
			dialog.getContentPane().setLayout(new BorderLayout());
			Instances performance = (Instances) result.getMeasurement(CURVE_DATA_MICRO);
			try {
				VisualizePanel panel = createPanel(performance);
				dialog.getContentPane().add(panel, BorderLayout.CENTER);
			}
			catch (Exception ex) {
				System.err.println("Failed to create plot!");
				ex.printStackTrace();
			}
			dialog.setSize(800, 600);
			dialog.setLocationRelativeTo(null);
			dialog.setVisible(true);
		}
	};
}
 
Example #9
Source File: MicroCurve.java    From meka with GNU General Public License v3.0
public static void main(String[] args) throws Exception {
  if (args.length != 1)
    throw new IllegalArgumentException("Required arguments: <dataset>");

  System.out.println("Loading data: " + args[0]);
  Instances data = DataSource.read(args[0]);
  MLUtils.prepareData(data);

  System.out.println("Cross-validate BR classifier");
  BR classifier = new BR();
  // further configuration of classifier
  String top = "PCut1";
  String vop = "3";
  Result result = Evaluation.cvModel(classifier, data, 10, top, vop);

  JFrame frame = new JFrame("Micro curve");
  frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
  frame.getContentPane().setLayout(new BorderLayout());
  Instances performance = (Instances) result.getMeasurement(CURVE_DATA_MICRO);
  try {
    VisualizePanel panel = createPanel(performance);
    frame.getContentPane().add(panel, BorderLayout.CENTER);
  }
  catch (Exception ex) {
    System.err.println("Failed to create plot!");
    ex.printStackTrace();
  }
  frame.setSize(800, 600);
  frame.setLocationRelativeTo(null);
  frame.setVisible(true);
}
 
Example #10
Source File: CrossValidate.java    From meka with GNU General Public License v3.0
public static void main(String[] args) throws Exception {
  if (args.length != 1)
    throw new IllegalArgumentException("Required arguments: <dataset>");

  System.out.println("Loading data: " + args[0]);
  Instances data = DataSource.read(args[0]);
  MLUtils.prepareData(data);

  int numFolds = 10;
  System.out.println("Cross-validate BR classifier using " + numFolds + " folds");
  BR classifier = new BR();
  // further configuration of classifier
  String top = "PCut1";
  String vop = "3";
  Result result = Evaluation.cvModel(classifier, data, numFolds, top, vop);

  System.out.println(result);
}
 
Example #11
Source File: TrainTestSplit.java    From meka with GNU General Public License v3.0
public static void main(String[] args) throws Exception {
  if (args.length != 2)
    throw new IllegalArgumentException("Required arguments: <dataset> <percentage>");

  System.out.println("Loading data: " + args[0]);
  Instances data = DataSource.read(args[0]);
  MLUtils.prepareData(data);

  double percentage = Double.parseDouble(args[1]);
  int trainSize = (int) (data.numInstances() * percentage / 100.0);
  Instances train = new Instances(data, 0, trainSize);
  Instances test = new Instances(data, trainSize, data.numInstances() - trainSize);

  System.out.println("Build BR classifier on " + percentage + "%");
  BR classifier = new BR();
  // further configuration of classifier
  classifier.buildClassifier(train);

  System.out.println("Evaluate BR classifier on " + (100.0 - percentage) + "%");
  String top = "PCut1";
  String vop = "3";
  Result result = Evaluation.evaluateModel(classifier, train, test, top, vop);

  System.out.println(result);
}
 
Example #12
Source File: MekaClassifierTest.java    From AILibs with GNU Affero General Public License v3.0
@Test
public void testFitAndPredictWithHoldoutSplitter() throws Exception {
	BR br = new BR();
	br.buildClassifier(splitterSplit.get(0).getInstances());
	Result res = Evaluation.testClassifier(br, splitterSplit.get(1).getInstances());
	double[][] mekaPredictions = res.allPredictions();

	MekaClassifier classifier = new MekaClassifier(new BR());
	classifier.fit(splitterSplit.get(0));
	IMultiLabelClassificationPredictionBatch pred = classifier.predict(splitterSplit.get(1));

	assertEquals("Number of predictions is not consistent.", splitterSplit.get(1).size(), pred.getNumPredictions());

	double[][] jaicorePredictions = pred.getPredictionMatrix();
	assertEquals("Length of prediction matrices is not consistent.", mekaPredictions.length, jaicorePredictions.length);
	assertEquals("Width of prediction matrices is not consistent.", mekaPredictions[0].length, jaicorePredictions[0].length);

	for (int i = 0; i < mekaPredictions.length; i++) {
		for (int j = 0; j < mekaPredictions[i].length; j++) {
			assertEquals("The prediction for instance " + i + " and label " + j + " is not consistent.", mekaPredictions[i][j], jaicorePredictions[i][j], 1E-8);
		}
	}
}
 
Example #13
Source File: ResultHistory.java    From meka with GNU General Public License v3.0
/**
 * Removes the specified entry.
 * 
 * @param index the history entry to remove
 * @return the removed item
 */
public synchronized Result remove(int index) {
	Result result;
	Date date;
	
	date   = m_Ordered.remove(index);
	m_Suffixes.remove(date);
	m_Payloads.remove(date);
	result = m_Results.remove(date);
	
	return result;
}
 
Example #14
Source File: ResultHistoryList.java    From meka with GNU General Public License v3.0
/**
 * Brings up a dialog to save the specified item to a file.
 * 
 * @param index the index of the item to save
 * @return true if successfully saved
 */
protected boolean save(int index) {
	boolean result;
	int retVal;
	Result res;
	File file;
	
	retVal = m_FileChooser.showSaveDialog(this);
	if (retVal != JFileChooser.APPROVE_OPTION)
		return false;
	
	file = m_FileChooser.getSelectedFile();
	res  = getResultAt(index);
	try {
		Result.writeResultToFile(res, file.getAbsolutePath());
		result = true;
	}
	catch (Exception e) {
		result = false;
		System.err.println("Failed to write result to file '" + file + "':");
		e.printStackTrace();
		JOptionPane.showMessageDialog(
				this, 
				"Failed to write result to file '" + file + "':\n" + e, 
				"Error saving",
				JOptionPane.ERROR_MESSAGE);
	}
	
	return result;
}
 
Example #15
Source File: MetaMethodsTests.java    From meka with GNU General Public License v3.0
public void testEBR() {

		// Test BR
		BR br = new BR();
		br.setClassifier(new SMO());
		Result r = EvaluationTests.cvEvaluateClassifier(br);
		assertTrue("BR Accuracy Correct", r.getMeasurement("Accuracy").equals("0.493 +/- 0.036") );

		// Test EBR
		EnsembleML ebr = new EnsembleML();
		ebr.setClassifier(br);
		Result Er = EvaluationTests.cvEvaluateClassifier(ebr);
		assertTrue("EBR Accuracy Correct", Er.getMeasurement("Accuracy").equals("0.557 +/- 0.04 ") );
	}
 
Example #16
Source File: ResultHistoryList.java    From meka with GNU General Public License v3.0
/**
 * Removes the element at the specified location.
 * 
 * @param index the location
 * @return the removed item
 */
public Result removeElementAt(int index) {
	Result result;
	result = m_History.remove(index);
	fireIntervalRemoved(this, index, index);
	return result;
}
 
Example #17
Source File: CCMethodsTests.java    From meka with GNU General Public License v3.0
public void testCDT() {
	// Test CDT (with SMO -- -M)
	System.out.println("Test CDT");
	CDT h = new CDT();
	SMO smo = new SMO();
	smo.setBuildCalibrationModels(true);
	h.setClassifier(smo);
	Result r = EvaluationTests.cvEvaluateClassifier(h);
	//System.out.println("CDT ACC: "+r.getMeasurement("Accuracy"));
	assertEquals("CDT Accuracy Correct", "0.519 +/- 0.039", r.getMeasurement("Accuracy") );
}
 
Example #18
Source File: EvaluationTests.java    From meka with GNU General Public License v3.0
public void testIncrementalEvaluation() {
	// Batch
	Result r1 = null, r2 = null;
	// Load Data
	Instances D = loadInstances("Music.arff");
	// Train ECCUpdateable
	BaggingMLUpdateable h = new BaggingMLUpdateable();
	CCUpdateable cc = new CCUpdateable();
	cc.setClassifier(new IBk());
	h.setClassifier(cc);
	try {
		r1 = IncrementalEvaluation.evaluateModel(h,D);
		r2 = IncrementalEvaluation.evaluateModel(h,D);
	} catch(Exception e) {
		System.err.println("FAILED TO GET r1, r2");
		e.printStackTrace();
	}
	// Good @TODO
	//assertTrue("Inc. Eval OK? ?", r1.info.get("Accuracy").equals("0.486 +/- 0.045"));
	// The same?
	if (r1==null)
		System.out.println("r1 is null");
	if (r2==null)
		System.out.println("r2 is null");

	assertTrue("Inc. Eval the same?", ((String)r1.getMeasurement("Accuracy")).equals(((String)r2.getMeasurement("Accuracy"))));
	// test/train

	// compare with non-ss
}
 
Example #19
Source File: EvaluationTests.java    From meka with GNU General Public License v3.0
public void testThreshold() {
	BaggingML h = new BaggingML();
	CC cc = new CC();
	cc.setClassifier(new Logistic());
	h.setClassifier(cc);
	Result r = EvaluationTests.cvEvaluateClassifier(h,"0.5");
	assertTrue("PCutL Thresholds OK?", r.info.get("Threshold").equals("[0.4, 0.4, 0.4, 0.4, 0.6, 0.6]") );
}
 
Example #20
Source File: EvaluationTests.java    From meka with GNU General Public License v3.0
public static Result cvEvaluateClassifier(MultiLabelClassifier h, String top) {
	Instances D = null;
	try {
		D = EvaluationTests.loadInstances("Music.arff");
		Result result = Evaluation.cvModel(h,D,5,top,"7");
		return result;
	} catch(Exception e) {
		System.err.println("");
		e.printStackTrace();
		return null;
	}
}
 
Example #21
Source File: LPMethodsTests.java    From meka with GNU General Public License v3.0
public void testLC() {

		Result r = null;

		// Test LC
		LC lc = new LC();
		lc.setClassifier(new SMO());
		r = EvaluationTests.cvEvaluateClassifier(lc);
		String s = (String)r.getMeasurement("Accuracy");
		System.out.println("LC "+s);
		TestHelper.assertAlmostEquals("LC Accuracy Correct", "0.568 +/- 0.032", (String)s, 1);

		// Test PS (0,0) -- should be identical
		PS ps = new PS();
		ps.setClassifier(new SMO());
		r = EvaluationTests.cvEvaluateClassifier(ps);
		System.out.println("PS "+r.getMeasurement("Accuracy"));
		assertTrue("PS(0,0) Accuracy Identical to LC", s.equals(r.getMeasurement("Accuracy")));

		// Test PS (3,1) -- should be faster 
		ps.setP(3);
		ps.setN(1);

		r = EvaluationTests.cvEvaluateClassifier(ps);
		System.out.println("PS(3,1) "+r.getMeasurement("Accuracy"));
		TestHelper.assertAlmostEquals("PS(3,1) Accuracy Correct", "0.565 +/- 0.04", (String)r.getMeasurement("Accuracy"), 1);

		// Test EPS
		EnsembleML eps = new EnsembleML();
		eps.setClassifier(ps);
		r = EvaluationTests.cvEvaluateClassifier(eps);
		System.out.println("EPS "+r.getMeasurement("Accuracy"));
		TestHelper.assertAlmostEquals("EPS Accuracy Correct", "0.574 +/- 0.042", (String)r.getMeasurement("Accuracy"), 1);
	}
 
Example #22
Source File: MiscMethodsTests.java    From meka with GNU General Public License v3.0
public void testRT() {

		// Test RT
		System.out.println("Test RT");
		RT rt = new RT();
		rt.setClassifier(new Logistic());
		Result r = EvaluationTests.cvEvaluateClassifier(rt);
		assertTrue("RT Accuracy Correct", r.output.get("Accuracy").equals("0.5   +/- 0.04 ") );
	}
 
Example #23
Source File: CCMethodsTests.java    From meka with GNU General Public License v3.0
public void testsCC() {

		// Test BR
		BR br = new BR();
		br.setClassifier(new SMO());
		Result r = EvaluationTests.cvEvaluateClassifier(br);
		assertEquals("BR Accuracy Correct", "0.493 +/- 0.036", r.getMeasurement("Accuracy"));

		// Test EBR
		EnsembleML ebr = new EnsembleML();
		ebr.setClassifier(br);
		Result Er = EvaluationTests.cvEvaluateClassifier(ebr);
		assertEquals("EBR Accuracy Correct", "0.557 +/- 0.04 ", Er.getMeasurement("Accuracy"));
	}
 
Example #24
Source File: CCMethodsTests.java    From meka with GNU General Public License v3.0
public void testMCC() {
	// Test MCC (with SMO -- -M)
	System.out.println("Test MCC");
	MCC h = new MCC();
	SMO smo = new SMO();
	smo.setBuildCalibrationModels(true);
	h.setClassifier(smo);
	Result r = EvaluationTests.cvEvaluateClassifier(h);
	assertEquals("MCC Accuracy Correct", "0.561 +/- 0.035", r.getMeasurement("Accuracy"));
}
 
Example #25
Source File: CCMethodsTests.java    From meka with GNU General Public License v3.0
public void testPMCC() {
	// Test MCC (with SMO -- -M)
	System.out.println("Test PMCC");
	PMCC h = new PMCC();
	h.setM(10);
	h.setChainIterations(50);
	h.setInferenceIterations(20);
	SMO smo = new SMO();
	smo.setBuildCalibrationModels(true);
	h.setClassifier(smo);
	Result r = EvaluationTests.cvEvaluateClassifier(h);
	assertEquals("PMCC Accuracy Correct", "0.594 +/- 0.029", r.getMeasurement("Accuracy"));
}
 
Example #26
Source File: CCMethodsTests.java    From meka with GNU General Public License v3.0
public void testPCC() {
	// Test PCC (with SMO -- -M)
	System.out.println("Test PCC");
	PCC h = new PCC();
	SMO smo = new SMO();
	smo.setBuildCalibrationModels(true);
	h.setClassifier(smo);
	Result r = EvaluationTests.cvEvaluateClassifier(h);
	assertEquals("PCC Accuracy Correct", "0.565 +/- 0.032", r.getMeasurement("Accuracy"));
}
 
Example #27
Source File: CCMethodsTests.java    From meka with GNU General Public License v3.0
public void testCT() {
	// Test CT (with SMO -- -M)
	System.out.println("Test CT");
	CT h = new CT();
	SMO smo = new SMO();
	smo.setBuildCalibrationModels(true);
	h.setClassifier(smo);
	h.setInferenceIterations(10);
	h.setChainIterations(10);
	Result r = EvaluationTests.cvEvaluateClassifier(h);
	//System.out.println("CT ACC: "+r.getMeasurement("Accuracy"));
	assertEquals("CT Accuracy Correct", "0.56  +/- 0.034", r.getMeasurement("Accuracy"));
}
 
Example #28
Source File: MLPlanCLI.java    From AILibs with GNU Affero General Public License v3.0
private static void writeMultiLabelEvaluationFile(final Result result, final double internalError, final CommandLine commandLine, final Classifier bestModel) {
	StringBuilder builder = new StringBuilder();
	builder.append("Internally believed error: ");
	builder.append(internalError);
	builder.append(System.lineSeparator());
	builder.append(System.lineSeparator());
	builder.append("Best Model: ");
	builder.append(System.lineSeparator());
	builder.append(bestModel.toString());
	builder.append(System.lineSeparator());
	builder.append(System.lineSeparator());
	builder.append(result.toString());
	builder.append(System.lineSeparator());
	builder.append(System.lineSeparator());
	if (commandLine.hasOption(printModelOption)) {
		builder.append("Classifier Representation: ");
		builder.append(System.lineSeparator());
		builder.append(System.lineSeparator());
		if (bestModel instanceof ai.libs.jaicore.ml.weka.classification.pipeline.MLPipeline) {
			builder.append(((MLPipeline) bestModel).getBaseClassifier().toString());
		} else {
			builder.append(bestModel.toString());
		}
	}

	writeFile(commandLine.getOptionValue(resultsFileOption, resultsFile), builder.toString());
}
 
Example #29
Source File: Evaluation.java    From meka with GNU General Public License v3.0
/**
 * Test classifier, but threaded (multiple).
 * @param	h		a multi-dim. classifier, ALREADY BUILT (threaded, implements MultiLabelThreaded)
 * @param	D_test 	test data
 * @return	Result	with raw prediction data ONLY
 */
public static Result testClassifierM(MultiXClassifier h, Instances D_test) throws Exception {

	int L = D_test.classIndex();
	Result result = new Result(D_test.numInstances(),L);
	if(h.getDebug()) System.out.print(":- Evaluate ");
	if(h instanceof MultiLabelClassifierThreaded){
		((MultiLabelClassifierThreaded)h).setThreaded(true);
		double y[][] = ((MultiLabelClassifierThreaded)h).distributionForInstanceM(D_test);

		for (int i = 0; i < D_test.numInstances(); i++) {
			// Store the result
			result.addResult(y[i],D_test.instance(i));
		}
		if(h.getDebug()) System.out.println(":-");

	/*
	if(h.getDebug()) {

		for(int i = 0; i < result.size(); i++) {
			System.out.println("\t"+Arrays.toString(result.rowActual(i))+" vs "+Arrays.toString(result.rowRanking(i)));
		}


	}
	*/
	}
	return result;
}
 
Example #30
Source File: ExportPredictionsOnTestSet.java    From meka with GNU General Public License v3.0
public static void main(String[] args) throws Exception {
  if (args.length != 3)
    throw new IllegalArgumentException("Required arguments: <train> <test> <output>");

  System.out.println("Loading train: " + args[0]);
  Instances train = DataSource.read(args[0]);
  MLUtils.prepareData(train);

  System.out.println("Loading test: " + args[1]);
  Instances test = DataSource.read(args[1]);
  MLUtils.prepareData(test);

  // compatible?
  String msg = train.equalHeadersMsg(test);
  if (msg != null)
    throw new IllegalStateException(msg);

  System.out.println("Build BR classifier on " + args[0]);
  BR classifier = new BR();
  // further configuration of classifier
  classifier.buildClassifier(train);

  System.out.println("Evaluate BR classifier on " + args[1]);
  String top = "PCut1";
  String vop = "3";
  Result result = Evaluation.evaluateModel(classifier, train, test, top, vop);

  System.out.println(result);

  System.out.println("Saving predictions test set to " + args[2]);
  Instances performance = Result.getPredictionsAsInstances(result);
  DataSink.write(args[2], performance);
}