Java Code Examples for weka.classifiers.functions.SMO#setBuildCalibrationModels()

The following examples show how to use weka.classifiers.functions.SMO#setBuildCalibrationModels() . You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: DeepMethodsTests.java    From meka with GNU General Public License v3.0 6 votes vote down vote up
/**
 * Tests DeepML (stacked Boltzmann machines) wrapped around an MCC
 * classifier whose base learner is an SMO with calibration models enabled.
 */
public void testDeepML() {
	System.out.println("Test Stacked Boltzmann Machines with an off-the-shelf multi-label classifier");

	// Base learner: SMO with calibration models so it can emit probabilities.
	SMO baseLearner = new SMO();
	baseLearner.setBuildCalibrationModels(true);

	MCC chainClassifier = new MCC();
	chainClassifier.setClassifier(baseLearner);

	DeepML deepModel = new DeepML();
	deepModel.setClassifier(chainClassifier);
	deepModel.setE(100); // E/H: presumably training epochs and hidden units — TODO confirm
	deepModel.setH(30);

	Result result = EvaluationTests.cvEvaluateClassifier(deepModel);
	System.out.println("DeepML + MCC" + result.getMeasurement("Accuracy"));
	String accuracy = (String) result.getMeasurement("Accuracy");
	assertTrue("DeepML+MCC Accuracy Correct", accuracy.startsWith("0.53")); // Good enough 
}
 
Example 2
Source File: EnsembleProvider.java    From AILibs with GNU Affero General Public License v3.0 5 votes vote down vote up
/**
 * Builds the CAWPE ensemble model out of five Weka base classifiers (SMO,
 * KNN, J48, Logistic and MLP) combined by a majority confidence vote. This is
 * the "Heterogeneous ensemble of standard classification algorithms" (HESCA)
 * described in Lines, Jason &amp; Taylor, Sarah &amp; Bagnall, Anthony. (2018).
 * Time Series Classification with HIVE-COTE: The Hierarchical Vote Collective
 * of Transformation-Based Ensembles. ACM Transactions on Knowledge Discovery
 * from Data. 12. 1-35. 10.1145/3182382.
 *
 * @param seed
 *            Seed used within the classifiers and the majority confidence
 *            voting scheme
 * @param numFolds
 *            Number of folds used within the determination of the classifier
 *            weights for the {@link MajorityConfidenceVote}
 * @return Returns an initialized (but untrained) ensemble model.
 * @throws Exception
 *             Thrown when the initialization has failed
 */
public static Classifier provideCAWPEEnsembleModel(final int seed, final int numFolds) throws Exception {
	Vote ensemble = new MajorityConfidenceVote(numFolds, seed);

	// Linear SVM; calibration models are built so it can output probabilities.
	SMO svm = new SMO();
	svm.turnChecksOff();
	svm.setBuildCalibrationModels(true);
	PolyKernel linearKernel = new PolyKernel();
	linearKernel.setExponent(1);
	svm.setKernel(linearKernel);
	svm.setRandomSeed(seed);

	// KNN (k up to 100, chosen by cross-validation) with unnormalized
	// Euclidean distance.
	IBk knn = new IBk(100);
	knn.setCrossValidate(true);
	EuclideanDistance distance = new EuclideanDistance();
	distance.setDontNormalize(true);
	knn.getNearestNeighbourSearchAlgorithm().setDistanceFunction(distance);

	// C4.5 decision tree.
	J48 decisionTree = new J48();
	decisionTree.setSeed(seed);

	// Member order: SMO, KNN, J48, Logistic, MLP.
	Classifier[] members = { svm, knn, decisionTree, new Logistic(), new MultilayerPerceptron() };
	ensemble.setClassifiers(members);
	return ensemble;
}
 
Example 3
Source File: CCMethodsTests.java    From meka with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Tests the MCC classifier with a calibrated SMO base learner (-M).
 */
public void testMCC() {
	System.out.println("Test MCC");
	SMO baseLearner = new SMO();
	baseLearner.setBuildCalibrationModels(true);
	MCC classifier = new MCC();
	classifier.setClassifier(baseLearner);
	Result result = EvaluationTests.cvEvaluateClassifier(classifier);
	assertEquals("MCC Accuracy Correct", "0.561 +/- 0.035", result.getMeasurement("Accuracy"));
}
 
Example 4
Source File: CCMethodsTests.java    From meka with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Tests the PMCC classifier with a calibrated SMO base learner (-M).
 */
public void testPMCC() {
	System.out.println("Test PMCC");
	SMO baseLearner = new SMO();
	baseLearner.setBuildCalibrationModels(true);
	PMCC classifier = new PMCC();
	classifier.setM(10);
	classifier.setChainIterations(50);
	classifier.setInferenceIterations(20);
	classifier.setClassifier(baseLearner);
	Result result = EvaluationTests.cvEvaluateClassifier(classifier);
	assertEquals("PMCC Accuracy Correct", "0.594 +/- 0.029", result.getMeasurement("Accuracy"));
}
 
Example 5
Source File: CCMethodsTests.java    From meka with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Tests the PCC classifier with a calibrated SMO base learner (-M).
 */
public void testPCC() {
	System.out.println("Test PCC");
	SMO baseLearner = new SMO();
	baseLearner.setBuildCalibrationModels(true);
	PCC classifier = new PCC();
	classifier.setClassifier(baseLearner);
	Result result = EvaluationTests.cvEvaluateClassifier(classifier);
	assertEquals("PCC Accuracy Correct", "0.565 +/- 0.032", result.getMeasurement("Accuracy"));
}
 
Example 6
Source File: CCMethodsTests.java    From meka with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Tests the CT classifier with a calibrated SMO base learner (-M).
 */
public void testCT() {
	System.out.println("Test CT");
	SMO baseLearner = new SMO();
	baseLearner.setBuildCalibrationModels(true);
	CT classifier = new CT();
	classifier.setClassifier(baseLearner);
	classifier.setInferenceIterations(10);
	classifier.setChainIterations(10);
	Result result = EvaluationTests.cvEvaluateClassifier(classifier);
	// Expected string deliberately contains a double space after "0.56".
	assertEquals("CT Accuracy Correct", "0.56  +/- 0.034", result.getMeasurement("Accuracy"));
}
 
Example 7
Source File: CCMethodsTests.java    From meka with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Tests the CDT classifier with a calibrated SMO base learner (-M).
 */
public void testCDT() {
	System.out.println("Test CDT");
	SMO baseLearner = new SMO();
	baseLearner.setBuildCalibrationModels(true);
	CDT classifier = new CDT();
	classifier.setClassifier(baseLearner);
	Result result = EvaluationTests.cvEvaluateClassifier(classifier);
	assertEquals("CDT Accuracy Correct", "0.519 +/- 0.039", result.getMeasurement("Accuracy"));
}
 
Example 8
Source File: EnsembleProvider.java    From AILibs with GNU Affero General Public License v3.0 4 votes vote down vote up
/**
 * Initializes the HIVE COTE ensemble consisting of 7 classifiers using a
 * majority voting strategy as described in J. Lines, S. Taylor and A. Bagnall,
 * "HIVE-COTE: The Hierarchical Vote Collective of Transformation-Based
 * Ensembles for Time Series Classification," 2016 IEEE 16th International
 * Conference on Data Mining (ICDM), Barcelona, 2016, pp. 1041-1046. doi:
 * 10.1109/ICDM.2016.0133.
 *
 * The number of folds used to determine the classifier weights for the
 * {@link MajorityConfidenceVote} is fixed at 5.
 *
 * @param seed
 *            Seed used within the classifiers and the majority confidence
 *            voting scheme
 * @return Returns the initialized (but untrained) HIVE COTE ensemble model.
 */
public static Classifier provideHIVECOTEEnsembleModel(final long seed) {
	Classifier[] classifiers = new Classifier[7];

	// 5 folds are used for the member-weight determination.
	Vote voter = new MajorityConfidenceVote(5, seed);

	// SMO with a degree-2 polynomial kernel; calibration models are built so
	// the SVM can contribute probability estimates to the vote.
	SMO smop = new SMO();
	smop.turnChecksOff();
	smop.setBuildCalibrationModels(true);
	PolyKernel kernel = new PolyKernel();
	kernel.setExponent(2);
	smop.setKernel(kernel);
	smop.setRandomSeed((int) seed);
	classifiers[0] = smop;

	// Random Forest with 500 trees.
	RandomForest rf = new RandomForest();
	rf.setSeed((int) seed);
	rf.setNumIterations(500);
	classifiers[1] = rf;

	// Rotation Forest with 100 iterations.
	RotationForest rotF = new RotationForest();
	rotF.setSeed((int) seed);
	rotF.setNumIterations(100);
	classifiers[2] = rotF;

	// Nearest neighbour (default IBk settings).
	IBk nn = new IBk();
	classifiers[3] = nn;

	// Naive Bayes.
	NaiveBayes nb = new NaiveBayes();
	classifiers[4] = nb;

	// C4.5 decision tree.
	J48 c45 = new J48();
	c45.setSeed((int) seed);
	classifiers[5] = c45;

	// SMO with a linear (degree-1 polynomial) kernel.
	// NOTE(review): unlike the poly-2 SMO above, no random seed is set here,
	// so this member uses SMO's default seed — confirm whether that is
	// intentional.
	SMO smol = new SMO();
	smol.turnChecksOff();
	smol.setBuildCalibrationModels(true);
	PolyKernel linearKernel = new PolyKernel();
	linearKernel.setExponent(1);
	smol.setKernel(linearKernel);
	classifiers[6] = smol;

	voter.setClassifiers(classifiers);
	return voter;
}