weka.classifiers.bayes.NaiveBayes Java Examples

The following examples show how to use weka.classifiers.bayes.NaiveBayes. Each example links back to its original project and source file.
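For orientation, here is a minimal, self-contained sketch of the typical train-and-predict workflow with NaiveBayes (the file path data/iris.arff and the class name are placeholders):

import weka.classifiers.bayes.NaiveBayes;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class NaiveBayesQuickStart {
    public static void main(String[] args) throws Exception {
        // Load an ARFF file; the path is a placeholder.
        Instances data = new DataSource("data/iris.arff").getDataSet();
        // Weka does not infer the class attribute, so set it explicitly (here: the last attribute).
        data.setClassIndex(data.numAttributes() - 1);

        NaiveBayes nb = new NaiveBayes();
        nb.buildClassifier(data);

        // Predict the class of the first instance and print its label.
        double label = nb.classifyInstance(data.instance(0));
        System.out.println(data.classAttribute().value((int) label));
    }
}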
Example #1
Source File: TestWekaBayes.java    From Java-Data-Analysis with MIT License
public static void main(String[] args) throws Exception {
    DataSource source = new DataSource("data/AnonFruit.arff");
    Instances train = source.getDataSet();
    train.setClassIndex(3);  // target attribute: Sweet

    // Build the model
    NaiveBayes model = new NaiveBayes();
    model.buildClassifier(train);

    // Evaluate on the training set itself (an optimistic estimate; see the note below)
    Instances test = train;
    Evaluation eval = new Evaluation(test);
    eval.evaluateModel(model, test);
    List<Prediction> predictions = eval.predictions();  // full prediction objects, if needed

    // Print actual vs. predicted class per instance, flagging mismatches with *
    int k = 0;
    for (Instance instance : test) {
        double actual = instance.classValue();
        double prediction = eval.evaluateModelOnce(model, instance);
        System.out.printf("%2d.%4.0f%4.0f", ++k, actual, prediction);
        System.out.println(prediction != actual ? " *" : "");
    }
}
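Note that this example evaluates the model on its own training set, which gives an optimistic accuracy estimate. A minimal sketch of 10-fold cross-validation instead, using the same Evaluation class (the random seed 1 is an arbitrary choice):

Evaluation cvEval = new Evaluation(train);
cvEval.crossValidateModel(new NaiveBayes(), train, 10, new java.util.Random(1));
System.out.println(cvEval.toSummaryString());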
 
Example #2
Source File: DevelopClassifier.java    From Hands-On-Artificial-Intelligence-with-Java-for-Beginners with MIT License
/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    try {
        DataSource src = new DataSource("/Users/admin/Documents/NetBeansProjects/DevelopClassifier/vote.arff");
        Instances dt = src.getDataSet();
        dt.setClassIndex(dt.numAttributes() - 1);

        // Configure J48 with confidence factor 0.1 and a minimum of 2 instances per leaf
        String[] options = new String[4];
        options[0] = "-C";
        options[1] = "0.1";
        options[2] = "-M";
        options[3] = "2";
        J48 tree = new J48();
        tree.setOptions(options);
        tree.buildClassifier(dt);
        System.out.println(tree.getCapabilities().toString());
        System.out.println(tree.graph());

        // The following three lines build a Naive Bayes classifier on the same data
        NaiveBayes nb = new NaiveBayes();
        nb.buildClassifier(dt);
        System.out.println(nb.getCapabilities().toString());
    }
    catch (Exception e) {
        System.out.println("Error!\n" + e.getMessage());
    }
}
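Building the options array by hand works, but the same flags can be parsed from a single string with weka.core.Utils.splitOptions, which is easier to keep in sync with Weka's command-line documentation. A short sketch:

J48 tree = new J48();
tree.setOptions(weka.core.Utils.splitOptions("-C 0.1 -M 2"));  // throws Exception on malformed options
tree.buildClassifier(dt);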
 
Example #3
Source File: WekaCVTest.java    From Java-Data-Science-Cookbook with MIT License
public void generateModel(){
	nb = new NaiveBayes();
	try {
		nb.buildClassifier(iris);  // 'iris' is an Instances field loaded elsewhere in the class
	} catch (Exception e) {
		e.printStackTrace();  // surface build failures instead of swallowing them silently
	}
}
 
Example #4
Source File: MultivariateShapeletTransformClassifier.java    From tsml with GNU General Public License v3.0
/**
 * Configures the default ensemble of classifiers used in the HIVE-COTE paper:
 * SVMQ, RandF, RotF, NN, NB, C4.5 and SVML.
 */
    public void configureDefaultEnsemble(){
        ensemble=new CAWPE();
        ensemble.setWeightingScheme(new TrainAcc(4));
        ensemble.setVotingScheme(new MajorityConfidence());
        Classifier[] classifiers = new Classifier[7];
        String[] classifierNames = new String[7];
        
        SMO smo = new SMO();
        smo.turnChecksOff();
        smo.setBuildLogisticModels(true);
        PolyKernel kl = new PolyKernel();
        kl.setExponent(2);
        smo.setKernel(kl);
        if (seedClassifier)
            smo.setRandomSeed((int)seed);
        classifiers[0] = smo;
        classifierNames[0] = "SVMQ";

        RandomForest r = new RandomForest();
        r.setNumTrees(500);
        if (seedClassifier)
            r.setSeed((int)seed);
        classifiers[1] = r;
        classifierNames[1] = "RandF";

        RotationForest rf = new RotationForest();
        rf.setNumIterations(100);
        if (seedClassifier)
            rf.setSeed((int)seed);
        classifiers[2] = rf;
        classifierNames[2] = "RotF";

        IBk nn = new IBk();
        classifiers[3] = nn;
        classifierNames[3] = "NN";

        NaiveBayes nb = new NaiveBayes();
        classifiers[4] = nb;
        classifierNames[4] = "NB";

        J48 c45 = new J48();
        classifiers[5] = c45;
        classifierNames[5] = "C45";
        SMO svml = new SMO();
        svml.turnChecksOff();
        svml.setBuildLogisticModels(true);
        PolyKernel k2 = new PolyKernel();
        k2.setExponent(1);
        svml.setKernel(k2);  // the linear kernel belongs to svml, not the quadratic smo above
        classifiers[6] = svml;
        classifierNames[6] = "SVML";
        ensemble.setClassifiers(classifiers, classifierNames, null);
    }
 
Example #5
Source File: CAWPE.java    From tsml with GNU General Public License v3.0
/**
     * Components: NN, SVML, SVMQ, C4.5, NB, RotF, RandF
     * Weighting: TrainAcc
     * Voting: MajorityVote
     *
     * As used originally in ST_HESCA and COTE.
     * NOTE: the original also contained Bayes Net (BN). We have removed it because the classifier crashes
     * unpredictably when discretising features (due to a lack of variance in the feature, which is not easily
     * detected and dealt with).
     */
    public final void setupOriginalHESCASettings() {
        this.ensembleName = "HESCA";
        
        this.weightingScheme = new TrainAcc();
        this.votingScheme = new MajorityVote();
        
        CrossValidationEvaluator cv = new CrossValidationEvaluator(seed, false, false, false, false); 
        cv.setNumFolds(10);
        this.trainEstimator = cv; 
        int numClassifiers=7;
        Classifier[] classifiers = new Classifier[numClassifiers];
        String[] classifierNames = new String[numClassifiers];

        kNN k = new kNN(100);
        k.setCrossValidate(true);
        k.normalise(false);
        k.setDistanceFunction(new EuclideanDistance());
        classifiers[0] = k;
        classifierNames[0] = "NN";

        classifiers[1] = new NaiveBayes();
        classifierNames[1] = "NB";

        classifiers[2] = new J48();
        classifierNames[2] = "C45";

        SMO svml = new SMO();
        svml.turnChecksOff();
        PolyKernel kl = new PolyKernel();
        kl.setExponent(1);
        svml.setKernel(kl);
        svml.setRandomSeed(seed);
        classifiers[3] = svml;
        classifierNames[3] = "SVML";

        SMO svmq = new SMO();
        // Assumes no missing values, all real-valued attributes and a discrete class variable
        svmq.turnChecksOff();
        PolyKernel kq = new PolyKernel();
        kq.setExponent(2);
        svmq.setKernel(kq);
        svmq.setRandomSeed(seed);
        classifiers[4] = svmq;
        classifierNames[4] = "SVMQ";

        RandomForest r = new RandomForest();
        r.setNumTrees(500);
        r.setSeed(seed);
        classifiers[5] = r;
        classifierNames[5] = "RandF";

        RotationForest rf = new RotationForest();
        rf.setNumIterations(50);
        rf.setSeed(seed);
        classifiers[6] = rf;
        classifierNames[6] = "RotF";

//        classifiers[7] = new BayesNet();
//        classifierNames[7] = "bayesNet";

        setClassifiers(classifiers, classifierNames, null);
    }
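If Bayes Net were re-enabled, one possible workaround for the zero-variance crash described in the note above is to drop constant attributes beforehand with Weka's RemoveUseless filter. A sketch, assuming train holds the training Instances:

import weka.filters.Filter;
import weka.filters.unsupervised.attribute.RemoveUseless;

RemoveUseless ru = new RemoveUseless();            // removes attributes that do not vary at all
ru.setInputFormat(train);
Instances filtered = Filter.useFilter(train, ru);  // train the ensemble on 'filtered' instead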
 
Example #6
Source File: WekaTrainTest.java    From Java-Data-Science-Cookbook with MIT License
public void loadModel(String modelPath){
	try {
		nb = (NaiveBayes) weka.core.SerializationHelper.read(modelPath);
	} catch (Exception e) {
		e.printStackTrace();  // report a failed model load instead of failing silently
	}
}
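The matching write side is symmetric; a sketch assuming the same nb field and path:

public void saveModel(String modelPath){
	try {
		weka.core.SerializationHelper.write(modelPath, nb);
	} catch (Exception e) {
		e.printStackTrace();
	}
}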
 
Example #7
Source File: EnsembleProvider.java    From AILibs with GNU Affero General Public License v3.0
/**
 * Initializes the HIVE-COTE ensemble consisting of 7 classifiers using a
 * majority confidence voting strategy as described in J. Lines, S. Taylor and A. Bagnall,
 * "HIVE-COTE: The Hierarchical Vote Collective of Transformation-Based
 * Ensembles for Time Series Classification," 2016 IEEE 16th International
 * Conference on Data Mining (ICDM), Barcelona, 2016, pp. 1041-1046. doi:
 * 10.1109/ICDM.2016.0133.
 *
 * @param seed
 *            Seed used within the classifiers and the majority confidence
 *            voting scheme
 * @return Returns the initialized (but untrained) HIVE-COTE ensemble model.
 */
public static Classifier provideHIVECOTEEnsembleModel(final long seed) {
	Classifier[] classifier = new Classifier[7];

	Vote voter = new MajorityConfidenceVote(5, seed);  // 5 folds used to determine the member weights

	// SMO poly2
	SMO smop = new SMO();
	smop.turnChecksOff();
	smop.setBuildCalibrationModels(true);
	PolyKernel kernel = new PolyKernel();
	kernel.setExponent(2);
	smop.setKernel(kernel);
	smop.setRandomSeed((int)seed);
	classifier[0] = smop;

	// Random Forest
	RandomForest rf = new RandomForest();
	rf.setSeed((int)seed);
	rf.setNumIterations(500);
	classifier[1] = rf;

	// Rotation forest
	RotationForest rotF = new RotationForest();
	rotF.setSeed((int)seed);
	rotF.setNumIterations(100);
	classifier[2] = rotF;

	// NN
	IBk nn = new IBk();
	classifier[3] = nn;

	// Naive Bayes
	NaiveBayes nb = new NaiveBayes();
	classifier[4] = nb;

	// C45
	J48 c45 = new J48();
	c45.setSeed((int)seed);
	classifier[5] = c45;

	// SMO linear
	SMO smol = new SMO();
	smol.turnChecksOff();
	smol.setBuildCalibrationModels(true);
	PolyKernel linearKernel = new PolyKernel();
	linearKernel.setExponent(1);
	smol.setKernel(linearKernel);
	classifier[6] = smol;

	voter.setClassifiers(classifier);
	return voter;
}
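The returned Vote ensemble behaves like any other Weka classifier. A minimal usage sketch, assuming data is a loaded Instances object with its class index set:

Classifier hiveCote = provideHIVECOTEEnsembleModel(42L);
hiveCote.buildClassifier(data);  // trains all seven member classifiers
double prediction = hiveCote.classifyInstance(data.instance(0));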
 
Example #8
Source File: Util.java    From recon with GNU General Public License v2.0
/**
 * Get a classifier's fully qualified class name from its short name.
 */
public static String getClassifierClassName(String classifierName) {
	String className = "";
	switch (classifierName) {
	case "SGD":
		className = SGD.class.toString();
		break;
	case "SGDText":
		className = SGDText.class.toString();
		break;
	case "J48":
		className = J48.class.toString();
		break;
	case "PART":
		className = PART.class.toString();
		break;
	case "NaiveBayes":
		className = NaiveBayes.class.toString();
		break;
	case "NBUpdateable":
		className = NaiveBayesUpdateable.class.toString();
		break;
	case "AdaBoostM1":
		className = AdaBoostM1.class.toString();
		break;
	case "LogitBoost":
		className = LogitBoost.class.toString();
		break;
	case "Bagging":
		className = Bagging.class.toString();
		break;
	case "Stacking":
		className = Stacking.class.toString();
		break;
	case "AdditiveRegression":
		className = AdditiveRegression.class.toString();
		break;
	case "Apriori":
		className = Apriori.class.toString();
		break;
	default:
		className = SGD.class.toString();
	}
	// Class.toString() returns e.g. "class weka.classifiers.bayes.NaiveBayes";
	// strip the leading "class " (6 characters) to keep only the fully qualified name.
	className = className.substring(6);
	return className;
}
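A common next step with such a fully qualified name is instantiation through Weka's factory method (AbstractClassifier.forName in Weka 3.7+). A sketch with an empty options array:

String className = getClassifierClassName("NaiveBayes");
Classifier clf = weka.classifiers.AbstractClassifier.forName(className, new String[0]);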
 
Example #9
Source File: KddCup.java    From Machine-Learning-in-Java with MIT License
public static void main(String args[]) throws Exception {
	
	/*
	 * Build Naive Bayes baseline
	 */
	Classifier baselineNB = new NaiveBayes();

	double resNB[] = evaluate(baselineNB);
	System.out.println("Naive Bayes\n" +
						"\tchurn:     " + resNB[0] + "\n" +
						"\tappetency: " + resNB[1] + "\n" +
						"\tup-sell:   " + resNB[2] + "\n" +
						"\toverall:   " + resNB[3] + "\n");

	/*
	 * Ensemble method
	 */
	EnsembleLibrary ensembleLib = new EnsembleLibrary();
	
	// Decision trees
	ensembleLib.addModel("weka.classifiers.trees.J48 -S -C 0.25 -B -M 2");
	ensembleLib.addModel("weka.classifiers.trees.J48 -S -C 0.25 -B -M 2 -A");
	
	// naive Bayes
	ensembleLib.addModel("weka.classifiers.bayes.NaiveBayes");
	
	// k-nn
	ensembleLib.addModel("weka.classifiers.lazy.IBk");

	// AdaBoost
	ensembleLib.addModel("weka.classifiers.meta.AdaBoostM1");
	
	// LogitBoost
	ensembleLib.addModel("weka.classifiers.meta.LogitBoost");

	// SVM
	ensembleLib.addModel("weka.classifiers.functions.SMO");

	// Logistic regression
	ensembleLib.addModel("weka.classifiers.functions.Logistic");
	
	// Simple logistic regression
	ensembleLib.addModel("weka.classifiers.functions.SimpleLogistic");

	
	EnsembleLibrary.saveLibrary(new File("data/ensembleLib.model.xml"), ensembleLib, null);
	System.out.println(ensembleLib.getModels());
	
	EnsembleSelection ensembleSel = new EnsembleSelection();
	ensembleSel.setOptions(new String[]{
			 "-L", "data/ensembleLib.model.xml", // </path/to/modelLibrary> - Specifies the Model Library File, containing the list of all models.
			 "-W", "data/esTmp", // </path/to/working/directory> - Specifies the Working Directory, where all models will be stored.
			 "-B", "10", // <numModelBags> - Set the number of bags, i.e., number of iterations to run the ensemble selection algorithm.
			 "-E", "1.0", // <modelRatio> - Set the ratio of library models that will be randomly chosen to populate each bag of models.
			 "-V", "0.25", // <validationRatio> - Set the ratio of the training data set that will be reserved for validation.
			 "-H", "100", // <hillClimbIterations> - Set the number of hillclimbing iterations to be performed on each model bag.
			 "-I", "1.0", // <sortInitialization> - Set the ratio of the ensemble library that the sort initialization algorithm will be able to choose from while initializing the ensemble for each model bag.
			 "-X", "2", // <numFolds> - Sets the number of cross-validation folds.
			 "-P", "roc", // <hillclimbMetric> - Specify the metric that will be used for model selection during the hillclimbing algorithm.
			 "-A", "forward", // <algorithm> - Specifies the algorithm to be used for ensemble selection.
			 "-R", "true", // - Flag whether or not models can be selected more than once for an ensemble.
			 "-G", "true", // - Whether sort initialization greedily stops adding models when performance degrades.
			 "-O", "true", // - Flag for verbose output. Prints out performance of all selected models.
			 "-S", "1", // <num> - Random number seed.
			 "-D", "true" // - If set, classifier is run in debug mode and may output additional info to the console.
	});
	
	double resES[] = evaluate(ensembleSel);
	System.out.println("Ensemble\n" + "\tchurn:     " + resES[0] + "\n"
			+ "\tappetency: " + resES[1] + "\n" + "\tup-sell:   "
			+ resES[2] + "\n" + "\toverall:   " + resES[3] + "\n");
	

}
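The evaluate helper is not shown in this excerpt, and the real one returns a double[4] over the four targets (churn, appetency, up-sell, overall). As a purely hypothetical single-target sketch of the core step, a 10-fold cross-validated AUC could be computed like this:

static double evaluateOneTarget(Classifier clf, Instances data) throws Exception {
	Evaluation eval = new Evaluation(data);
	eval.crossValidateModel(clf, data, 10, new java.util.Random(1));
	// Treating the class value "1" as the positive label is an assumption about the data.
	return eval.areaUnderROC(data.classAttribute().indexOfValue("1"));
}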