Java Code Examples for weka.classifiers.AbstractClassifier#forName()

The following examples show how to use weka.classifiers.AbstractClassifier#forName() . You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: MLExperimentTester.java    From AILibs with GNU Affero General Public License v3.0 7 votes vote down vote up
@Override
public void evaluate(final ExperimentDBEntry experimentEntry, final IExperimentIntermediateResultProcessor processor) throws ExperimentEvaluationFailedException {
	try {
		if (config.getDatasetFolder() == null || !config.getDatasetFolder().exists()) {
			throw new IllegalArgumentException("config specifies invalid dataset folder " + config.getDatasetFolder());
		}

		/* read the key fields describing this experiment instance */
		Map<String, String> keyFields = experimentEntry.getExperiment().getValuesOfKeyFields();
		int seed = Integer.parseInt(keyFields.get("seed"));

		/* instantiate the classifier under test and load the ARFF dataset (class = last attribute) */
		Classifier baseClassifier = AbstractClassifier.forName(keyFields.get("classifier"), null);
		File arffFile = new File(config.getDatasetFolder() + File.separator + keyFields.get("dataset") + ".arff");
		Instances wekaData = new Instances(new BufferedReader(new FileReader(arffFile)));
		wekaData.setClassIndex(wekaData.numAttributes() - 1);

		logger.info("Testing classifier {}", baseClassifier.getClass().getName());

		/* evaluate on a single random 70/30 split seeded by the experiment seed */
		ILabeledDataset<? extends ILabeledInstance> dataset = new WekaInstances(wekaData);
		SingleRandomSplitClassifierEvaluator evaluator = new SingleRandomSplitClassifierEvaluator(dataset, .7, new Random(seed));
		double measuredLoss = evaluator.evaluate(new WekaClassifier(baseClassifier));

		/* report the loss back to the experiment framework */
		Map<String, Object> results = new HashMap<>();
		results.put("loss", measuredLoss);
		processor.processResults(results);
		this.conductedExperiment = true;
	} catch (Exception e) {
		throw new ExperimentEvaluationFailedException(e);
	}
}
 
Example 2
Source File: CDN.java    From meka with GNU General Public License v3.0 6 votes vote down vote up
/**
 * Builds the Conditional Dependency Network: one probabilistic base model per
 * label, where model j is trained to predict label j given the features and
 * all remaining labels.
 *
 * @param D the multi-label training data (labels occupy the first classIndex() attributes)
 * @throws Exception if a base model cannot be created or trained
 */
@Override
public void buildClassifier(Instances D) throws Exception {
	testCapabilities(D);

	int N = D.numInstances(); // kept for parity with the original; not used below
	int L = D.classIndex();

	h = new Classifier[L];
	m_R = new Random(m_S);
	D_templates = new Instances[L];

	// Build L probabilistic models, each to predict Y_j | X, Y_{-j}; save the templates.
	for (int label = 0; label < L; label++) {
		// View of D whose class attribute is label 'label' (all other labels remain inputs).
		Instances view = new Instances(D);
		view.setClassIndex(label);
		D_templates[label] = view;
		// Fresh copy of the configured base classifier (same class, same options).
		h[label] = AbstractClassifier.forName(getClassifier().getClass().getName(), ((AbstractClassifier) getClassifier()).getOptions());
		h[label].buildClassifier(view);
	}
}
 
Example 3
Source File: WekaClassifierTest.java    From AILibs with GNU Affero General Public License v3.0 6 votes vote down vote up
/**
 * Verifies that the WekaClassifier wrapper produces exactly the same
 * predictions as the raw WEKA classifier it wraps.
 */
@Test
public void testFit() throws Exception {
	WekaClassifier wrappedClassifier = new WekaClassifier("weka.classifiers.trees.RandomForest", new String[] {});
	Classifier referenceClassifier = AbstractClassifier.forName("weka.classifiers.trees.RandomForest", null);

	/* train the wrapper and the reference classifier on the same data */
	IWekaInstances dataset = new WekaInstances(ArffDatasetAdapter.readDataset(new File("testrsc/dataset/arff/numeric_only_with_classindex.arff")));
	wrappedClassifier.fit(dataset);
	referenceClassifier.buildClassifier(dataset.getInstances());

	/* the wrapper's batch predictions must coincide with per-instance reference predictions */
	ISingleLabelClassificationPredictionBatch predictions = wrappedClassifier.predict(dataset);
	int numPredictions = predictions.size();
	assertEquals(dataset.size(), numPredictions);
	for (int i = 0; i < numPredictions; i++) {
		assertEquals(referenceClassifier.classifyInstance(dataset.get(i).getElement()), predictions.get(i));
	}
}
 
Example 4
Source File: CCUpdateable.java    From meka with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Builds the j-th link of an (updateable) classifier chain: trains a copy of
 * the base classifier to predict label chain[j], with all labels that come
 * later in the chain removed from the training data, then recursively builds
 * the next link.
 *
 * @param chain the permutation of label indices defining the chain order
 * @param j     position of this link within the chain
 * @param train the full training set (copied, never modified)
 * @throws Exception if copying or training the base classifier fails
 */
public ULink(int chain[], int j, Instances train) throws Exception {
	this.j = j;

	this.index = chain[j];

	// sort out excludes [4|5,1,0,2,3]
	this.excld = Arrays.copyOfRange(chain,j+1,chain.length); 
	// sort out excludes [0,1,2,3,5]
	Arrays.sort(this.excld); 

	// fresh copy of the configured base classifier (same class, same options)
	this.classifier = (AbstractClassifier)AbstractClassifier.forName(getClassifier().getClass().getName(),((AbstractClassifier)getClassifier()).getOptions());

	Instances new_train = new Instances(train);

	// delete all except one (leaving a binary problem)
	if(getDebug()) System.out.print(" "+this.index);
	new_train.setClassIndex(-1); 
	// delete all the attributes (and track where our index ends up)
	this.value = chain[j];
	int c_index = value; 
	for(int i = excld.length-1; i >= 0; i--) {
		new_train.deleteAttributeAt(excld[i]);
		// each deletion before our target shifts its position one to the left
		if (excld[i] < this.index)
			c_index--; 
	}
	new_train.setClassIndex(c_index); 

	// empty copy (0 instances) of the transformed header — presumably used as a template at prediction time
	_template = new Instances(new_train,0);

	this.classifier.buildClassifier(new_train);
	new_train = null;

	// NOTE(review): ++j mutates the local parameter before recursing — works, but relies on j not being used afterwards
	if(j+1 < chain.length) 
		next = new ULink(chain, ++j, train);
}
 
Example 5
Source File: NestedDichotomyUtil.java    From AILibs with GNU Affero General Public License v3.0 5 votes vote down vote up
/**
 * Creates a binary class split via an RPND splitter seeded with a fresh
 * instance of the named classifier.
 *
 * @param classes        all classes to be split
 * @param s1             classes preassigned to the first side of the split
 * @param s2             classes preassigned to the second side of the split
 * @param rand           source of randomness for the splitter
 * @param classifierName fully-qualified WEKA classifier class name used by the RPND heuristic
 * @param data           the dataset on which the split heuristic operates
 * @return the computed split, or {@code null} if splitting failed (the failure is logged)
 */
public static ClassSplit<String> createGeneralRPNDBasedSplit(final Collection<String> classes, final Collection<String> s1, final Collection<String> s2, final Random rand, final String classifierName, final Instances data) {
	try {
		RPNDSplitter splitter = new RPNDSplitter(rand, AbstractClassifier.forName(classifierName, new String[] {}));
		// (removed a redundant null pre-initialization; the value was always overwritten here)
		Collection<Collection<String>> splitAsCollection = splitter.split(classes, s1, s2, data);
		Iterator<Collection<String>> it = splitAsCollection.iterator();
		return new ClassSplit<>(classes, it.next(), it.next());
	} catch (Exception e) {
		logger.error("Unexpected exception occurred while creating an RPND split", e);
	}
	return null;
}
 
Example 6
Source File: RankingByPairwiseComparison.java    From AILibs with GNU Affero General Public License v3.0 5 votes vote down vote up
/**
 * Fits one binary classifier for every unordered pair of labels.
 * For each pair (i, j) a copy of the filtered dataset is built whose last
 * attribute (the class) is "true" iff label i has a higher value than label j
 * in the original data.
 *
 * @param dataset the training data including the label attributes
 * @param labels  specification of which attributes are labels (resolved via getLabelIndices)
 * @throws Exception if resolving/filtering fails; training failures are wrapped in TrainingException
 */
public void fit(final Instances dataset, final int labels) throws Exception {
	this.labelIndices = getLabelIndices(labels, dataset);
	this.labelIndices.stream().map(x -> dataset.attribute(x).name()).forEach(this.labelSet::add);
	Instances plainPWDataset = this.applyFiltersToDataset(dataset);

	try {
		// one pairwise classifier per unordered label pair (i, j) with i < j
		for (int i = 0; i < this.labelIndices.size() - 1; i++) {
			for (int j = i + 1; j < this.labelIndices.size(); j++) {

				PairWiseClassifier pwc = new PairWiseClassifier();
				pwc.a = dataset.attribute(this.labelIndices.get(i)).name();
				pwc.b = dataset.attribute(this.labelIndices.get(j)).name();

				// fresh base learner per pair (instantiated with no options)
				pwc.c = AbstractClassifier.forName(this.config.getBaseLearner(), null);

				Instances pwDataset = new Instances(plainPWDataset);

				// binary target: "true" iff label i outranks label j on this instance
				for (int k = 0; k < pwDataset.size(); k++) {
					String value;
					if (dataset.get(k).value(this.labelIndices.get(i)) > dataset.get(k).value(this.labelIndices.get(j))) {
						value = "true";
					} else {
						value = "false";
					}
					pwDataset.get(k).setValue(pwDataset.numAttributes() - 1, value);
				}
				pwDataset.setClassIndex(pwDataset.numAttributes() - 1);

				pwc.c.buildClassifier(pwDataset);
				this.pwClassifiers.add(pwc);
			}
		}
	} catch (Exception e) {
		throw new TrainingException("Could not build ranker", e);
	}
}
 
Example 7
Source File: WekaClassifier.java    From AILibs with GNU Affero General Public License v3.0 5 votes vote down vote up
/**
 * Builds a WEKA pipeline consisting of attribute selection (search strategy +
 * attribute evaluator) followed by a classifier, each instantiated by name
 * with its option list.
 *
 * @throws Exception if any of the three components cannot be instantiated
 */
public static WekaClassifier createPipeline(final String searcher, final List<String> searcherOptions, final String evaluator, final List<String> evaluatorOptions, final String classifier, final List<String> classifierOptions)
		throws Exception {
	String[] searcherArgs = searcherOptions.toArray(new String[0]);
	String[] evaluatorArgs = evaluatorOptions.toArray(new String[0]);
	String[] classifierArgs = classifierOptions.toArray(new String[0]);

	ASSearch searchStrategy = ASSearch.forName(searcher, searcherArgs);
	ASEvaluation attributeEvaluator = ASEvaluation.forName(evaluator, evaluatorArgs);
	Classifier baseClassifier = AbstractClassifier.forName(classifier, classifierArgs);
	return new WekaClassifier(new MLPipeline(searchStrategy, attributeEvaluator, baseClassifier));
}
 
Example 8
Source File: WekaClassifier.java    From AILibs with GNU Affero General Public License v3.0 5 votes vote down vote up
public WekaClassifier(final String name, final String[] options) {
	this.name = name;
	try {
		this.wrappedClassifier = AbstractClassifier.forName(name, options);
	} catch (Exception e) {
		throw new IllegalArgumentException("Could not find classifier for name " + name + " or could not set its options to " + Arrays.toString(options), e);
	}
}
 
Example 9
Source File: Util.java    From AILibs with GNU Affero General Public License v3.0 5 votes vote down vote up
/**
 * Runs a single one-step-reduction experiment: computes an RPND-based binary
 * class split of the dataset and evaluates an MCTreeNodeReD classifier on
 * 10 stratified 70/30 train/test splits.
 *
 * @param experiment the experiment definition (dataset path, seed, classifier names)
 * @return one result map per repetition, containing "errorRate" and the training time
 * @throws Exception if loading the data or evaluating the classifier fails
 */
public static List<Map<String, Object>> conductSingleOneStepReductionExperiment(final ReductionExperiment experiment) throws Exception {
	/* load data */
	Instances data = new Instances(new BufferedReader(new FileReader(experiment.getDataset())));
	data.setClassIndex(data.numAttributes() - 1);

	/* prepare basis for experiments */
	int seed = experiment.getSeed();
	Classifier classifierForRPNDSplit = AbstractClassifier.forName(experiment.getNameOfInnerClassifier(), null);
	Classifier leftClassifier = AbstractClassifier.forName(experiment.getNameOfLeftClassifier(), null);
	Classifier innerClassifier = AbstractClassifier.forName(experiment.getNameOfInnerClassifier(), null);
	Classifier rightClassifier = AbstractClassifier.forName(experiment.getNameOfRightClassifier(), null);

	RPNDSplitter splitter = new RPNDSplitter(new Random(seed), classifierForRPNDSplit);

	/* conduct experiments */
	List<Map<String, Object>> results = new ArrayList<>();
	for (int k = 0; k < 10; k++) {
		List<Collection<String>> classSplit;
		try {
			// a fresh class split is computed per repetition
			classSplit = new ArrayList<>(splitter.split(data));
		} catch (Exception e) {
			throw new RuntimeException("Could not create RPND split.", e);
		}
		MCTreeNodeReD classifier = new MCTreeNodeReD(innerClassifier, classSplit.get(0), leftClassifier, classSplit.get(1), rightClassifier);
		long start = System.currentTimeMillis();
		Map<String, Object> result = new HashMap<>();
		// vary the split seed per repetition so each run sees a different 70/30 split
		List<Instances> dataSplit = WekaUtil.getStratifiedSplit(data, (seed + k), .7);
		classifier.buildClassifier(dataSplit.get(0));
		long time = System.currentTimeMillis() - start;
		Evaluation eval = new Evaluation(dataSplit.get(0));
		eval.evaluateModel(classifier, dataSplit.get(1));
		// loss = error rate in [0, 1]
		double loss = (100 - eval.pctCorrect()) / 100f;
		logger.info("Conducted experiment {} with split {}/{}. Loss: {}. Time: {}ms.", k, classSplit.get(0), classSplit.get(1), loss, time);
		result.put("errorRate", loss);
		result.put(LABEL_TRAIN_TIME, time);
		results.add(result);
	}
	return results;
}
 
Example 10
Source File: Evaluation.java    From collective-classification-weka-package with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Evaluates a classifier with the options given in an array of strings.
 * 
 * @param classifierString class of machine learning classifier as a string
 * @param options the array of string containing the options
 * @throws Exception if model could not be evaluated successfully
 * @return a string describing the results
 */
public static String evaluateModel(String classifierString, String[] options) throws Exception {
  Classifier classifier;

  try {
    classifier = AbstractClassifier.forName(classifierString, null);
  } 
  catch (Exception e) {
    // Chain the original exception so the actual instantiation failure is not lost.
    throw new Exception("Can't find class with name " + classifierString + '.', e);
  }

  return evaluateModel(classifier, options);
}
 
Example 11
Source File: CCp.java    From meka with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Builds the j-th link of a (probabilistic) classifier chain: trains a copy of
 * the base classifier to predict label chain[j], with all labels that come
 * later in the chain removed from the training data, then recursively builds
 * the next link.
 *
 * @param chain the permutation of label indices defining the chain order
 * @param j     position of this link within the chain
 * @param train the full training set (copied, never modified)
 * @throws Exception if copying or training the base classifier fails
 */
public Link(int chain[], int j, Instances train) throws Exception {
	this.j = j;

	this.index = chain[j];

	// sort out excludes [4|5,1,0,2,3]
	this.excld = Arrays.copyOfRange(chain,j+1,chain.length); 
	// sort out excludes [0,1,2,3,5]
	Arrays.sort(this.excld); 

	// fresh copy of the configured base classifier (same class, same options)
	this.classifier = (AbstractClassifier)AbstractClassifier.forName(getClassifier().getClass().getName(),((AbstractClassifier)getClassifier()).getOptions());

	Instances new_train = new Instances(train);

	// delete all except one (leaving a binary problem)
	if(getDebug()) System.out.print(" "+this.index);
	new_train.setClassIndex(-1); 
	// delete all the attributes (and track where our index ends up)
	int c_index = chain[j]; 
	for(int i = excld.length-1; i >= 0; i--) {
		new_train.deleteAttributeAt(excld[i]);
		// each deletion before our target shifts its position one to the left
		if (excld[i] < this.index)
			c_index--; 
	}
	new_train.setClassIndex(c_index); 

	// empty copy (0 instances) of the transformed header — presumably used as a template at prediction time
	_template = new Instances(new_train,0);

	this.classifier.buildClassifier(new_train);
	new_train = null;

	// NOTE(review): ++j mutates the local parameter before recursing — works, but relies on j not being used afterwards
	if(j+1 < chain.length) 
		next = new meka.classifiers.multitarget.CCp.Link(chain, ++j, train);
}
 
Example 12
Source File: MBR.java    From meka with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Builds the two-stage Meta-BR model: a base BR layer trained on the original
 * data, and a meta BR layer trained on the data augmented with the base
 * layer's per-label confidence outputs.
 *
 * @param data the multi-label training set (labels occupy the first classIndex() attributes)
 * @throws Exception if either BR layer fails to build
 */
@Override
public void buildClassifier(Instances data) throws Exception {
  	testCapabilities(data);
  	
	int c = data.classIndex();

	// Base BR

	if (getDebug()) System.out.println("Build BR Base ("+c+" models)");
	// fresh copy of the configured BR classifier (same class, same options)
	m_BASE = (BR)AbstractClassifier.forName(getClassifier().getClass().getName(),((AbstractClassifier)getClassifier()).getOptions());
	m_BASE.buildClassifier(data);

	// Meta BR

	if (getDebug()) System.out.println("Prepare Meta data           ");
	Instances meta_data = new Instances(data);

	// binary {0,1} value set shared by all inserted meta attributes
	FastVector BinaryClass = new FastVector(c);
	BinaryClass.addElement("0");
	BinaryClass.addElement("1");

	// insert one meta attribute per label at position c (right after the original labels)
	for(int i = 0; i < c; i++) {
		meta_data.insertAttributeAt(new Attribute("metaclass"+i,BinaryClass),c);
	}

	// fill the meta attributes with the base layer's confidence outputs per instance
	for(int i = 0; i < data.numInstances(); i++) {
		double cfn[] = m_BASE.distributionForInstance(data.instance(i));
		for(int a = 0; a < cfn.length; a++) {
			meta_data.instance(i).setValue(a+c,cfn[a]);
		}
	}

	meta_data.setClassIndex(c);
	// empty copy (0 instances) of the meta header — presumably used as a template at prediction time
	m_InstancesTemplate = new Instances(meta_data, 0);

	if (getDebug()) System.out.println("Build BR Meta ("+c+" models)");

	// second fresh copy of the configured BR classifier for the meta layer
	m_META = (BR)AbstractClassifier.forName(getClassifier().getClass().getName(),((AbstractClassifier)getClassifier()).getOptions());
	m_META.buildClassifier(meta_data);
}
 
Example 13
Source File: MCTreeNodeReD.java    From AILibs with GNU Affero General Public License v3.0 4 votes vote down vote up
/**
 * Convenience constructor that instantiates the inner-node classifier by class
 * name (with an empty option array) and delegates to the main constructor.
 *
 * @throws Exception if the named classifier cannot be instantiated
 */
public MCTreeNodeReD(final String innerNodeClassifier, final Collection<String> leftChildClasses, final Classifier leftChildClassifier, final Collection<String> rightChildClasses, final Classifier rightChildClassifier)
		throws Exception {
	this(AbstractClassifier.forName(innerNodeClassifier, new String[] {}), leftChildClasses, leftChildClassifier, rightChildClasses, rightChildClassifier);
}
 
Example 14
Source File: CCq.java    From meka with GNU General Public License v3.0 4 votes vote down vote up
/**
 * Builds the j-th link of a downsampled classifier chain: trains a copy of the
 * base classifier to predict label chain[j] (with all later chain labels
 * removed), after randomly discarding a fraction of the negative examples,
 * then recursively builds the next link.
 *
 * @param chain the permutation of label indices defining the chain order
 * @param j     position of this link within the chain
 * @param train the full training set (copied, never modified)
 * @throws Exception if copying or training the base classifier fails
 */
public QLink(int chain[], int j, Instances train) throws Exception {
	this.j = j;

	this.index = chain[j];

	// sort out excludes [4|5,1,0,2,3]
	this.excld = Arrays.copyOfRange(chain,j+1,chain.length); 
	// sort out excludes [0,1,2,3,5]
	Arrays.sort(this.excld); 

	// fresh copy of the configured base classifier (same class, same options)
	this.classifier = AbstractClassifier.forName(getClassifier().getClass().getName(),((AbstractClassifier)getClassifier()).getOptions());

	Instances new_train = new Instances(train);

	// delete all except one (leaving a binary problem)
	if(getDebug()) System.out.print(" "+this.index);
	new_train.setClassIndex(-1); 
	// delete all the attributes (and track where our index ends up)
	int c_index = chain[j]; 
	for(int i = excld.length-1; i >= 0; i--) {
		new_train.deleteAttributeAt(excld[i]);
		// each deletion before our target shifts its position one to the left
		if (excld[i] < this.index)
			c_index--; 
	}
	new_train.setClassIndex(c_index); 

	/* BEGIN downsample for this link */
	new_train.randomize(m_Random);
	int numToRemove = new_train.numInstances() - (int)Math.round(new_train.numInstances() * m_DownSampleRatio);
	// NOTE(review): when numToRemove == 0, the first negative instance is still marked
	// missing before the break (++removed >= 0 is always true) — confirm this is intended.
	for(int i = 0, removed = 0; i < new_train.numInstances(); i++) {
		if (new_train.instance(i).classValue() <= 0.0) {
			new_train.instance(i).setClassMissing();
			if (++removed >= numToRemove)
				break;
		}
	}
	new_train.deleteWithMissingClass();
	/* END downsample for this link */

	// empty copy (0 instances) of the transformed header — presumably used as a template at prediction time
	_template = new Instances(new_train,0);

	this.classifier.buildClassifier(new_train);
	new_train = null;

	// NOTE(review): ++j mutates the local parameter before recursing — works, but relies on j not being used afterwards
	if(j+1 < chain.length) 
		next = new QLink(chain, ++j, train);
}
 
Example 15
Source File: MCTreeNodeReD.java    From AILibs with GNU Affero General Public License v3.0 4 votes vote down vote up
/**
 * Convenience constructor that instantiates both child classifiers by class
 * name (with no options) and delegates to the String/Classifier constructor.
 *
 * @throws Exception if either named classifier cannot be instantiated
 */
public MCTreeNodeReD(final String innerNodeClassifier, final Collection<String> leftChildClasses, final String leftChildClassifier, final Collection<String> rightChildClasses, final String rightChildClassifier) throws Exception {
	this(innerNodeClassifier, leftChildClasses, AbstractClassifier.forName(leftChildClassifier, null), rightChildClasses, AbstractClassifier.forName(rightChildClassifier, null));
}
 
Example 16
Source File: MCTreeNode.java    From AILibs with GNU Affero General Public License v3.0 4 votes vote down vote up
/**
 * Convenience constructor that instantiates the node's classifier by class
 * name (with no options) and delegates to the main constructor.
 *
 * @throws Exception if the named classifier cannot be instantiated
 */
public MCTreeNode(final List<Integer> containedClasses, final EMCNodeType nodeType, final String classifierID) throws Exception {
	this(containedClasses, nodeType, AbstractClassifier.forName(classifierID, null));
}
 
Example 17
Source File: Util.java    From AILibs with GNU Affero General Public License v3.0 4 votes vote down vote up
/**
 * Runs an ensemble-of-reductions experiment: builds a majority-vote ensemble
 * of MCTreeNodeReD stumps (each over a fresh RPND class split) and evaluates
 * it on 10 stratified 70/30 train/test splits.
 *
 * @param experiment the experiment definition (dataset path, seed, classifier name, stump count)
 * @return one result map per repetition, containing "errorRate" and the training time
 * @throws Exception if loading the data, splitting, or evaluating fails
 */
public static List<Map<String, Object>> conductEnsembleOfOneStepReductionsExperiment(final EnsembleOfSimpleOneStepReductionsExperiment experiment) throws Exception {

		/* load data */
		Instances data = new Instances(new BufferedReader(new FileReader(experiment.getDataset())));
		data.setClassIndex(data.numAttributes() - 1);

		/* prepare basis for experiments */
		int seed = experiment.getSeed();
		String classifier = experiment.getNameOfClassifier();
		RPNDSplitter splitter = new RPNDSplitter(new Random(seed), AbstractClassifier.forName(classifier, null));

		/* conduct experiments */
		List<Map<String, Object>> results = new ArrayList<>();
		for (int k = 0; k < 10; k++) {

			// majority-vote ensemble over the stumps built below
			Vote ensemble = new Vote();
			ensemble.setOptions(new String[] { "-R", "MAJ" });
			long start = System.currentTimeMillis();
			// vary the split seed per repetition so each run sees a different 70/30 split
			List<Instances> dataSplit = WekaUtil.getStratifiedSplit(data, (seed + k), .7);
			for (int i = 0; i < experiment.getNumberOfStumps(); i++) {

				List<Collection<String>> classSplit;
				// a fresh RPND class split is computed per stump
				classSplit = new ArrayList<>(splitter.split(data));
				MCTreeNodeReD tree = new MCTreeNodeReD(classifier, classSplit.get(0), classifier, classSplit.get(1), classifier);
				tree.buildClassifier(dataSplit.get(0));
				ensemble.addPreBuiltClassifier(tree);
			}
			Map<String, Object> result = new HashMap<>();
			result.put(LABEL_TRAIN_TIME, System.currentTimeMillis() - start);

			/* now evaluate the ensemble */
			// NOTE(review): buildClassifier is called on the FULL data here while the stumps were
			// trained on the train fold only — confirm this is intended and does not leak test data.
			ensemble.buildClassifier(data);
			Evaluation eval = new Evaluation(dataSplit.get(0));
			eval.evaluateModel(ensemble, dataSplit.get(1));
			// loss = error rate in [0, 1]
			double loss = (100 - eval.pctCorrect()) / 100f;
			logger.info("Conducted experiment {}. Loss: {}. Time: {}ms.", k, loss, result.get(LABEL_TRAIN_TIME));
			result.put("errorRate", loss);
			results.add(result);
		}
		return results;
	}