weka.classifiers.Classifier Java Examples

The following examples show how to use weka.classifiers.Classifier. Each example is taken from an open-source project; the source file and license are listed above the code.
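Before the project-specific examples, here is a minimal, self-contained sketch of the core Classifier workflow (load a dataset, build a model, cross-validate, predict a single instance). The ARFF path and the choice of J48 are illustrative assumptions, not taken from any example below.

import weka.classifiers.Classifier;
import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class ClassifierBasics {
    public static void main(String[] args) throws Exception {
        // Load an ARFF file and mark the last attribute as the class (the path is illustrative).
        Instances data = DataSource.read("data/iris.arff");
        data.setClassIndex(data.numAttributes() - 1);

        // Any Classifier implementation can be built on the training data.
        Classifier classifier = new J48();
        classifier.buildClassifier(data);

        // Estimate performance with 10-fold cross-validation on a fresh copy of the classifier.
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(new J48(), data, 10, new java.util.Random(1));
        System.out.println(eval.toSummaryString());

        // Predict the class of a single instance with the trained model.
        Instance first = data.instance(0);
        double predicted = classifier.classifyInstance(first);
        System.out.println("Predicted: " + data.classAttribute().value((int) predicted));
    }
}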
Example #1
Source File: Evaluation.java    From tsml with GNU General Public License v3.0
/**
 * Return the global info (if it exists) for the supplied classifier.
 * 
 * @param classifier the classifier to get the global info for
 * @return the global info (synopsis) for the classifier
 * @throws Exception if there is a problem reflecting on the classifier
 */
protected static String getGlobalInfo(Classifier classifier) throws Exception {
  BeanInfo bi = Introspector.getBeanInfo(classifier.getClass());
  MethodDescriptor[] methods;
  methods = bi.getMethodDescriptors();
  Object[] args = {};
  String result = "\nSynopsis for " + classifier.getClass().getName()
      + ":\n\n";

  for (int i = 0; i < methods.length; i++) {
    String name = methods[i].getDisplayName();
    Method meth = methods[i].getMethod();
    if (name.equals("globalInfo")) {
      String globalInfo = (String) (meth.invoke(classifier, args));
      result += globalInfo;
      break;
    }
  }

  return result;
}
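The reflection above is only needed because the Classifier type itself does not declare globalInfo(); most concrete Weka classifiers expose it as a public method, so for a known class it can be called directly. A minimal sketch, using J48 purely for illustration:

import weka.classifiers.trees.J48;

public class GlobalInfoDemo {
    public static void main(String[] args) {
        // J48 declares a public globalInfo() method, so no reflection is required here.
        System.out.println(new J48().globalInfo());
    }
}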
 
Example #2
Source File: logistic_regression.java    From CompetitiveJava with MIT License
/**
 * This method processes the input datasets and prints the evaluation statistics.
 * 
 * @throws Exception
 */
public static void process() throws Exception {

	Instances trainingDataSet = getDataSet(TRAINING_DATA_SET_FILENAME);
	Instances testingDataSet = getDataSet(TESTING_DATA_SET_FILENAME);
	/** The classifier here is logistic regression. */
	Classifier classifier = new weka.classifiers.functions.Logistic();
	/** Train the algorithm with the training data. */
	classifier.buildClassifier(trainingDataSet);
	/** Evaluate the trained model on the testing data. */
	Evaluation eval = new Evaluation(trainingDataSet);
	eval.evaluateModel(classifier, testingDataSet);
	System.out.println("** Logistic Regression Evaluation with Datasets **");
	System.out.println(eval.toSummaryString());
	System.out.print("The expression for the input data as per the algorithm is ");
	System.out.println(classifier);

	Instance predictionDataSet = getDataSet(PREDICTION_DATA_SET_FILENAME).lastInstance();
	double value = classifier.classifyInstance(predictionDataSet);
	/** Prediction Output */
	System.out.println(value);
}
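The getDataSet(...) helper is not included in this snippet; a plausible minimal implementation, assuming ARFF input with the class as the last attribute, is sketched below (an assumption, not the original project's code):

/** Hypothetical helper (not shown in the original file): loads an ARFF file
 *  and marks its last attribute as the class attribute. */
private static Instances getDataSet(String fileName) throws Exception {
	weka.core.converters.ConverterUtils.DataSource source =
			new weka.core.converters.ConverterUtils.DataSource(fileName);
	Instances dataSet = source.getDataSet();
	dataSet.setClassIndex(dataSet.numAttributes() - 1);
	return dataSet;
}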
 
Example #3
Source File: PredictByDomainOS.java    From recon with GNU General Public License v2.0
private static boolean predictOneFlow(String line, String domainOS) {
	if (!domainOSModel.containsKey(domainOS))
		return false;
	else {
		try {
			Classifier classifier = domainOSModel.get(domainOS);
			Map<String, Integer> fi = domainOSFeature.get(domainOS);
			Instances structure = domainOSStruct.get(domainOS);
			Instance current = getInstance(line, fi, fi.size());

			Instances is = new Instances(structure);
			is.setClassIndex(is.numAttributes() - 1);
			is.add(current);
			current = is.get(is.size() - 1);
			current.setClassMissing();
			double predicted = classifier.classifyInstance(current);
			if (predicted > 0) {
				return true;
			} else
				return false;
		} catch (Exception e) {
			e.printStackTrace();
		}
	}
	return false;
}
 
Example #4
Source File: CostCurve.java    From tsml with GNU General Public License v3.0
/**
  * Tests the CostCurve generation from the command line.
  * The classifier is currently hardcoded. Pipe in an arff file.
  *
  * @param args currently ignored
  */
 public static void main(String [] args) {

   try {
     
     Instances inst = new Instances(new java.io.InputStreamReader(System.in));
     
     inst.setClassIndex(inst.numAttributes() - 1);
     CostCurve cc = new CostCurve();
     EvaluationUtils eu = new EvaluationUtils();
     Classifier classifier = new weka.classifiers.functions.Logistic();
     FastVector predictions = new FastVector();
     for (int i = 0; i < 2; i++) { // Do two runs.
       eu.setSeed(i);
       predictions.appendElements(eu.getCVPredictions(classifier, inst, 10));
       //System.out.println("\n\n\n");
     }
     Instances result = cc.getCurve(predictions);
     System.out.println(result);
     
   } catch (Exception ex) {
     ex.printStackTrace();
   }
 }
 
Example #5
Source File: PolarityClassifier.java    From sentiment-analysis with Apache License 2.0
/** Initializes the MNB and SVM classifiers by loading the previously generated models. */
private void initializeClassifiers(){
	mnb_classifiers = new Classifier[3];
	try {
		mnb_classifiers[0] = (Classifier) weka.core.SerializationHelper.read(folder+"/models/text.model");
		mnb_classifiers[1] = (Classifier) weka.core.SerializationHelper.read(folder+"/models/feature.model");
		mnb_classifiers[2] = (Classifier) weka.core.SerializationHelper.read(folder+"/models/complex.model");
		lexicon_classifier = (LibSVM) weka.core.SerializationHelper.read(folder+"/models/lexicon.model");
		BufferedReader trdr = new BufferedReader(new FileReader(new File(folder+"/train/T.arff")));
		BufferedReader frdr = new BufferedReader(new FileReader(new File(folder+"/train/F.arff")));
		BufferedReader crdr = new BufferedReader(new FileReader(new File(folder+"/train/C.arff")));
		training_text = new Instances(trdr);
		training_feature = new Instances(frdr);
		training_complex = new Instances(crdr);
		trdr.close();
		frdr.close();
		crdr.close();
	} catch (Exception e) {
		e.printStackTrace();
	}
}
 
Example #6
Source File: ClassifierResults.java    From tsml with GNU General Public License v3.0
public void setNonResourceDetails(final Classifier classifier, final Instances data) {
    setDatasetName(data.relationName());
    if(classifier instanceof EnhancedAbstractClassifier) {
        setClassifierName(((EnhancedAbstractClassifier) classifier).getClassifierName());
        setFoldID(((EnhancedAbstractClassifier) classifier).getSeed());
    } else {
        setClassifierName(classifier.getClass().getSimpleName());
    }
    if(classifier instanceof Randomizable) {
        setFoldID(((Randomizable) classifier).getSeed());
    }
    if(classifier instanceof OptionHandler) {
        setParas(StrUtils.join(",", ((OptionHandler) classifier).getOptions()));
    }
    setOs(SysUtils.getOsName());
    setCpuInfo(SysUtils.findCpuInfo());
}
 
Example #7
Source File: AbstractOutput.java    From tsml with GNU General Public License v3.0
/**
 * Prints the classifications to the buffer.
 * 
 * @param classifier the classifier to use for printing the classifications
 * @param testset the test instances
 * @throws Exception if check fails or error occurs during printing of
 *           classifications
 */
public void printClassifications(Classifier classifier, Instances testset)
    throws Exception {
  int i;

  if (classifier instanceof BatchPredictor) {
    double[][] predictions = ((BatchPredictor) classifier)
        .distributionsForInstances(testset);
    for (i = 0; i < testset.numInstances(); i++) {
      printClassification(predictions[i], testset.instance(i), i);
    }
  } else {
    for (i = 0; i < testset.numInstances(); i++)
      doPrintClassification(classifier, testset.instance(i), i);
  }
}
 
Example #8
Source File: BoTSWEnsemble.java    From tsml with GNU General Public License v3.0
public static void main(String[] args) throws Exception {
    //Minimum working example
    String dataset = "ItalyPowerDemand";
    Instances train = DatasetLoading.loadDataNullable("C:\\TSC Problems\\"+dataset+"\\"+dataset+"_TRAIN.arff");
    Instances test = DatasetLoading.loadDataNullable("C:\\TSC Problems\\"+dataset+"\\"+dataset+"_TEST.arff");

    Classifier c = new BoTSWEnsemble();
    ((BoTSWEnsemble)c).dist = BoTSW.DistFunction.BOSS_DISTANCE;
    c.buildClassifier(train);
    double accuracy = ClassifierTools.accuracy(test, c);

    System.out.println("BoTSWEnsemble accuracy on " + dataset + " fold 0 = " + accuracy);

    //Other examples/tests
    //detailedFold0Test(dataset);
    //resampleTest(dataset, 25);
}
 
Example #9
Source File: Predictor.java    From browserprint with MIT License
public static void initialise(String browserModelFilePath, String osModelFilePath, String fontsPath) throws Exception{
	browserAttributes = new ArrayList<Attribute>();
	osAttributes = new ArrayList<Attribute>();
	browserClassAttribute = new Attribute("className", browserGroupsWeCareAbout);
	osClassAttribute = new Attribute("className", osGroupsWeCareAbout);
	browserAttributes.add(browserClassAttribute);
	osAttributes.add(osClassAttribute);
	for(int i = 1; i <= 5300; ++i){
		browserAttributes.add(new Attribute(Integer.toString(i)));
		osAttributes.add(new Attribute(Integer.toString(i)));
	}
	
	browserClassifier = (Classifier) SerializationHelper.read(browserModelFilePath);
	osClassifier = (Classifier) SerializationHelper.read(osModelFilePath);
	
	BrowserOsGuessFingerprintNumericRepresentation.initialise(fontsPath);
}
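A hedged sketch of how a classifier and attribute list loaded this way could be applied to a single fingerprint; the method name, parameters, and the assumption that the class attribute sits at index 0 are illustrative, not part of the original Predictor class (standard weka.core imports assumed):

/** Hypothetical usage: classify one fingerprint with a deserialized model. */
public static String predictBrowser(Classifier browserClassifier,
		ArrayList<Attribute> browserAttributes, double[] featureValues) throws Exception {
	// Build an empty dataset header from the attribute list; the class attribute
	// was added first in initialise(), so it sits at index 0.
	Instances structure = new Instances("browserFingerprints", browserAttributes, 0);
	structure.setClassIndex(0);

	// featureValues must supply one slot per attribute; the class slot is overwritten as missing.
	Instance fingerprint = new DenseInstance(1.0, featureValues);
	fingerprint.setDataset(structure);
	fingerprint.setClassMissing();

	// Ask the loaded model for a class index and map it back to its nominal label.
	double predictedIndex = browserClassifier.classifyInstance(fingerprint);
	return structure.classAttribute().value((int) predictedIndex);
}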
 
Example #10
Source File: CollectiveClassifierSplitEvaluator.java    From collective-classification-weka-package with GNU General Public License v3.0
/**
 * Set the Classifier to use, given its class name. A new classifier will be
 * instantiated.
 * 
 * @param newClassifierName the Classifier class name.
 * @throws Exception if the class name is invalid.
 */
@Override
public void setClassifierName(String newClassifierName) throws Exception {

  try {
    setClassifier((Classifier) Class.forName(newClassifierName).newInstance());
  } catch (Exception ex) {
    throw new Exception("Can't find Classifier with class name: "
        + newClassifierName);
  }
}
 
Example #11
Source File: SingleTestSetEvaluator.java    From tsml with GNU General Public License v3.0
@Override
public synchronized ClassifierResults evaluate(Classifier classifier, Instances dataset) throws Exception {

    final Instances insts = cloneData ? new Instances(dataset) : dataset;

    ClassifierResults res = new ClassifierResults(insts.numClasses());
    res.setTimeUnit(TimeUnit.NANOSECONDS);
    res.setClassifierName(classifier.getClass().getSimpleName());
    res.setDatasetName(dataset.relationName());
    res.setFoldID(seed);
    res.setSplit("train"); //todo revisit, or leave with the assumption that calling method will set this to test when needed

    res.turnOffZeroTimingsErrors();
    for (Instance testinst : insts) {
        double trueClassVal = testinst.classValue();
        if (setClassMissing)
            testinst.setClassMissing();

        long startTime = System.nanoTime();
        double[] dist = classifier.distributionForInstance(testinst);
        long predTime = System.nanoTime() - startTime;

        res.addPrediction(trueClassVal, dist, indexOfMax(dist), predTime, "");
    }

    res.turnOnZeroTimingsErrors();

    res.finaliseResults();
    res.findAllStatsOnce();
    
    return res;
}
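indexOfMax(...) is a utility from the surrounding tsml codebase; a hypothetical stand-in with equivalent intent (ties broken by taking the first maximum) might look like this:

/** Hypothetical stand-in for the indexOfMax utility used above: returns the
 *  index of the largest value in a predicted probability distribution. */
static int indexOfMax(double[] dist) {
    int maxIndex = 0;
    for (int i = 1; i < dist.length; i++)
        if (dist[i] > dist[maxIndex])
            maxIndex = i;
    return maxIndex;
}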
 
Example #12
Source File: MCTreeNodeReD.java    From AILibs with GNU Affero General Public License v3.0
public MCTreeNodeReD(final Classifier innerNodeClassifier, final List<Collection<String>> childClasses, final List<Classifier> childClassifier) {
	this();
	if (childClasses.size() != childClassifier.size()) {
		throw new IllegalArgumentException("Number of child classes does not equal the number of child classifiers");
	}
	this.innerNodeClassifier = innerNodeClassifier;
	for (int i = 0; i < childClasses.size(); i++) {
		this.addChild(new ArrayList<>(childClasses.get(i)), childClasses.get(i).size() > 1 ? childClassifier.get(i) : new ConstantClassifier());
	}
}
 
Example #13
Source File: Decision.java    From AILibs with GNU Affero General Public License v3.0
public Decision(Set<String> lft, Set<String> rgt, EMCNodeType classificationType, Classifier baseClassifier) {
	super();
	this.lft = lft;
	this.rgt = rgt;
	this.classificationType = classificationType;
	this.baseClassifier = baseClassifier;
}
 
Example #14
Source File: AdvancedCollective.java    From collective-classification-weka-package with GNU General Public License v3.0
/**
 * Set the base learner. Creates copies of the given classifier for the two
 * classifiers used internally. <br/>
 * Note: also resets the flag indicating whether the classifier has been built so far.
 *
 * @param newClassifier the classifier to use.
 * @see #m_ClassifierBuilt
 */
public void setClassifier(Classifier newClassifier) {
  super.setClassifier(newClassifier);

  try {
    m_Classifier2 = AbstractClassifier.makeCopies(newClassifier, 1)[0];
  }
  catch (Exception e) {
    e.printStackTrace();
    m_Classifier  = new weka.classifiers.trees.J48();
    m_Classifier2 = new weka.classifiers.trees.J48();
  }
}
 
Example #15
Source File: PLSTTest.java    From meka with GNU General Public License v3.0
/**
 * Creates a default classifier.
 *
 * @return the classifier
 */
@Override
public Classifier getClassifier() {
	PLST plst =  new PLST();
	plst.setSize(3);
	return plst;
}
 
Example #16
Source File: RelExTool.java    From Criteria2Query with Apache License 2.0
public static void saveModel(Classifier classifier, String modelName) {
    try {
        SerializationHelper.write(modelName, classifier);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
 
Example #17
Source File: CAWPE.java    From tsml with GNU General Public License v3.0
/**
 * Uses the 'basic UCI' set up, minus Logistic:
 * Comps: SVML, MLP, NN, C4.5
 * Weight: TrainAcc(4) (train accuracies to the power 4)
 * Vote: MajorityConfidence (summing probability distributions)
 */
public final void setupDefaultSettings_NoLogistic() {
    this.ensembleName = "CAWPE-NoLogistic";
    
    this.weightingScheme = new TrainAcc(4);
    this.votingScheme = new MajorityConfidence();
    
    CrossValidationEvaluator cv = new CrossValidationEvaluator(seed, false, false, false, false); 
    cv.setNumFolds(10);
    this.trainEstimator = cv; 
    
    Classifier[] classifiers = new Classifier[4];
    String[] classifierNames = new String[4];

    SMO smo = new SMO();
    smo.turnChecksOff();
    smo.setBuildLogisticModels(true);
    PolyKernel kl = new PolyKernel();
    kl.setExponent(1);
    smo.setKernel(kl);
    smo.setRandomSeed(seed);
    classifiers[0] = smo;
    classifierNames[0] = "SVML";

    kNN k=new kNN(100);
    k.setCrossValidate(true);
    k.normalise(false);
    k.setDistanceFunction(new EuclideanDistance());
    classifiers[1] = k;
    classifierNames[1] = "NN";

    classifiers[2] = new J48();
    classifierNames[2] = "C4.5";

    classifiers[3] = new MultilayerPerceptron();
    classifierNames[3] = "MLP";

    setClassifiers(classifiers, classifierNames, null);
}
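A brief usage sketch for this configuration; train is assumed to be an already-loaded weka.core.Instances training set with its class index set, and CAWPE's default constructor is assumed:

CAWPE ensemble = new CAWPE();
ensemble.setupDefaultSettings_NoLogistic(); // the configuration defined above
ensemble.buildClassifier(train);            // train: assumed pre-loaded Instances
double prediction = ensemble.classifyInstance(train.instance(0));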
 
Example #18
Source File: ClassifierLists.java    From tsml with GNU General Public License v3.0
/**
 * setClassifier takes the experimental arguments themselves, so the classifiers
 * can take whatever they need from them, e.g. the dataset name, the fold id,
 * separate checkpoint paths, etc.
 *
 * To take this idea further, each of the TSC-specific classifiers could/should
 * have a constructor and/or factory that builds the classifier from the
 * experimental args.
 *
 * The previous usage was setClassifier(String classifierName, int fold);
 * this can be reproduced with setClassifierClassic below.
 */
public static Classifier setClassifier(Experiments.ExperimentalArguments exp){
    String classifier=exp.classifierName;
    Classifier c = null;
    if(distanceBased.contains(classifier))
        c=setDistanceBased(exp);
    else if(dictionaryBased.contains(classifier))
        c=setDictionaryBased(exp);
    else if(intervalBased.contains(classifier))
        c=setIntervalBased(exp);
    else if(frequencyBased.contains(classifier))
        c=setFrequencyBased(exp);
    else if(shapeletBased.contains(classifier))
        c=setShapeletBased(exp);
    else if(hybridBased.contains(classifier))
        c=setHybridBased(exp);
    else if(multivariateBased.contains(classifier))
        c=setMultivariate(exp);
    else if(standardClassifiers.contains(classifier))
        c=setStandardClassifiers(exp);
    else if(bespokeClassifiers.contains(classifier))
        c=setBespokeClassifiers(exp);
    else{
        System.out.println("Unknown classifier "+classifier+" it is not in any of the sublists ");
        throw new UnsupportedOperationException("Unknown classifier "+classifier+" it is not in any of the sublists on ClassifierLists ");
    }
    if(c instanceof Randomizable)
        ((Randomizable)c).setSeed(exp.foldId);
    return c;
}
 
Example #19
Source File: Maniac.java    From meka with GNU General Public License v3.0
/**
 * Changes the default classifier to CR with Linear Regression as its base,
 * since this classifier uses numeric values in the compressed labels.
 */
protected Classifier getDefaultClassifier() {
	CR cr = new CR();
	LinearRegression lr = new LinearRegression();
	cr.setClassifier(lr);
	return cr;
}
 
Example #20
Source File: SimpleFlipper.java    From collective-classification-weka-package with GNU General Public License v3.0
/**
 * Returns the (possibly) new class label.
 * @param c           the Classifier to use for prediction
 * @param instances   the instances to use for flipping
 * @param from        the index at which flipping starts
 * @param count       the number of instances to flip
 * @param index       the index of the instance to flip
 * @param history     the flipping history
 * @return            the (possibly) new class label
 */
@Override
public double flipLabel( Classifier c, Instances instances, 
                         int from, int count, int index,
                         FlipHistory history ) {
  double[]        dist;
  double          result;
  
  // get distribution 
  try {
    dist = c.distributionForInstance(instances.instance(index));
  }
  catch (Exception e) {
    e.printStackTrace();
    return instances.instance(index).classValue();
  }
  
  // flip label
  if (m_Random.nextDouble() < dist[0])
    result = 0;
  else
    result = 1;

  // history
  history.add(instances.instance(index), dist);

  return result;
}
 
Example #21
Source File: Segmenter.java    From gsn with GNU General Public License v3.0
public SegmentedClassifier computeErrors(Instances i,Double[] seg) throws Exception{
	Classifier cl = Tools.getClassifierById(model);
	Filter f = new DummyFilter();
	f.setInputFormat(i);
	SegmentedClassifier sc = new SegmentedClassifier(cl, 1, seg,f);
	sc.buildClassifier(i);	
	i.sort(0);
	Pred_errors = Tools.get_errors(sc, i); 
	return sc;

}
 
Example #22
Source File: SingleTestSetEvaluator.java    From tsml with GNU General Public License v3.0
/**
 * Utility method: builds the classifier on the train set and evaluates it on the test set.
 */
public synchronized ClassifierResults evaluate(Classifier classifier, Instances train, Instances test) throws Exception {
    long buildTime = System.nanoTime();
    classifier.buildClassifier(train);
    buildTime = System.nanoTime() - buildTime;
    
    ClassifierResults res = evaluate(classifier, test);
    
    res.turnOffZeroTimingsErrors();
    res.setBuildTime(buildTime);
    res.turnOnZeroTimingsErrors();
    
    return res;
}
 
Example #23
Source File: LabelTransformationClassifier.java    From meka with GNU General Public License v3.0
@Override
public void setClassifier(Classifier newClassifier) {
	if (newClassifier instanceof MultiLabelClassifier)
		super.setClassifier(newClassifier);
	else
		System.err.println(
				"Base classifier must implement " + MultiLabelClassifier.class.getName()
				+ ", provided: " + newClassifier.getClass().getName());
}
 
Example #24
Source File: BasicReproductionTests.java    From tsml with GNU General Public License v3.0
public static boolean confirmAllExpectedResultReproductions() throws Exception {
    System.out.println("--confirmAllExpectedResultReproductions()");

    File[] expectedResults = FileHandlingTools.listFiles(reproductionDirectory);
    if (expectedResults == null) 
        throw new Exception("No expected results saved to compare to, dir="+reproductionDirectory);
    
    List<String> failedClassifiers = new ArrayList<>();
    
    for (File expectedResultFile : expectedResults) {
        ExpectedClassifierResults expres = new ExpectedClassifierResults(expectedResultFile);
        
        Classifier c = constructClassifier(expres.fullClassifierName);
        ClassifierResults newres = ClassifierTools.testUtils_evalOnIPD(c);
        
        if (expres.equal(newres))
            System.out.println("\t" + expres.simpleClassifierName + " all good, parity with results created " + expres.dateTime);
        else {
            System.out.println("\t" + expres.simpleClassifierName + " was NOT recreated successfully! no parity with results created " + expres.dateTime);
            failedClassifiers.add(expres.simpleClassifierName);
        }
    }
    
    if (failedClassifiers.size() > 0) {
        System.out.println("\n\n\n");
        System.out.println("Failing classifiers = " + failedClassifiers);
        return false;
    }
    return true;
}
 
Example #25
Source File: RnnSequenceClassifierAbstractTest.java    From wekaDeeplearning4j with GNU General Public License v3.0
@Override
public Classifier getClassifier() {
  RnnSequenceClassifier rnn = new RnnSequenceClassifier();
  RnnOutputLayer ol = new RnnOutputLayer();
  rnn.setLayers(ol);
  rnn.setNumEpochs(1);
  rnn.setEarlyStopping(new EarlyStopping(0, 0));
  return rnn;
}
 
Example #26
Source File: MLPlan4BigFileInputTester.java    From AILibs with GNU Affero General Public License v3.0
@Test
public void test() throws Exception {
	// MLPlan4BigFileInput mlplan = new MLPlan4BigFileInput(new File("testrsc/openml/41103.arff"));

	String origDataSrcName = "testrsc/openml/1240.arff";

	if (true) {
		Instances data = new Instances(new FileReader(new File(origDataSrcName)));
		data.setClassIndex(data.numAttributes() - 1);
		List<Instances> split = WekaUtil.getStratifiedSplit(data, 0, .7f);
		ArffSaver saver = new ArffSaver();
		saver.setInstances(split.get(0));
		saver.setFile(new File(origDataSrcName + ".train"));
		saver.writeBatch();
		saver.setInstances(split.get(1));
		saver.setFile(new File(origDataSrcName + ".test"));
		saver.writeBatch();
		System.exit(0);
	}

	MLPlan4BigFileInput mlplan = new MLPlan4BigFileInput(new File(origDataSrcName + ".train"));
	mlplan.setTimeout(new Timeout(5, TimeUnit.MINUTES));
	mlplan.setLoggerName("testedalgorithm");
	long start = System.currentTimeMillis();
	Classifier c = mlplan.call();
	System.out.println("Observed output: " + c + " after " + (System.currentTimeMillis() - start) + "ms. Now validating the model");

	/* check quality */
	Instances testData = new Instances(new FileReader(new File(origDataSrcName + ".test")));
	testData.setClassIndex(testData.numAttributes() - 1);
	Evaluation eval = new Evaluation(testData);
	eval.evaluateModel(c, testData);
	System.out.println(eval.toSummaryString());

	assertNotNull(c);
}
 
Example #27
Source File: AutoFEWekaPipelineTest.java    From AILibs with GNU Affero General Public License v3.0
@Test
public void testAutoFEWekaPipelineClone() throws Exception {

	Graph<IFilter> graph = new Graph<>();
	PretrainedNNFilter nnFilter = ImageUtils.getPretrainedNNFilterByName("VGG16", 5,
			DataSetUtilsTest.CIFAR10_INPUT_SHAPE);
	graph.addItem(nnFilter);

	FilterPipeline fp = new FilterPipeline(null, graph);

	AutoFEWekaPipeline pipeline = new AutoFEWekaPipeline(fp, new RandomForest());
	Classifier clonedClassifier = WekaUtil.cloneClassifier(pipeline);
	Assert.assertNotNull(clonedClassifier);
}
 
Example #28
Source File: ElasticEnsemble.java    From tsml with GNU General Public License v3.0
private static ElasticEnsemble forEachTunedConstituent(ElasticEnsemble elasticEnsemble, Consumer<RLTunedKNNSetup> consumer) {
    for(Classifier classifier : elasticEnsemble.getConstituents()) {
        if(!(classifier instanceof RLTunedClassifier)) {
            continue;
        }
        RLTunedClassifier tuner = (RLTunedClassifier) classifier;
        RLTunedKNNSetup config = (RLTunedKNNSetup) tuner.getTrainSetupFunction();
        consumer.accept(config);
    }
    return elasticEnsemble;
}
 
Example #29
Source File: StrUtils.java    From tsml with GNU General Public License v3.0
public static AbstractClassifier classifierFromClassifierLists(String name) {
    Experiments.ExperimentalArguments experimentalArguments = new Experiments.ExperimentalArguments();
    experimentalArguments.foldId = 0;
    experimentalArguments.classifierName = name;
    Classifier classifier = ClassifierLists.setClassifier(experimentalArguments);
    return (AbstractClassifier) classifier;
}
 
Example #30
Source File: WekaUtil.java    From AILibs with GNU Affero General Public License v3.0
public static Classifier cloneClassifier(final Classifier c) throws Exception {
	Method cloneMethod = MethodUtils.getAccessibleMethod(c.getClass(), "clone");
	if (cloneMethod != null) {
		return (Classifier) cloneMethod.invoke(c);
	}
	return AbstractClassifier.makeCopy(c);
}
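A short usage sketch for cloneClassifier; data is assumed to be an already-loaded Instances object with its class index set:

// Usage sketch: clone a trained classifier before re-configuring or re-building it.
Classifier trained = new weka.classifiers.trees.J48();
trained.buildClassifier(data); // data: assumed pre-loaded Instances with class index set
Classifier copy = WekaUtil.cloneClassifier(trained);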