weka.classifiers.functions.LinearRegression Java Examples

The following examples show how to use weka.classifiers.functions.LinearRegression. They are taken from open-source projects; each example lists its source file and license.
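Before the project-specific examples, a minimal self-contained sketch of typical usage may help. It assumes an ARFF file with a numeric class attribute; the file name housing.arff is a placeholder.

import weka.classifiers.functions.LinearRegression;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class LinearRegressionSketch {
    public static void main(String[] args) throws Exception {
        // load a dataset whose last attribute is the numeric target (file name is a placeholder)
        Instances data = DataSource.read("housing.arff");
        data.setClassIndex(data.numAttributes() - 1);

        // build the linear model and print its coefficients
        LinearRegression model = new LinearRegression();
        model.buildClassifier(data);
        System.out.println(model);

        // predict the target value of the first instance
        double prediction = model.classifyInstance(data.instance(0));
        System.out.println("Predicted value: " + prediction);
    }
}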
Example #1
Source File: MultiLinearRegression.java    From tsml with GNU General Public License v3.0
@Override
public void buildClassifier(Instances data) throws Exception {
    // create a copy of the data whose nominal class is replaced by a numeric class attribute
    numericClassInsts = new Instances(data);
    numericClassInsts.setClassIndex(0); // temporarily move the class index off the nominal class
    numericClassInsts.deleteAttributeAt(numericClassInsts.numAttributes()-1); // drop the nominal class
    Attribute newClassAtt = new Attribute("newClassVal"); // new numeric class attribute
    numericClassInsts.insertAttributeAt(newClassAtt, numericClassInsts.numAttributes());
    numericClassInsts.setClassIndex(numericClassInsts.numAttributes()-1); // point at the numeric class

    // build one regressor per class (one-vs-rest)
    regressors = new LinearRegression[data.numClasses()];
    double[] trueClassVals = data.attributeToDoubleArray(data.classIndex());
    for (int c = 0; c < data.numClasses(); c++) {

        for (int i = 0; i < numericClassInsts.numInstances(); i++) {
            // if this instance belongs to the class currently handled (c), set the new class value to 1, otherwise 0
            double cval = trueClassVals[i] == c ? 1 : 0; 
            numericClassInsts.instance(i).setClassValue(cval);
        }    

        regressors[c] = new LinearRegression();
        regressors[c].buildClassifier(numericClassInsts);
    }
}
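The buildClassifier above only trains the one-vs-rest regressors; the matching prediction method is not part of this snippet. A hypothetical sketch of how such regressors could be turned into a class distribution follows (the mapping of the incoming instance to the numeric-class format and the normalisation are assumptions, not the project's code):

public double[] distributionForInstance(Instance inst) throws Exception {
    // assumes inst has already been converted to the numeric-class format used in training
    double[] dist = new double[regressors.length];
    double sum = 0;
    for (int c = 0; c < regressors.length; c++) {
        dist[c] = Math.max(0, regressors[c].classifyInstance(inst));
        sum += dist[c];
    }
    if (sum > 0) {
        for (int c = 0; c < dist.length; c++) {
            dist[c] /= sum;
        }
    }
    return dist;
}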
 
Example #2
Source File: StackingC.java    From tsml with GNU General Public License v3.0
/**
 * The constructor.
 */
public StackingC() {
  m_MetaClassifier = new weka.classifiers.functions.LinearRegression();
  ((LinearRegression) getMetaClassifier()).setAttributeSelectionMethod(
      new weka.core.SelectedTag(1, LinearRegression.TAGS_SELECTION));
}
 
Example #3
Source File: StackingC.java    From tsml with GNU General Public License v3.0
/**
 * Process options setting meta classifier.
 * 
 * @param options the meta options to parse
 * @throws Exception if parsing fails
 */
protected void processMetaOptions(String[] options) throws Exception {

  String classifierString = Utils.getOption('M', options);
  String [] classifierSpec = Utils.splitOptions(classifierString);
  if (classifierSpec.length != 0) {
    String classifierName = classifierSpec[0];
    classifierSpec[0] = "";
    setMetaClassifier(AbstractClassifier.forName(classifierName, classifierSpec));
  } else {
    ((LinearRegression) getMetaClassifier()).setAttributeSelectionMethod(
        new weka.core.SelectedTag(1, LinearRegression.TAGS_SELECTION));
  }
}
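For context, the -M value parsed here is a meta-classifier specification (class name followed by its options). A hedged usage sketch, with an illustrative option string that is not taken from the project:

// Illustrative only: set StackingC's meta classifier through the -M option
// (setOptions throws Exception, so call it from a method that declares it).
StackingC stacker = new StackingC();
stacker.setOptions(weka.core.Utils.splitOptions(
    "-M \"weka.classifiers.functions.LinearRegression -S 1\""));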
 
Example #4
Source File: RuleNode.java    From tsml with GNU General Public License v3.0
/**
  * Build a linear model for this node using those attributes
  * specified in indices.
  *
  * @param indices an array of attribute indices to include in the linear
  * model
  * @throws Exception if something goes wrong
  */
 private void buildLinearModel(int [] indices) throws Exception {
   // copy the training instances and remove all but the tested
   // attributes
   Instances reducedInst = new Instances(m_instances);
   Remove attributeFilter = new Remove();
   
   attributeFilter.setInvertSelection(true);
   attributeFilter.setAttributeIndicesArray(indices);
   attributeFilter.setInputFormat(reducedInst);

   reducedInst = Filter.useFilter(reducedInst, attributeFilter);
   
   // build a linear regression for the training data using the
   // tested attributes
   LinearRegression temp = new LinearRegression();
   temp.buildClassifier(reducedInst);

   double [] lmCoeffs = temp.coefficients();
   double [] coeffs = new double [m_instances.numAttributes()];

   for (int i = 0; i < lmCoeffs.length - 1; i++) {
     if (indices[i] != m_classIndex) {
       coeffs[indices[i]] = lmCoeffs[i];
     }
   }
   m_nodeModel = new PreConstructedLinearModel(coeffs, lmCoeffs[lmCoeffs.length - 1]);
   m_nodeModel.buildClassifier(m_instances);
 }
 
Example #5
Source File: WekaLinearRegressionTest.java    From Java-Data-Science-Cookbook with MIT License
public void buildRegression() {
	lReg = new LinearRegression();
	try {
		lReg.buildClassifier(cpu);
	} catch (Exception e) {
		e.printStackTrace(); // report rather than silently swallow the exception
	}
	System.out.println(lReg);
}
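The snippet assumes a field cpu that already holds the dataset; a minimal sketch of how it could be loaded (the file path is a placeholder):

Instances cpu = weka.core.converters.ConverterUtils.DataSource.read("data/cpu.arff");
cpu.setClassIndex(cpu.numAttributes() - 1); // the numeric class attribute is assumed to be last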
 
Example #6
Source File: Maniac.java    From meka with GNU General Public License v3.0
/**
 * Change the default classifier to CR with LinearRegression as the base
 * classifier, since this classifier uses numeric values in the compressed labels.
 */
protected Classifier getDefaultClassifier() {
    CR cr = new CR();
    LinearRegression lr = new LinearRegression();
    cr.setClassifier(lr);
    return cr;
}
 
Example #7
Source File: PLST.java    From meka with GNU General Public License v3.0
/**
 * Change the default classifier to CR with LinearRegression as the base
 * classifier, since this classifier uses numeric values in the compressed labels.
 */
protected Classifier getDefaultClassifier() {
    CR cr = new CR();
    LinearRegression lr = new LinearRegression();
    cr.setClassifier(lr);
    return cr;
}
 
Example #8
Source File: COMT2.java    From bestconf with Apache License 2.0
@Override
public Capabilities getCapabilities() {
	return new LinearRegression().getCapabilities();
}
 
Example #9
Source File: RegressionTask.java    From Machine-Learning-in-Java with MIT License
public static void main(String[] args) throws Exception {

		/*
		 * Load data
		 */
		CSVLoader loader = new CSVLoader();
		loader.setFieldSeparator(",");
		loader.setSource(new File("data/ENB2012_data.csv"));
		Instances data = loader.getDataSet();

		// System.out.println(data);

		/*
		 * Build regression models
		 */
		// set class index to Y1 (heating load)
		data.setClassIndex(data.numAttributes() - 2);
		// remove last attribute Y2
		Remove remove = new Remove();
		remove.setOptions(new String[] { "-R", data.numAttributes() + "" });
		remove.setInputFormat(data);
		data = Filter.useFilter(data, remove);

		// build a regression model
		LinearRegression model = new LinearRegression();
		model.buildClassifier(data);
		System.out.println(model);

		// 10-fold cross-validation
		Evaluation eval = new Evaluation(data);
		eval.crossValidateModel(model, data, 10, new Random(1), new String[] {});
		System.out.println(eval.toSummaryString());
		// model coefficients (the intercept is the last element)
		double[] coef = model.coefficients();
		System.out.println();

		// build a regression tree model (M5P)

		M5P m5p = new M5P();
		m5p.setOptions(new String[] { "" }); // default options
		m5p.buildClassifier(data);
		System.out.println(m5p);

		// 10-fold cross-validation
		eval.crossValidateModel(m5p, data, 10, new Random(1), new String[] {});
		System.out.println(eval.toSummaryString());
		System.out.println();

		/*
		 * Bonus: Build additional models 
		 */
		
		// ZeroR modelZero = new ZeroR();
		//
		// REPTree modelTree = new REPTree();
		// modelTree.buildClassifier(data);
		// System.out.println(modelTree);
		// eval = new Evaluation(data);
		// eval.crossValidateModel(modelTree, data, 10, new Random(1), new
		// String[]{});
		// System.out.println(eval.toSummaryString());
		//
		// SMOreg modelSVM = new SMOreg();
		//
		// MultilayerPerceptron modelPerc = new MultilayerPerceptron();
		//
		// GaussianProcesses modelGP = new GaussianProcesses();
		// modelGP.buildClassifier(data);
		// System.out.println(modelGP);
		// eval = new Evaluation(data);
		// eval.crossValidateModel(modelGP, data, 10, new Random(1), new
		// String[]{});
		// System.out.println(eval.toSummaryString());

		/*
		 * Bonus: Save ARFF
		 */
		// ArffSaver saver = new ArffSaver();
		// saver.setInstances(data);
		// saver.setFile(new File(args[1]));
		// saver.setDestination(new File(args[1]));
		// saver.writeBatch();
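
		/*
		 * Sketch (not part of the original example): query the trained linear
		 * model for a single prediction; the instance index 0 is arbitrary.
		 */
		double predictedY1 = model.classifyInstance(data.instance(0));
		System.out.println("Predicted Y1 (heating load) for the first instance: " + predictedY1);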

	}
 
Example #10
Source File: M5Base.java    From tsml with GNU General Public License v3.0
/**
 * Returns default capabilities of the classifier, i.e., of LinearRegression.
 *
 * @return      the capabilities of this classifier
 */
public Capabilities getCapabilities() {
  return new LinearRegression().getCapabilities();
}