Java Code Examples for weka.core.converters.CSVLoader#setSource()

The following examples show how to use weka.core.converters.CSVLoader#setSource(). They are extracted from open source projects; the original source file and license are listed above each example.
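Before the project examples, here is a minimal, self-contained sketch of the basic pattern: point the loader at a file with setSource(File), then parse the whole file into an Instances object with getDataSet(). The file name data.csv is only a placeholder.

import java.io.File;
import weka.core.Instances;
import weka.core.converters.CSVLoader;

public class CsvLoadSketch {
    public static void main(String[] args) throws Exception {
        CSVLoader loader = new CSVLoader();
        loader.setSource(new File("data.csv"));    // placeholder path
        Instances data = loader.getDataSet();      // batch-load the whole CSV
        System.out.println("Loaded " + data.numInstances() + " instances, "
                + data.numAttributes() + " attributes");
    }
}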
Example 1
Source File: UtilsDataset.java    From apogen with Apache License 2.0
private void convertCSVtoArff(String filename) throws Exception {

		// load CSV
		CSVLoader loader = new CSVLoader();
		loader.setSource(new File(filename));

		// CSV uses no header
		String[] options = new String[1];
		options[0] = "-H";
		loader.setOptions(options);

		Instances data = loader.getDataSet();

		// save ARFF
		ArffSaver saver = new ArffSaver();
		saver.setInstances(data);

		filename = filename.replace(".csv", ".arff");

		// saver.setDestination(new File(filename));
		saver.setFile(new File(filename));
		saver.writeBatch();

	}
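The -H option tells CSVLoader that the first row contains data rather than attribute names. Depending on the Weka version, the same thing can be expressed with a bean-style setter instead of string options; this variant is a sketch and assumes setNoHeaderRowPresent(boolean) is available in your CSVLoader:

CSVLoader loader = new CSVLoader();
loader.setNoHeaderRowPresent(true);      // assumed setter, equivalent to the "-H" option
loader.setSource(new File(filename));
Instances data = loader.getDataSet();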
 
Example 2
Source File: Csv2arff.java    From Hands-On-Artificial-Intelligence-with-Java-for-Beginners with MIT License
/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws Exception {
    CSVLoader loader = new CSVLoader();
    loader.setSource(new File("/Users/admin/Documents/NetBeansProjects/Arff2CSV/weather.csv"));
    Instances data = loader.getDataSet();

    ArffSaver saver = new ArffSaver();
    saver.setInstances(data);

    saver.setFile(new File("weather.arff"));
    saver.writeBatch();
}
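The same batch pattern also works in the other direction. A minimal sketch of the reverse conversion (ARFF back to CSV) using weka.core.converters.ArffLoader and weka.core.converters.CSVSaver, with placeholder file names:

ArffLoader arffLoader = new ArffLoader();
arffLoader.setSource(new File("weather.arff"));   // placeholder input
Instances data = arffLoader.getDataSet();

CSVSaver csvSaver = new CSVSaver();
csvSaver.setInstances(data);
csvSaver.setFile(new File("weather.csv"));        // placeholder output
csvSaver.writeBatch();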
 
Example 3
Source File: DataIOFile.java    From bestconf with Apache License 2.0
/**
 * Load the data set from the CSV file at the given path.
 *
 * @param path path to the CSV file
 * @return the loaded data as Instances
 */
public static Instances loadDataFromCsvFile(String path) throws IOException{
    CSVLoader loader = new CSVLoader();
    loader.setSource(new File(path));
    Instances data = loader.getDataSet();
    
    System.out.println("\nHeader of dataset:\n");
    System.out.println(new Instances(data, 0));
    return data;
}
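The new Instances(data, 0) call creates an empty copy that shares the header of data, so the printout shows only the attribute declarations, not the rows. A hypothetical caller might use the helper like this (the path and class column are placeholders):

Instances training = DataIOFile.loadDataFromCsvFile("data/train.csv");   // placeholder path
training.setClassIndex(training.numAttributes() - 1);                     // e.g. last column as class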
 
Example 4
Source File: DatasetLoader.java    From wekaDeeplearning4j with GNU General Public License v3.0
/**
 * Load a CSV file as Instances, using the last attribute as the class.
 *
 * @param path path to the CSV file
 * @return the loaded data as Instances
 * @throws Exception IO error.
 */
public static Instances loadCSV(String path) throws Exception {
  CSVLoader csv = new CSVLoader();
  csv.setSource(new File(path));
  Instances data = csv.getDataSet();
  data.setClassIndex(data.numAttributes() - 1);
  return data;
}
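setClassIndex(data.numAttributes() - 1) marks the last column as the class attribute, which CSVLoader does not do on its own. If the class column should be picked by name rather than by position, a sketch (assuming the column is literally named "class"):

Instances data = loadCSV("mnist_meta.csv");      // placeholder path
data.setClass(data.attribute("class"));          // assumed column name; attribute(...) returns null if absent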
 
Example 5
Source File: RegressionTask.java    From Machine-Learning-in-Java with MIT License
public static void main(String[] args) throws Exception {

		/*
		 * Load data
		 */
		CSVLoader loader = new CSVLoader();
		loader.setFieldSeparator(",");
		loader.setSource(new File("data/ENB2012_data.csv"));
		Instances data = loader.getDataSet();

		// System.out.println(data);

		/*
		 * Build regression models
		 */
		// set class index to Y1 (heating load)
		data.setClassIndex(data.numAttributes() - 2);
		// remove last attribute Y2
		Remove remove = new Remove();
		remove.setOptions(new String[] { "-R", data.numAttributes() + "" });
		remove.setInputFormat(data);
		data = Filter.useFilter(data, remove);

		// build a regression model
		LinearRegression model = new LinearRegression();
		model.buildClassifier(data);
		System.out.println(model);

		// 10-fold cross-validation
		Evaluation eval = new Evaluation(data);
		eval.crossValidateModel(model, data, 10, new Random(1), new String[] {});
		System.out.println(eval.toSummaryString());
		// print the regression coefficients
		double[] coef = model.coefficients();
		System.out.println(java.util.Arrays.toString(coef));

		// build a regression tree model

		M5P m5p = new M5P();
		m5p.setOptions(new String[] { "" });
		m5p.buildClassifier(data);
		System.out.println(m5p);

		// 10-fold cross-validation (fresh Evaluation so results from the previous model are not accumulated)
		eval = new Evaluation(data);
		eval.crossValidateModel(m5p, data, 10, new Random(1), new String[] {});
		System.out.println(eval.toSummaryString());
		System.out.println();

		/*
		 * Bonus: Build additional models 
		 */
		
		// ZeroR modelZero = new ZeroR();
		//
		//
		//
		//
		//
		// REPTree modelTree = new REPTree();
		// modelTree.buildClassifier(data);
		// System.out.println(modelTree);
		// eval = new Evaluation(data);
		// eval.crossValidateModel(modelTree, data, 10, new Random(1), new String[]{});
		// System.out.println(eval.toSummaryString());
		//
		// SMOreg modelSVM = new SMOreg();
		//
		// MultilayerPerceptron modelPerc = new MultilayerPerceptron();
		//
		// GaussianProcesses modelGP = new GaussianProcesses();
		// modelGP.buildClassifier(data);
		// System.out.println(modelGP);
		// eval = new Evaluation(data);
		// eval.crossValidateModel(modelGP, data, 10, new Random(1), new String[]{});
		// System.out.println(eval.toSummaryString());

		/*
		 * Bonus: Save ARFF
		 */
		// ArffSaver saver = new ArffSaver();
		// saver.setInstances(data);
		// saver.setFile(new File(args[1]));
		// saver.setDestination(new File(args[1]));
		// saver.writeBatch();

	}
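This example sets the field separator explicitly even though "," is CSVLoader's default. The same setter handles other delimiters, for example a tab-separated file (a sketch with a placeholder path):

CSVLoader tsvLoader = new CSVLoader();
tsvLoader.setFieldSeparator("\t");                        // tab-separated input
tsvLoader.setSource(new File("data/ENB2012_data.tsv"));   // placeholder path
Instances data = tsvLoader.getDataSet();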