weka.core.Randomizable Java Examples

The following examples show how to use weka.core.Randomizable. They are drawn from open source projects; the line above each example names the original project, source file and license.
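Most of the examples below follow the same pattern: check whether a classifier implements Randomizable and, if it does, call setSeed(int) before building it so that repeated runs are reproducible. The sketch below is a minimal, hedged illustration of that pattern; it assumes weka.classifiers.trees.RandomTree as the Randomizable learner, and loadData() is a placeholder helper rather than part of Weka.

import weka.classifiers.Classifier;
import weka.classifiers.trees.RandomTree;
import weka.core.Instances;
import weka.core.Randomizable;

public class RandomizableSketch {

    // Seed the classifier before training if it supports randomization.
    static void buildSeeded(Classifier c, Instances data, int seed) throws Exception {
        if (c instanceof Randomizable) {
            ((Randomizable) c).setSeed(seed);  // fixes the internal RNG so repeated runs match
        }
        c.buildClassifier(data);
    }

    public static void main(String[] args) throws Exception {
        Instances data = loadData();               // placeholder: supply your own Instances
        buildSeeded(new RandomTree(), data, 42);   // RandomTree implements Randomizable
    }

    // Placeholder so the sketch is self-contained; replace with real data loading.
    static Instances loadData() {
        throw new UnsupportedOperationException("supply your own data source");
    }
}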
Example #1
Source File: ClassifierResults.java    From tsml with GNU General Public License v3.0
public void setNonResourceDetails(final Classifier classifier, final Instances data) {
    setDatasetName(data.relationName());
    if(classifier instanceof EnhancedAbstractClassifier) {
        setClassifierName(((EnhancedAbstractClassifier) classifier).getClassifierName());
        setFoldID(((EnhancedAbstractClassifier) classifier).getSeed());
    } else {
        setClassifierName(classifier.getClass().getSimpleName());
    }
    if(classifier instanceof Randomizable) {
        setFoldID(((Randomizable) classifier).getSeed());
    }
    if(classifier instanceof OptionHandler) {
        setParas(StrUtils.join(",", ((OptionHandler) classifier).getOptions()));
    }
    setOs(SysUtils.getOsName());
    setCpuInfo(SysUtils.findCpuInfo());
}
 
Example #2
Source File: BaggingMLdup.java    From meka with GNU General Public License v3.0
@Override
public void buildClassifier(Instances train) throws Exception {
	testCapabilities(train);

	if (getDebug()) System.out.print("-: Models: ");

	//m_Classifiers = (MultilabelClassifier[]) AbstractClassifier.makeCopies(m_Classifier, m_NumIterations);
	m_Classifiers = ProblemTransformationMethod.makeCopies((ProblemTransformationMethod) m_Classifier, m_NumIterations);

	for(int i = 0; i < m_NumIterations; i++) {
		Random r = new Random(m_Seed+i);
		Instances bag = new Instances(train,0);
		if (m_Classifiers[i] instanceof Randomizable) ((Randomizable)m_Classifiers[i]).setSeed(m_Seed+i);
		if(getDebug()) System.out.print(""+i+" ");

		int bag_no = (m_BagSizePercent*train.numInstances()/100);
		//System.out.println(" bag no: "+bag_no);
		while(bag.numInstances() < bag_no) {
			bag.add(train.instance(r.nextInt(train.numInstances())));
		}
		m_Classifiers[i].buildClassifier(bag);
	}
	if (getDebug()) System.out.println(":-");
}
 
Example #3
Source File: FilteredClassifier.java    From meka with GNU General Public License v3.0
/**
 * Build the classifier on the filtered data.
 *
 * @param data the training data
 * @throws Exception if the classifier could not be built successfully
 */
public void buildClassifier(Instances data) throws Exception {

  if (m_Classifier == null) {
    throw new Exception("No base classifier has been set!");
  }

  getCapabilities().testWithFail(data);

  Random r = (data.numInstances() > 0) ? data.getRandomNumberGenerator(getSeed()) : new Random(getSeed());
  data = setUp(data, r);
  if (!data.allInstanceWeightsIdentical() && !(m_Classifier instanceof WeightedInstancesHandler)) {
    data = data.resampleWithWeights(r); // The filter may have assigned weights.
  }
  if (!data.allAttributeWeightsIdentical() && !(m_Classifier instanceof WeightedAttributesHandler)) {
    data = resampleAttributes(data, false, r);
  }

  if (m_Classifier instanceof Randomizable) {
    ((Randomizable)m_Classifier).setSeed(r.nextInt());
  }

  m_Classifier.buildClassifier(data);
}
 
Example #4
Source File: RepeatedRuns.java    From meka with GNU General Public License v3.0
/**
 * Executes the runs in sequential order.
 *
 * @param classifier    the classifier to evaluate
 * @param dataset       the dataset to evaluate on
 * @return              the statistics
 */
protected List<EvaluationStatistics> evaluateSequential(MultiLabelClassifier classifier, Instances dataset) {
	List<EvaluationStatistics>  result;
	List<EvaluationStatistics>  stats;
	int                         i;

	result = new ArrayList<>();

	for (i = m_LowerRuns; i <= m_UpperRuns; i++) {
		log("Run: " + i);
		if (m_Evaluator instanceof Randomizable)
			((Randomizable) m_Evaluator).setSeed(i);
		m_Evaluator.initialize();
		stats = m_Evaluator.evaluate(classifier, dataset);
		if (stats != null) {
			for (EvaluationStatistics stat: stats) {
				stat.put(KEY_RUN, i);
				result.add(stat);
			}
		}
		if (m_Stopped)
			break;
	}

	return result;
}
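The run-indexed seeding shown above can also be applied directly to a classifier when repeating an experiment. A hedged sketch follows, in which numRuns, template and trainForRun(...) are illustrative placeholders rather than part of the example:

// Hypothetical sketch: one seeded copy of the classifier per repetition,
// so each run is reproducible but uses a different random stream.
for (int run = 1; run <= numRuns; run++) {
    Classifier copy = weka.classifiers.AbstractClassifier.makeCopy(template);
    if (copy instanceof Randomizable)
        ((Randomizable) copy).setSeed(run);     // the run index doubles as the seed
    copy.buildClassifier(trainForRun(run));     // placeholder for the run-specific training data
}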
 
Example #5
Source File: RandomCommittee.java    From tsml with GNU General Public License v3.0
  /**
   * Builds the committee of randomizable classifiers.
   *
   * @param data the training data to be used for generating the
   * bagged classifier.
   * @exception Exception if the classifier could not be built successfully
   */
  public void buildClassifier(Instances data) throws Exception {

    // can classifier handle the data?
    getCapabilities().testWithFail(data);

    // remove instances with missing class
    m_data = new Instances(data);
    m_data.deleteWithMissingClass();
    super.buildClassifier(m_data);
    
    if (!(m_Classifier instanceof Randomizable)) {
      throw new IllegalArgumentException("Base learner must implement Randomizable!");
    }

    m_Classifiers = AbstractClassifier.makeCopies(m_Classifier, m_NumIterations);

    Random random = m_data.getRandomNumberGenerator(m_Seed);

    // Resample data based on weights if base learner can't handle weights
    if (!(m_Classifier instanceof WeightedInstancesHandler)) {
      m_data = m_data.resampleWithWeights(random);
    }

    for (int j = 0; j < m_Classifiers.length; j++) {

      // Set the random number seed for the current classifier.
      ((Randomizable) m_Classifiers[j]).setSeed(random.nextInt());
      
      // Build the classifier.
//      m_Classifiers[j].buildClassifier(m_data);
    }
    
    buildClassifiers();
    
    // save memory
    m_data = null;
  }
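RandomCommittee refuses non-Randomizable base learners outright, so a typical usage pairs it with something like RandomTree. A minimal sketch, assuming the standard weka.classifiers.meta.RandomCommittee API and a placeholder Instances object named data:

// Minimal usage sketch: the base learner must itself implement Randomizable,
// e.g. RandomTree; the committee seed drives the per-member seeds.
RandomCommittee committee = new RandomCommittee();
committee.setClassifier(new weka.classifiers.trees.RandomTree());
committee.setNumIterations(10);   // number of committee members
committee.setSeed(1);             // master seed; each member is seeded with random.nextInt()
committee.buildClassifier(data);  // 'data' is a placeholder Instances object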
 
Example #6
Source File: ClassifierLists.java    From tsml with GNU General Public License v3.0
/**
 * setClassifier takes the experimental arguments themselves, so the classifiers
 * can take from them whatever they need, e.g. the dataset name, the fold id,
 * separate checkpoint paths, etc.
 *
 * To take this idea further, each of the TSC-specific classifiers could/should
 * have a constructor and/or factory that builds the classifier from the
 * experimental args.
 *
 * The previous usage was setClassifier(String classifierName, int fold);
 * this can be reproduced with setClassifierClassic below.
 */
public static Classifier setClassifier(Experiments.ExperimentalArguments exp){
    String classifier=exp.classifierName;
    Classifier c = null;
    if(distanceBased.contains(classifier))
        c=setDistanceBased(exp);
    else if(dictionaryBased.contains(classifier))
        c=setDictionaryBased(exp);
    else if(intervalBased.contains(classifier))
        c=setIntervalBased(exp);
    else if(frequencyBased.contains(classifier))
        c=setFrequencyBased(exp);
    else if(shapeletBased.contains(classifier))
        c=setShapeletBased(exp);
    else if(hybridBased.contains(classifier))
        c=setHybridBased(exp);
    else if(multivariateBased.contains(classifier))
        c=setMultivariate(exp);
    else if(standardClassifiers.contains(classifier))
        c=setStandardClassifiers(exp);
    else if(bespokeClassifiers.contains(classifier))
        c=setBespokeClassifiers(exp);
    else{
        System.out.println("Unknown classifier "+classifier+" it is not in any of the sublists ");
        throw new UnsupportedOperationException("Unknown classifier "+classifier+" it is not in any of the sublists on ClassifierLists ");
    }
    if(c instanceof Randomizable)
        ((Randomizable)c).setSeed(exp.foldId);
    return c;
}
 
Example #7
Source File: FlatCote.java    From tsml with GNU General Public License v3.0
@Override
    public void buildClassifier(Instances train) throws Exception{

        long t1=System.nanoTime();
        this.train = train;
        
        ee = new ElasticEnsemble();
        ShapeletTransformClassifier stc = new ShapeletTransformClassifier();
        stc.setHourLimit(24);
        stc.setClassifier(new CAWPE());
//Redo for STC
        //ShapeletTransform shapeletTransform = ShapeletTransformFactory.createTransform(train);
        ShapeletFilter shapeletFilter = ShapeletTransformTimingUtilities.createTransformWithTimeLimit(train, 24); // now defaults to max of 24 hours
        shapeletFilter.supressOutput();
        st = new CAWPE();
        st.setTransform(shapeletFilter);
        st.setupOriginalHESCASettings();
        acf = new CAWPE();
        acf.setupOriginalHESCASettings();
        acf.setTransform(new ACF());
        ps = new CAWPE();
        ps.setupOriginalHESCASettings();
        ps.setTransform(new PowerSpectrum());

        if(seedClassifier){
            if(ee instanceof Randomizable)
                ((Randomizable)ee).setSeed(seed);
            if(st instanceof Randomizable)
                ((Randomizable)st).setSeed(seed);
            if(acf instanceof Randomizable)
                ((Randomizable)acf).setSeed(seed);
            if(ps instanceof Randomizable)
                ((Randomizable)ps).setSeed(seed);
            
        }
//        st.setDebugPrinting(true);
        ee.buildClassifier(train);
        acf.buildClassifier(train);
        ps.buildClassifier(train);
        st.buildClassifier(train);
        
        cvAccs = new double[4][];
        cvAccs[0] = ee.getCVAccs();
        cvAccs[1] = st.getIndividualAccEstimates();
        cvAccs[2] = acf.getIndividualAccEstimates();
        cvAccs[3] = ps.getIndividualAccEstimates();
        
        cvSum = 0;
        for(int e = 0; e < cvAccs.length;e++){
            for(int c = 0; c < cvAccs[e].length; c++){
                cvSum+=cvAccs[e][c];
            }
        }
        long t2=System.nanoTime();
        trainResults.setBuildTime(t2-t1);
        for(int i=0;i<cvAccs.length;i++)
            numClassifiers+=cvAccs[i].length;
    }
 
Example #8
Source File: RandomSubSpace.java    From tsml with GNU General Public License v3.0
/**
 * Builds the classifier.
 *
 * @param data the training data to be used for generating the classifier.
 * @throws Exception if the classifier could not be built successfully
 */
public void buildClassifier(Instances data) throws Exception {

  // can classifier handle the data?
  getCapabilities().testWithFail(data);

  // remove instances with missing class
  m_data = new Instances(data);
  m_data.deleteWithMissingClass();

  // only class? -> build ZeroR model
  if (m_data.numAttributes() == 1) {
    System.err.println(
      "Cannot build model (only class attribute present in data!), "
      + "using ZeroR model instead!");
    m_ZeroR = new weka.classifiers.rules.ZeroR();
    m_ZeroR.buildClassifier(m_data);
    return;
  }
  else {
    m_ZeroR = null;
  }

  super.buildClassifier(data);

  Integer[] indices = new Integer[data.numAttributes()-1];
  int classIndex = data.classIndex();
  int offset = 0;
  for (int i = 0; i < indices.length+1; i++) {
    if (i != classIndex) {
      indices[offset++] = i+1;
    }
  }
  int subSpaceSize = numberOfAttributes(indices.length, getSubSpaceSize());
  Random random = data.getRandomNumberGenerator(m_Seed);

  for (int j = 0; j < m_Classifiers.length; j++) {
    if (m_Classifier instanceof Randomizable) {
      ((Randomizable) m_Classifiers[j]).setSeed(random.nextInt());
    }
    FilteredClassifier fc = new FilteredClassifier();
    fc.setClassifier(m_Classifiers[j]);
    m_Classifiers[j] = fc;
    Remove rm = new Remove();
    rm.setOptions(new String[]{"-V", "-R", randomSubSpace(indices, subSpaceSize, classIndex+1, random)});
    fc.setFilter(rm);

    // build the classifier
    //m_Classifiers[j].buildClassifier(m_data);
  }

  buildClassifiers();

  // save memory
  m_data = null;
}
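Unlike RandomCommittee, RandomSubSpace only seeds the base learner when it happens to implement Randomizable; the ensemble's own seed already controls which attribute subspace each member sees. A minimal sketch, assuming the standard weka.classifiers.meta.RandomSubSpace API and a placeholder Instances object named data:

// Minimal usage sketch: a non-Randomizable base learner such as J48 is fine here,
// because the subspace selection itself is driven by the ensemble's seed.
RandomSubSpace rss = new RandomSubSpace();
rss.setClassifier(new weka.classifiers.trees.J48());
rss.setSubSpaceSize(0.5);        // use half of the attributes per member
rss.setNumIterations(10);        // number of ensemble members
rss.setSeed(7);
rss.buildClassifier(data);       // 'data' is a placeholder Instances object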