weka.classifiers.functions.supportVector.RBFKernel Java Examples

The following examples show how to use weka.classifiers.functions.supportVector.RBFKernel. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: TunedSVM.java    From tsml with GNU General Public License v3.0 6 votes vote down vote up
/**
 * Installs a concrete Weka kernel matching the requested {@link KernelType}.
 * LINEAR, QUADRATIC and POLYNOMIAL all use a PolynomialKernel that differs
 * only in its initial exponent (POLYNOMIAL starts at 1; the exponent is
 * tuned elsewhere). RBF installs a fresh RBFKernel with default gamma.
 *
 * @param type the kernel family to configure this SVM with
 */
public void setKernelType(KernelType type) {
    kernel = type;
    switch (type) {
        case LINEAR: {
            PolynomialKernel linear = new PolynomialKernel();
            linear.setExponent(1);
            setKernel(linear);
            break;
        }
        case QUADRATIC: {
            PolynomialKernel quadratic = new PolynomialKernel();
            quadratic.setExponent(2);
            setKernel(quadratic);
            break;
        }
        case POLYNOMIAL: {
            // Exponent starts at 1 here; the tuning code raises it later.
            PolynomialKernel poly = new PolynomialKernel();
            poly.setExponent(1);
            setKernel(poly);
            break;
        }
        case RBF: {
            setKernel(new RBFKernel());
            break;
        }
    }
}
 
Example #2
Source File: TunedSVM.java    From tsml with GNU General Public License v3.0 6 votes vote down vote up
/**
 * Ad-hoc experiment: grid-searches C and gamma for an RBF-kernel SMO on the
 * MNIST train/test split loaded from a UEA network share, printing accuracy
 * for each (logC, logGamma) pair. NOTE(review): "cheat" because it tunes
 * directly on the test split — results are optimistically biased.
 */
public static void cheatOnMNIST(){
    // Data is loaded from a hard-coded UNC path; loadDataNullable returns
    // null on failure — presumably only run inside the UEA network. TODO confirm.
    Instances train=DatasetLoading.loadDataNullable("\\\\cmptscsvr.cmp.uea.ac.uk\\ueatsc\\Data\\LargeProblems\\MNIST\\MNIST_TRAIN");
    Instances test=DatasetLoading.loadDataNullable("\\\\cmptscsvr.cmp.uea.ac.uk\\ueatsc\\Data\\LargeProblems\\MNIST\\MNIST_TEST");
    SMO svm=new SMO();
    RBFKernel k=new RBFKernel();
    svm.setKernel(k);
    System.out.println("Data loaded ......");
    // Baseline accuracy with default C and gamma before the grid search.
    double a =ClassifierTools.singleTrainTestSplitAccuracy(svm, train, test);
    System.out.println("Default acc = "+a);
    int min=1;//These search values are used for all kernels with C. It is also used for Gamma in RBF, but not for the Polynomial exponent search
    int max=6;
    // 6x6 grid over C = 2^c and gamma = 2^r, retraining and re-testing each time.
    for(double c=min;c<=max;c++)
        for(double r=min;r<=max;r++){
                 svm.setC(Math.pow(2, c));
                 k.setGamma(Math.pow(2, r));
                 svm.setKernel(k);//Just in case ...
                a =ClassifierTools.singleTrainTestSplitAccuracy(svm, train, test);
                System.out.println("logC ="+c+" logGamma = "+r+" acc = "+a);
            }
    
}
 
Example #3
Source File: EvaluationUtils.java    From AILibs with GNU Affero General Public License v3.0 6 votes vote down vote up
/**
 * Builds the candidate kernels to evaluate: an RBF kernel and polynomial
 * kernels of degree 2 and 3. Each kernel is paired with its own copy of the
 * instances, since Weka kernels are initialized against a dataset.
 *
 * @param insts the dataset to copy for each kernel
 * @return list of (kernel, instances) pairs
 * @throws KernelInitializationException if any kernel constructor fails
 */
private static List<Map.Entry<Kernel, Instances>> getKernelsWithInstances(final Instances insts) throws KernelInitializationException {
	try {
		ArrayList<Map.Entry<Kernel, Instances>> result = new ArrayList<>();

		// RBF kernel (cache size 250007, gamma 0.01).
		Instances rbfInsts = new Instances(insts);
		result.add(new AbstractMap.SimpleEntry<>(new RBFKernel(rbfInsts, 250007, 0.01), rbfInsts));

		// Polynomial kernel of degree 2.
		Instances poly2Insts = new Instances(insts);
		result.add(new AbstractMap.SimpleEntry<>(new PolyKernel(poly2Insts, 250007, 2, false),
				poly2Insts));

		// Polynomial kernel of degree 3.
		// BUG FIX: exponent was 2, duplicating the poly2 entry above; the
		// "poly3" naming makes clear this entry is meant to be degree 3.
		Instances poly3Insts = new Instances(insts);
		result.add(new AbstractMap.SimpleEntry<>(new PolyKernel(poly3Insts, 250007, 3, false),
				poly3Insts));

		return result;
	} catch (Exception e) {
		throw new KernelInitializationException("Could not instantiate a kernel due to an exception.", e);
	}
}
 
Example #4
Source File: BayesianOptimisedSearch.java    From tsml with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Bayesian-optimisation shapelet search over one series: seeds the surrogate
 * with random candidates, then alternates fitting a Gaussian Process on the
 * evaluated shapelets and evaluating the GP's next suggested candidate.
 *
 * @param timeSeries     the series to search for shapelets in
 * @param checkCandidate callback used to evaluate each candidate shapelet
 * @return the list of all shapelets evaluated during the search
 */
@Override
public ArrayList<Shapelet> searchForShapeletsInSeries(Instance timeSeries, ShapeletSearch.ProcessCandidate checkCandidate) {

    evaluatedShapelets = new ArrayList<>();

    //do the random presamples.
    for (int i = 0; i < pre_samples; i++) {
        ShapeletSearch.CandidateSearchData pair = GetRandomShapelet();
        evaluatePair(timeSeries, checkCandidate, pair);
    }

    // Surrogate model over (candidate -> quality), stored in a field so
    // helpers can query it between iterations.
    current_gp = new GaussianProcesses();
    current_gp.setKernel(new RBFKernel()); //use RBF Kernel.

    for (int i = 0; i < num_iterations; i++) {

        try {
            // Re-fit the GP on everything evaluated so far, then evaluate
            // the candidate the GP proposes next.
            Instances to_train = ConvertShapeletsToInstances(evaluatedShapelets);

            current_gp.buildClassifier(to_train);

            evaluatePair(timeSeries, checkCandidate, GetRandomShapeletFromGP(current_gp));
        } catch (Exception ex) {
            // A failed iteration is logged and skipped; the search continues
            // with the shapelets evaluated so far.
            Logger.getLogger(BayesianOptimisedSearch.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    return evaluatedShapelets;
}
 
Example #5
Source File: TunedSVM.java    From tsml with GNU General Public License v3.0 5 votes vote down vote up
public TunedSVM(){
    super();
    kernelOptimise=false;
    kernel=KernelType.RBF;
    tuneParameters=true;
    setKernel(new RBFKernel());
    rng=new Random();
    accuracy=new ArrayList<>();
    setBuildLogisticModels(true);
}
 
Example #6
Source File: TunedSVM.java    From tsml with GNU General Public License v3.0 4 votes vote down vote up
/**
 * Maps a single 1-based parameter index onto concrete SVM parameters for the
 * currently selected kernel type, so distributed tuning jobs can select a
 * grid point by index alone.
 *
 * LINEAR/QUADRATIC: one parameter, C = 2^(minC + x - 1).
 * RBF: two parameters, C and gamma, both on the 2^minC..2^maxC grid,
 * enumerated row-major (C varies slowest).
 * POLYNOMIAL: three parameters — C, exponent and b — enumerated over the
 * full C x exponent x b grid.
 *
 * @param x 1-based index into the parameter grid
 * @throws UnsupportedOperationException if x lies outside the grid for the
 *         current kernel type
 */
@Override
    public void setParametersFromIndex(int x) {
        kernelOptimise=false;   //Choose between linear, quadratic and RBF kernel
        tuneParameters=false;
        int numCParas=maxC-minC+1;

        if(kernel==KernelType.LINEAR || kernel==KernelType.QUADRATIC){//Single parameter for C
            // BUG FIX: the message used to print minC and the literal string
            // "max"; the valid *index* range is 1..numCParas.
            if(x<1 || x>numCParas)
                throw new UnsupportedOperationException("ERROR parameter index "+x+" out of range 1 to "+numCParas);
            paras=new double[1];
            paras[0]=Math.pow(2,minC+(x-1));
            setC(paras[0]);
        }
        else if(kernel==KernelType.RBF){//Two parameters, same range for both
            if(x<1 || x>numCParas*numCParas)
                throw new UnsupportedOperationException("ERROR parameter index "+x+" out of range 1 to "+(numCParas*numCParas));
            paras=new double[2];
            int temp=minC+(x-1)/numCParas;      // row index -> C exponent
            paras[0]=Math.pow(2,temp);
            temp=minC+(x-1)%numCParas;          // column index -> gamma exponent
            paras[1]=Math.pow(2,temp);
            setC(paras[0]);
            ((RBFKernel)m_kernel).setGamma(paras[1]);
            // Removed stray debug System.out.println("") left over from development.
        }
        else if(kernel==KernelType.POLYNOMIAL){
            //Three paras, not evenly distributed. C [1  to 33] exponent =[1 to 6], b=[0 to 5]
            paras=new double[3];
            int numExpParas=maxExponent-minExponent+1;
            int numBParas=maxB-minB+1;
            if(x<1 || x>numCParas*numExpParas*numBParas)
                throw new UnsupportedOperationException("ERROR parameter index "+x+" out of range for PolyNomialKernel");
            // Decompose the flat index into (C, exponent, b) grid coordinates.
            int cPara=minC+(x-1)%numCParas;
            int expPara=minExponent+(x-1)/(numBParas*numCParas);
            int bPara=minB+((x-1)/numCParas)%numBParas;
            paras[0]=Math.pow(2,cPara);
            paras[1]=expPara;
            paras[2]=bPara;
            PolynomialKernel kern = new PolynomialKernel();
            kern.setExponent(paras[1]);
            kern.setB(paras[2]);
            setKernel(kern);
            setC(paras[0]);
            System.out.println("Index "+x+" maps to "+cPara+","+expPara+","+bPara);
         }
    }
 
Example #7
Source File: TunedSVM.java    From tsml with GNU General Public License v3.0 4 votes vote down vote up
/**
 * Recovers the best (C, gamma) pair for the RBF kernel from a partially
 * completed distributed search: scans the per-fold result files that exist
 * on disk, tracks the minimum error, breaks ties uniformly at random, and
 * installs the winning parameters on this SVM.
 *
 * @throws Exception if no result files are found under resultsPath
 */
private void setRBFParasFromPartiallyCompleteSearch() throws Exception{
         paras=new double[2];
        combinedBuildTime=0;
        ArrayList<TunedSVM.ResultsHolder> ties=new ArrayList<>();
    //            If so, read them all from file, pick the best
        int count=0;
        int present=0;
        double minErr=1;
        // File numbering follows the same row-major (C, gamma) enumeration as
        // the search that wrote the files, so count lines up with (p1, p2).
        for(double p1:paraSpace1){//C
            for(double p2:paraSpace2){//GAMMA
                ClassifierResults tempResults = new ClassifierResults();
                count++;
                if(new File(resultsPath+count+".csv").exists()){
                    present++;
                    tempResults.loadResultsFromFile(resultsPath+count+".csv");
                    combinedBuildTime+=tempResults.getBuildTime();
                    double e=1-tempResults.getAcc();
                    if(e<minErr){
                        minErr=e;
                        ties=new ArrayList<>();//Remove previous ties
                        ties.add(new TunedSVM.ResultsHolder(p1,p2,tempResults));
                    }
                    else if(e==minErr){//Sort out ties
                            ties.add(new TunedSVM.ResultsHolder(p1,p2,tempResults));
                    }
                }
            }
        }
//Set the parameters
        if(present>0){
            System.out.println("Number of paras = "+present);
            System.out.println("Number of best = "+ties.size());
            // Break ties by picking one of the equally-good settings at random.
            TunedSVM.ResultsHolder best=ties.get(rng.nextInt(ties.size()));
            double bestC;
            double bestSigma;
            bestC=best.x;
            bestSigma=best.y;
            paras[0]=bestC;
            paras[1]=bestSigma;
            setC(bestC);
            ((RBFKernel)m_kernel).setGamma(bestSigma);
            res=best.res;
        }        
        else
            throw new Exception("Error, no parameter files for "+resultsPath);
    }
 
Example #8
Source File: LDAEvaluationTest.java    From AILibs with GNU Affero General Public License v3.0 4 votes vote down vote up
/**
 * Integration test: downloads the OpenML "segment" dataset, applies a
 * Nystroem approximation of an RBF kernel as a preprocessing filter, trains
 * an LDA on the transformed training split and asserts it classifies at
 * least one evaluation instance correctly. Requires network access.
 */
@Test
public void evaluateKernelLDA() throws Exception {
    logger.info("Starting LDA evaluation test...");

    /* load dataset and create a train-test-split */
    OpenmlConnector connector = new OpenmlConnector();
    DataSetDescription ds = connector.dataGet(DataSetUtils.SEGMENT_ID);
    File file = ds.getDataset(DataSetUtils.API_KEY);
    Instances data = new Instances(new BufferedReader(new FileReader(file)));
    data.setClassIndex(data.numAttributes() - 1);
    // First take a small (5%) stratified sample to keep the test fast,
    // then split that sample 70/30 into train/eval.
    List<Instances> dataSplit = WekaUtil.getStratifiedSplit(data, 42, .05f);

    Instances insts = dataSplit.get(0);
    List<Instances> split = WekaUtil.getStratifiedSplit(insts, 42, .7f);
    Instances newInsts = split.get(0);
    Instances evalInsts = split.get(1);

    long timeStart = System.currentTimeMillis();

    // Nystroem filter maps instances into the RBF kernel feature space
    // (cache size 250007, gamma 0.01) before LDA is applied.
    Nystroem kernelFilter = new Nystroem();
    kernelFilter.setInputFormat(newInsts);
    kernelFilter.setKernel(new RBFKernel(newInsts, 250007, 0.01));
    newInsts = Filter.useFilter(newInsts, kernelFilter);

    LDA lda = new LDA();

    lda.buildClassifier(newInsts);

    long timeStartEval = System.currentTimeMillis();

    // NOTE(review): evalInsts is not passed through the Nystroem filter
    // before evaluation — verify this is intentional.
    Evaluation eval = new Evaluation(newInsts);
    eval.evaluateModel(lda, evalInsts);
    logger.debug("LDA pct correct: " + eval.pctCorrect());
    Assert.assertTrue(eval.pctCorrect() > 0);

    long timeTaken = System.currentTimeMillis() - timeStart;
    long timeTakenEval = System.currentTimeMillis() - timeStartEval;

    logger.debug("LDA took " + (timeTaken / 1000) + " s.");
    logger.debug("LDA eval took " + (timeTakenEval / 1000) + " s.");
}