Java Code Examples for weka.core.Instance#classIndex()

The following examples show how to use weka.core.Instance#classIndex(). Each example lists the source file and project it was taken from, together with that project's license, so you can refer back to the original code for full context.
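
Before diving into the examples, here is a minimal, self-contained sketch of the pattern most of these snippets rely on: classIndex() reports the position of the class attribute (or -1 when no class has been set on the dataset), and callers typically use it to skip the class value when iterating over attributes. The class name ClassIndexDemo and the toy attributes below are illustrative only and assume a recent Weka 3.7+/3.8 API.

// Minimal sketch (illustrative class and attribute names, Weka 3.7+/3.8 API assumed).
import java.util.ArrayList;
import java.util.Arrays;

import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;

public class ClassIndexDemo {

    public static void main(String[] args) {
        // Build a tiny dataset: two numeric attributes plus a nominal class.
        ArrayList<Attribute> attrs = new ArrayList<>();
        attrs.add(new Attribute("x1"));
        attrs.add(new Attribute("x2"));
        attrs.add(new Attribute("class", Arrays.asList("yes", "no")));

        Instances data = new Instances("demo", attrs, 0);
        data.setClassIndex(data.numAttributes() - 1); // class attribute is last

        Instance inst = new DenseInstance(1.0, new double[]{0.5, 1.5, 0.0});
        inst.setDataset(data); // classIndex() is delegated to the owning dataset

        // classIndex() gives the class attribute's position (-1 if none is set);
        // the usual pattern is to skip that position when iterating attributes.
        int classIndex = inst.classIndex();
        for (int i = 0; i < inst.numAttributes(); i++) {
            if (i == classIndex) {
                continue; // skip the class value
            }
            System.out.println(inst.attribute(i).name() + " = " + inst.value(i));
        }
    }
}

Running the sketch prints the two feature values and skips the class attribute, which is exactly the loop shape that recurs in the examples below.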
Example 1
Source File: CRUpdateable.java    From meka with GNU General Public License v3.0
@Override
public void updateClassifier(Instance x) throws Exception {

	int L = x.classIndex();

	if(getDebug()) System.out.print("-: Updating "+L+" models");

	for(int j = 0; j < L; j++) {
		Instance x_j = (Instance)x.copy();
		x_j.setDataset(null);
		x_j = MLUtils.keepAttributesAt(x_j,new int[]{j},L);
		x_j.setDataset(m_Templates[j]);
		((UpdateableClassifier)m_MultiClassifiers[j]).updateClassifier(x_j);
	}

	if(getDebug()) System.out.println(":- ");
}
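
A note on the MEKA examples in this list (this one, plus Examples 5, 6, 7 and 13): MEKA stores the L label attributes first and sets the dataset's class index to L, so x.classIndex() is read as the number of labels rather than as the position of a single class attribute. Below is a small sketch of that header convention using only the core Weka API; the class name MekaLabelCountDemo is illustrative and not part of MEKA.

// Sketch of the MEKA multi-label header convention (illustrative class name,
// plain Weka core API assumed; no MEKA dependency needed for the header itself).
import java.util.ArrayList;
import java.util.Arrays;

import weka.core.Attribute;
import weka.core.Instances;

public class MekaLabelCountDemo {

    public static void main(String[] args) {
        // In MEKA's format the first L attributes are the binary labels and the
        // dataset's class index is set to L.
        ArrayList<Attribute> attrs = new ArrayList<>();
        attrs.add(new Attribute("label1", Arrays.asList("0", "1")));
        attrs.add(new Attribute("label2", Arrays.asList("0", "1")));
        attrs.add(new Attribute("label3", Arrays.asList("0", "1")));
        attrs.add(new Attribute("feature1"));
        attrs.add(new Attribute("feature2"));

        Instances data = new Instances("mlc-demo", attrs, 0);
        data.setClassIndex(3); // three labels, so classIndex == L == 3

        // This is why the MEKA snippets read classIndex() as the label count L.
        System.out.println("Number of labels L = " + data.classIndex());
    }
}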
 
Example 2
Source File: WekaUtil.java    From AILibs with GNU Affero General Public License v3.0
public static Instance getRefactoredInstance(final Instance instance, final List<String> classes) {

		/* modify instance */
		Instances dataset = WekaUtil.getEmptySetOfInstancesWithRefactoredClass(instance.dataset(), classes);
		int numAttributes = instance.numAttributes();
		int classIndex = instance.classIndex();
		Instance iNew = new DenseInstance(numAttributes);
		for (int i = 0; i < numAttributes; i++) {
			Attribute a = instance.attribute(i);
			if (i != classIndex) {
				iNew.setValue(a, instance.value(a));
			} else {
				iNew.setValue(a, 0.0); // the value does not matter since this should only be used for TESTING
			}
		}
		dataset.add(iNew);
		iNew.setDataset(dataset);
		return iNew;
	}
 
Example 3
Source File: NaiveBayesMultinomialUpdateable.java    From tsml with GNU General Public License v3.0
/**
  * Updates the classifier with the given instance.
  *
  * @param instance 	the new training instance to include in the model
  * @throws Exception 	if the instance could not be incorporated in
  * 			the model.
  */
 public void updateClassifier(Instance instance) throws Exception {
   int classIndex = (int) instance.value(instance.classIndex());
   m_probOfClass[classIndex] += instance.weight();

   for (int a = 0; a < instance.numValues(); a++) {
     if (instance.index(a) == instance.classIndex() ||
         instance.isMissing(a))
       continue;

     double numOccurences = instance.valueSparse(a) * instance.weight();
     /*if (numOccurences < 0)
       throw new Exception(
           "Numeric attribute values must all be greater or equal to zero."); */
     m_wordsPerClass[classIndex] += numOccurences;
     if (m_wordsPerClass[classIndex] < 0) {
       throw new Exception("Can't have a negative number of words for class " 
           + (classIndex + 1));
     }
     m_probOfWordGivenClass[classIndex][instance.index(a)] += numOccurences;
     if (m_probOfWordGivenClass[classIndex][instance.index(a)] < 0) {
       throw new Exception("Can't have a negative conditional sum for attribute " 
          + instance.index(a));
     }
   }
 }
 
Example 4
Source File: TWEDistance.java    From tsml with GNU General Public License v3.0
/**
 * distance method that converts instances to arrays of doubles
 *
 * @param first instance 1
 * @param second instance 2
 * @param cutOffValue used for early abandon
 * @return distance between instances
 */
@Override
public double distance(Instance first, Instance second, 
        double cutOffValue)
{
    //remove class index from first instance if there is one
    int firstClassIndex = first.classIndex();
    double[] arr1;
    if(firstClassIndex > 0){
        arr1 = new double[first.numAttributes()-1];
        for(int i = 0,j = 0; i < first.numAttributes(); i++){
            if(i != firstClassIndex){
                arr1[j]= first.value(i);
                j++;
            }
        }
    }else{
        arr1 = first.toDoubleArray();
    }

    //remove class index from second instance if there is one
    int secondClassIndex = second.classIndex();
    double[] arr2;
    if(secondClassIndex > 0){
        arr2 = new double[second.numAttributes()-1];
        for(int i = 0,j = 0; i < second.numAttributes(); i++){
            if(i != secondClassIndex){
                arr2[j]= second.value(i);
                j++;
            }
        }
    }else{
        arr2 = second.toDoubleArray();
    }

    return distance(arr1,arr2,cutOffValue);
}
 
Example 5
Source File: CCp.java    From meka with GNU General Public License v3.0
@Override
public double[] distributionForInstance(Instance x) throws Exception {
	int L = x.classIndex();
	confidences = new double[L];
	root.classify(x);
	double y[] = new double[L*2];
	for(int j = 0; j < L; j++) {
		y[j] = x.value(j);
		y[j+L] = confidences[j]; // <--- this is the extra line
	}
	return y;
}
 
Example 6
Source File: NSR.java    From meka with GNU General Public License v3.0
@Override
public double[] distributionForInstance(Instance x) throws Exception {

	int L = x.classIndex();

	//if there is only one class (as e.g. in some hierarchical methods) predict it
	//if(L == 1) return new double[]{1.0};

	Instance x_sl = PSUtils.convertInstance(x,L,m_InstancesTemplate);							// the sl instance
	//x_sl.setDataset(m_InstancesTemplate);							// where y in {comb_1,comb_2,...,comb_k}

	double w[] = m_Classifier.distributionForInstance(x_sl);		// w[j] = p(y_j) for each j = 1,...,L
	int max_j  = Utils.maxIndex(w);									// j of max w[j]
	//int max_j = (int)m_Classifier.classifyInstance(x_sl);			// where comb_i is selected
	String y_max = m_InstancesTemplate.classAttribute().value(max_j);									// comb_i e.g. "0+3+0+0+1+2+0+0"

	double y[] = Arrays.copyOf(A.toDoubleArray(MLUtils.decodeValue(y_max)),L*2);					// "0+3+0+0+1+2+0+0" -> [0.0,3.0,0.0,...,0.0]

	HashMap<Double,Double> votes[] = new HashMap[L];
	for(int j = 0; j < L; j++) {
		votes[j] = new HashMap<Double,Double>();
	}

	for(int i = 0; i < w.length; i++) {
		double y_i[] = A.toDoubleArray(MLUtils.decodeValue(m_InstancesTemplate.classAttribute().value(i)));
		for(int j = 0; j < y_i.length; j++) {
			votes[j].put(y_i[j] , votes[j].containsKey(y_i[j]) ? votes[j].get(y_i[j]) + w[i] : w[i]);
		}
	}

	// some confidence information
	for(int j = 0; j < L; j++) {
		y[j+L] = votes[j].size() > 0 ? Collections.max(votes[j].values()) : 0.0;
	}

	return y;
}
 
Example 7
Source File: MLUtils.java    From meka with GNU General Public License v3.0
public static final String toDebugString(Instance x) {
	int L = x.classIndex();
	StringBuilder sb = new StringBuilder();  
	sb.append("y = [");
	for(int j = 0; j < L; j++) {
		sb.append(x.value(j)+" ");
	}
	sb.append("], x = [");
	for(int j = L; j < L+10; j++) {
		sb.append(x.value(j)+" ");
	}
	sb.append(" ... ]");
	return sb.toString();
}
 
Example 8
Source File: SpatialBOSS.java    From tsml with GNU General Public License v3.0
/**
 * Assumes class index, if present, is last
 * @return data of passed instance in a double array with the class value removed if present
 */
protected static double[] toArrayNoClass(Instance inst) {
    int length = inst.numAttributes();
    if (inst.classIndex() >= 0)
        --length;

    double[] data = new double[length];

    for (int i=0, j=0; i < inst.numAttributes(); ++i)
        if (inst.classIndex() != i)
            data[j++] = inst.value(i);

    return data;
}
 
Example 9
Source File: ACF.java    From tsml with GNU General Public License v3.0
    /**
     * PRE: An instance of ACF data. Performs a test of significance on the
     * ACF terms until it finds the first insignificant one.
     * Will not work if the class variable is not the last.
     * @param inst
     * @return
     */
    private int findSingleCutOff(Instance inst){
        /** Finds the threshold of the first non significant ACF term for all the series. */
        double[] r=inst.toDoubleArray();
        int count=0;
        if(useGlobalSigThreshold){
            for(int i=0;i<inst.numAttributes();i++){
                if(i!=inst.classIndex()){
                    sigThreshold[count]=globalSigThreshold;
                    count++;
                }
            }
        }
        else{   ///DO NOT USE, I'm not sure of the logic of this, need to look up the paper
            sigThreshold[0]=r[0]*r[0];
            count=1;
            for(int i=1;i<inst.numAttributes();i++){
                if(i!=inst.classIndex()){
                    sigThreshold[count]=sigThreshold[count-1]+r[i]*r[i];
                    count++;
                }
            }
            for(int i=0;i<sigThreshold.length;i++){
                sigThreshold[i]=(1+sigThreshold[i])/seriesLength;
                sigThreshold[i]=2/Math.sqrt(sigThreshold[i]);
            }
        }
        for(int i=0;i<sigThreshold.length;i++)
            if(Math.abs(r[i])<sigThreshold[i])
                return i;
        return sigThreshold.length-1;
    }
 
Example 10
Source File: ComplementNaiveBayes.java    From tsml with GNU General Public License v3.0
/**
    * Classifies a given instance. <p>
    *
    * The classification rule is: <br>
    *     MinC(forAllWords(ti*Wci)) <br>
    *      where <br>
    *         ti is the frequency of word i in the given instance <br>
    *         Wci is the weight of word i in Class c. <p>
    *
    * For more information see section 4.4 of the paper mentioned above
    * in the classifiers description.
    *
    * @param instance the instance to classify
    * @return the index of the class the instance is most likely to belong.
    * @throws Exception if the classifier has not been built yet.
    */
   public double classifyInstance(Instance instance) throws Exception {

       if(wordWeights==null)
           throw new Exception("Error. The classifier has not been built "+
                               "properly.");
       
       double[] valueForClass = new double[numClasses];
       double sumOfClassValues = 0;

       for (int c = 0; c < numClasses; c++) {
           double sumOfWordValues = 0;
           for (int w = 0; w < instance.numValues(); w++) {
               if (instance.index(w) != instance.classIndex()) {
                   double freqOfWordInDoc = instance.valueSparse(w);
                   sumOfWordValues += freqOfWordInDoc *
                                      wordWeights[c][instance.index(w)];
               }
           }
           //valueForClass[c] = Math.log(probOfClass[c]) - sumOfWordValues;
           valueForClass[c] = sumOfWordValues;
           sumOfClassValues += valueForClass[c];
       }

       int minidx = 0;
       for (int i = 0; i < numClasses; i++)
           if (valueForClass[i] < valueForClass[minidx])
               minidx = i;

       return minidx;
   }
 
Example 11
Source File: ClusterMembership.java    From tsml with GNU General Public License v3.0
/**
  * Convert a single instance over. The converted instance is added to 
  * the end of the output queue.
  *
  * @param instance the instance to convert
  * @throws Exception if something goes wrong
  */
 protected void convertInstance(Instance instance) throws Exception {
   
   // set up values
   double [] instanceVals = new double[outputFormatPeek().numAttributes()];
   double [] tempvals;
   if (instance.classIndex() >= 0) {
     tempvals = new double[outputFormatPeek().numAttributes() - 1];
   } else {
     tempvals = new double[outputFormatPeek().numAttributes()];
   }
   int pos = 0;
   for (int j = 0; j < m_clusterers.length; j++) {
     if (m_clusterers[j] != null) {
       double[] probs;
       if (m_removeAttributes != null) {
         m_removeAttributes.input(instance);
         probs = logs2densities(j, m_removeAttributes.output());
       } else {
         probs = logs2densities(j, instance);
       }
       System.arraycopy(probs, 0, tempvals, pos, probs.length);
       pos += probs.length;
     }
   }
   tempvals = Utils.logs2probs(tempvals);
   System.arraycopy(tempvals, 0, instanceVals, 0, tempvals.length);
   if (instance.classIndex() >= 0) {
     instanceVals[instanceVals.length - 1] = instance.classValue();
   }
   
   push(new DenseInstance(instance.weight(), instanceVals));
 }
 
Example 12
Source File: SVMLightSaver.java    From tsml with GNU General Public License v3.0
/**
  * turns the instance into a svm light row.
  * 
  * @param inst	the instance to transform
  * @return		the generated svm light row
  */
 protected String instanceToSvmlight(Instance inst) {
   StringBuffer	result;
   int			i;
   
   result = new StringBuffer();
   
   // class
   if (inst.classAttribute().isNominal()) {
     if (inst.classValue() == 0)
       result.append("1");
     else if (inst.classValue() == 1)
       result.append("-1");
   }
   else {
     result.append("" + Utils.doubleToString(inst.classValue(), MAX_DIGITS));
   }

   // attributes
   for (i = 0; i < inst.numAttributes(); i++) {
     if (i == inst.classIndex())
       continue;
     if (inst.value(i) == 0)
       continue;
     result.append(" " + (i+1) + ":" + Utils.doubleToString(inst.value(i), MAX_DIGITS));
   }
   
   return result.toString();
 }
 
Example 13
Source File: MLUtils.java    From meka with GNU General Public License v3.0
/**
 * SetTemplate - returns a copy of x_template, set with x's attributes, and set to dataset D_template (of which x_template is a template).
 * This function is very useful when Weka throws a strange IndexOutOfBounds exception for setTemplate(x,Template)
 */
public static final Instance setTemplate(Instance x, Instance x_template, Instances D_template) {
	Instance x_ = (Instance)x_template.copy();
	int L_y = x.classIndex();
	int L_z = D_template.classIndex();
	// copy over x space
	MLUtils.copyValues(x_,x,L_y,L_z);
	// set class values to missing
	MLUtils.setLabelsMissing(x_,L_z);
	// set dataset
	x_.setDataset(D_template);
	return x_;
}
 
Example 14
Source File: WindowSearcher.java    From tsml with GNU General Public License v3.0
@Override
public void buildClassifier(Instances data) throws Exception {
    // Initialise training dataset
    Attribute classAttribute = data.classAttribute();

    classedData = new HashMap <>();
    classedDataIndices = new HashMap <>();
    for (int c = 0; c < data.numClasses(); c++) {
        classedData.put(data.classAttribute().value(c), new ArrayList <SymbolicSequence>());
        classedDataIndices.put(data.classAttribute().value(c), new ArrayList <Integer>());
    }

    train = new SymbolicSequence[data.numInstances()];
    classMap = new String[train.length];
    maxLength = 0;
    for (int i = 0; i < train.length; i++) {
        Instance sample = data.instance(i);
        MonoDoubleItemSet[] sequence = new MonoDoubleItemSet[sample.numAttributes() - 1];
        maxLength = Math.max(maxLength, sequence.length);
        int shift = (sample.classIndex() == 0) ? 1 : 0;
        for (int t = 0; t < sequence.length; t++) {
            sequence[t] = new MonoDoubleItemSet(sample.value(t + shift));
        }
        train[i] = new SymbolicSequence(sequence);
        String clas = sample.stringValue(classAttribute);
        classMap[i] = clas;
        classedData.get(clas).add(train[i]);
        classedDataIndices.get(clas).add(i);
    }

    warpingMatrix = new double[maxLength][maxLength];
    U = new double[maxLength];
    L = new double[maxLength];

    maxWindow = Math.round(1 * maxLength);
    nns = new int[maxWindow + 1][train.length];
    dist = new double[maxWindow + 1][train.length];

    // Start searching for the best window
    searchBestWarpingWindow();

    // if we are doing length, find the best window in percentage
    if (bestWindowPercent < 0)
        bestWindowPercent = lengthToPercent(bestWarpingWindow);

    // Saving best windows found
    System.out.println("Windows found=" + bestWarpingWindow +
            "(" + bestWindowPercent + ") Best Acc=" + (1 - bestScore));
}
 
Example 15
Source File: ERP1NN.java    From tsml with GNU General Public License v3.0
public final double distance(Instance first, Instance second, double cutoff) {

        // base case - we're assuming class val is last. If this is true, this method is fine,
        // if not, we'll default to the DTW class
        if (first.classIndex() != first.numAttributes() - 1 || second.classIndex() != second.numAttributes() - 1) {
            return new ERPDistance(this.g, this.bandSize).distance(first, second, cutoff);
        }

        int m = first.numAttributes() - 1;
        int n = second.numAttributes() - 1;


        // Current and previous columns of the matrix
        double[] curr = new double[m];
        double[] prev = new double[m];

        // size of edit distance band
        // bandsize is the maximum allowed distance to the diagonal
//        int band = (int) Math.ceil(v2.getDimensionality() * bandSize);
        int band = (int) Math.ceil(m * bandSize);

        // g parameter for local usage
        double gValue = g;

        for (int i = 0; i < m; i++) {
            // Swap current and prev arrays. We'll just overwrite the new curr.
            {
                double[] temp = prev;
                prev = curr;
                curr = temp;
            }
            int l = i - (band + 1);
            if (l < 0) {
                l = 0;
            }
            int r = i + (band + 1);
            if (r > (m - 1)) {
                r = (m - 1);
            }

            for (int j = l; j <= r; j++) {
                if (Math.abs(i - j) <= band) {
                    // compute squared distance of feature vectors
                    double val1 = first.value(i);
                    double val2 = gValue;
                    double diff = (val1 - val2);
                    final double d1 = Math.sqrt(diff * diff);

                    val1 = gValue;
                    val2 = second.value(j);
                    diff = (val1 - val2);
                    final double d2 = Math.sqrt(diff * diff);

                    val1 = first.value(i);
                    val2 = second.value(j);
                    diff = (val1 - val2);
                    final double d12 = Math.sqrt(diff * diff);

                    final double dist1 = d1 * d1;
                    final double dist2 = d2 * d2;
                    final double dist12 = d12 * d12;

                    final double cost;

                    if ((i + j) != 0) {
                        if ((i == 0) || ((j != 0) && (((prev[j - 1] + dist12) > (curr[j - 1] + dist2)) && ((curr[j - 1] + dist2) < (prev[j] + dist1))))) {
                            // del
                            cost = curr[j - 1] + dist2;
                        } else if ((j == 0) || ((i != 0) && (((prev[j - 1] + dist12) > (prev[j] + dist1)) && ((prev[j] + dist1) < (curr[j - 1] + dist2))))) {
                            // ins
                            cost = prev[j] + dist1;
                        } else {
                            // match
                            cost = prev[j - 1] + dist12;
                        }
                    } else {
                        cost = 0;
                    }

                    curr[j] = cost;
                    // steps[i][j] = step;
                } else {
                    curr[j] = Double.POSITIVE_INFINITY; // outside band
                }
            }
        }

        return Math.sqrt(curr[m - 1]);
    }
 
Example 16
Source File: SPegasos.java    From tsml with GNU General Public License v3.0
/**
 * Updates the classifier with the given instance.
 *
 * @param instance the new training instance to include in the model 
 * @exception Exception if the instance could not be incorporated in
 * the model.
 */
public void updateClassifier(Instance instance) throws Exception {
  if (!instance.classIsMissing()) {
    
    double learningRate = 1.0 / (m_lambda * m_t);
    //double scale = 1.0 - learningRate * m_lambda;
    double scale = 1.0 - 1.0 / m_t;
    double y = (instance.classValue() == 0) ? -1 : 1;
    double wx = dotProd(instance, m_weights, instance.classIndex());
    double z = y * (wx + m_weights[m_weights.length - 1]);        
    
    for (int j = 0; j < m_weights.length - 1; j++) {
      if (j != instance.classIndex()) {
        m_weights[j] *= scale;
      }
    }
    
    if (m_loss == LOGLOSS || (z < 1)) {
      double loss = dloss(z);
      int n1 = instance.numValues();
      for (int p1 = 0; p1 < n1; p1++) {
        int indS = instance.index(p1);
        if (indS != instance.classIndex() &&  !instance.isMissingSparse(p1)) {
          double m = learningRate * loss * (instance.valueSparse(p1) * y);
          m_weights[indS] += m;
        }
      }
      
      // update the bias
      m_weights[m_weights.length - 1] += learningRate * loss * y;
    }
    
    double norm = 0;
    for (int k = 0; k < m_weights.length - 1; k++) {
      if (k != instance.classIndex()) {
        norm += (m_weights[k] * m_weights[k]);
      }
    }
    
    double scale2 = Math.min(1.0, (1.0 / (m_lambda * norm)));
    if (scale2 < 1.0) {
      scale2 = Math.sqrt(scale2);
      for (int j = 0; j < m_weights.length - 1; j++) {
        if (j != instance.classIndex()) {
          m_weights[j] *= scale2;
        }
      }
    }
    m_t++;
  }
}
 
Example 17
Source File: LibSVM.java    From tsml with GNU General Public License v3.0
/**
  * turns an instance into a sparse libsvm array
  * 
  * @param instance	the instance to work on
  * @return		the libsvm array
  * @throws Exception	if setup of array fails
  */
 protected Object instanceToArray(Instance instance) throws Exception {
   int		index;
   int		count;
   int 	i;
   Object 	result;
   
   // determine number of non-zero attributes
   /*for (i = 0; i < instance.numAttributes(); i++) {
     if (i == instance.classIndex())
       continue;
     if (instance.value(i) != 0)
       count++;
   } */
   count = 0;
   for (i = 0; i < instance.numValues(); i++) {
     if (instance.index(i) == instance.classIndex())
       continue;
     if (instance.valueSparse(i) != 0)
       count++;
   }

   // fill array
   /* result = Array.newInstance(Class.forName(CLASS_SVMNODE), count);
   index  = 0;
   for (i = 0; i < instance.numAttributes(); i++) {
     if (i == instance.classIndex())
       continue;
     if (instance.value(i) == 0)
       continue;

     Array.set(result, index, Class.forName(CLASS_SVMNODE).newInstance());
     setField(Array.get(result, index), "index", new Integer(i + 1));
     setField(Array.get(result, index), "value", new Double(instance.value(i)));
     index++;
   } */
   
   result = Array.newInstance(Class.forName(CLASS_SVMNODE), count);
   index  = 0;
   for (i = 0; i < instance.numValues(); i++) {
     
     int idx = instance.index(i);
     if (idx == instance.classIndex())
       continue;
     if (instance.valueSparse(i) == 0)
       continue;

     Array.set(result, index, Class.forName(CLASS_SVMNODE).newInstance());
     setField(Array.get(result, index), "index", new Integer(idx + 1));
     setField(Array.get(result, index), "value", new Double(instance.valueSparse(i)));
     index++;
   }
   
   return result;
 }
 
Example 18
Source File: ReduceDimensionFilter.java    From anthelion with Apache License 2.0
/**
 * Returns the next instance based on the configuration of this class.
 */
public Instance nextInstance() {
	Instance inst = this.inputStream.nextInstance();

	Instance newInst = new SparseInstance(hashSize
			+ notHashableAttributes.size());
	newInst.setDataset(newInstances);
	newInst.replaceMissingValues(replacementArray);
	if (newInstances.size() > 0)
		newInstances.remove(0);
	// newInstances.add(0, newInst);
	for (int i = 0; i < inst.numAttributes(); i++) {
		if (inst.classIndex() == i) {
			newInst.setValue(
					attributesIndex.get(inst.classAttribute().name()),
					inst.classValue());
		} else {
			// check if attributes should be manipulated
			if (ignoreAttributes.contains(i)) {
				inst.setValue(i, 0);
			}
			if (makeBinaryAttributes.contains(i) && inst.value(i) > 0) {
				inst.setValue(i, 1);
			}
			// check what should be done with the attributes.
			if (notHashableAttributes.contains(i)) {
				newInst.setValue(
						attributesIndex.get(inst.attribute(i).name()),
						inst.value(i));

			} else {
				// calculate the hash of the attribute name which is
				// included in
				// the vector and set it to 1
				if (inst.value(i) > 0) {
					newInst.setValue(attributesIndex
							.get(getAttributeNameOfHash(getHash(inst
									.attribute(i).name(), hashSize))), 1);
				}
			}
		}
	}
	// System.out.println(newInst.toString());
	return newInst;
}
 
Example 19
Source File: WDTW1NN.java    From tsml with GNU General Public License v3.0
public final double distance(Instance first, Instance second, double cutoff) {

        // base case - we're assuming class val is last. If this is true, this method is fine,
        // if not, we'll default to the DTW class
        if (first.classIndex() != first.numAttributes() - 1 || second.classIndex() != second.numAttributes() - 1) {
            return new WeightedDTW(g).distance(first, second, cutoff);
        }

        int m = first.numAttributes() - 1;
        int n = second.numAttributes() - 1;

        if (this.refreshWeights) {
            this.initWeights(m);
        }


        //create empty array
        double[][] distances = new double[m][n];

        //first value
        distances[0][0] = this.weightVector[0] * (first.value(0) - second.value(0)) * (first.value(0) - second.value(0));

        //early abandon if first values is larger than cut off
        if (distances[0][0] > cutoff) {
            return Double.MAX_VALUE;
        }

        //top row
        for (int i = 1; i < n; i++) {
            distances[0][i] = distances[0][i - 1] + this.weightVector[i] * (first.value(0) - second.value(i)) * (first.value(0) - second.value(i)); //edited by Jay
        }

        //first column
        for (int i = 1; i < m; i++) {
            distances[i][0] = distances[i - 1][0] + this.weightVector[i] * (first.value(i) - second.value(0)) * (first.value(i) - second.value(0)); //edited by Jay
        }

        //warp rest
        double minDistance;
        for (int i = 1; i < m; i++) {
            boolean overflow = true;

            for (int j = 1; j < n; j++) {
                //calculate distances
                minDistance = Math.min(distances[i][j - 1], Math.min(distances[i - 1][j], distances[i - 1][j - 1]));
                distances[i][j] = minDistance + this.weightVector[Math.abs(i - j)] * (first.value(i) - second.value(j)) * (first.value(i) - second.value(j));

                if (overflow && distances[i][j] < cutoff) {
                    overflow = false; // because there's evidence that the path can continue
                }
            }

            //early abandon
            if (overflow) {
                return Double.MAX_VALUE;
            }
        }
        return distances[m - 1][n - 1];


    }
 
Example 20
Source File: NaiveDTW.java    From tsml with GNU General Public License v3.0
@Override
public void buildClassifier(Instances data) throws Exception {
   	// Initialise training dataset
	Attribute classAttribute = data.classAttribute();
	
	classedData = new HashMap<>();
	classedDataIndices = new HashMap<>();
	for (int c = 0; c < data.numClasses(); c++) {
		classedData.put(data.classAttribute().value(c), new ArrayList<SymbolicSequence>());
		classedDataIndices.put(data.classAttribute().value(c), new ArrayList<Integer>());
	}

	train = new SymbolicSequence[data.numInstances()];
	classMap = new String[train.length];
	maxLength = 0;
	for (int i = 0; i < train.length; i++) {
		Instance sample = data.instance(i);
		MonoDoubleItemSet[] sequence = new MonoDoubleItemSet[sample.numAttributes() - 1];
		maxLength = Math.max(maxLength, sequence.length);
		int shift = (sample.classIndex() == 0) ? 1 : 0;
		for (int t = 0; t < sequence.length; t++) {
			sequence[t] = new MonoDoubleItemSet(sample.value(t + shift));
		}
		train[i] = new SymbolicSequence(sequence);
		String clas = sample.stringValue(classAttribute);
		classMap[i] = clas;
		classedData.get(clas).add(train[i]);
		classedDataIndices.get(clas).add(i);
	}
	
	warpingMatrix = new double[maxLength][maxLength];
	U = new double[maxLength];
	L = new double[maxLength];
	
	maxWindow = Math.round(1 * maxLength);
	searchResults = new String[maxWindow+1];
	nns = new int[maxWindow+1][train.length];
	dist = new double[maxWindow+1][train.length];
	
	// Start searching for the best window
	searchBestWarpingWindow();
	
	// Saving best windows found
	System.out.println("Windows found=" + bestWarpingWindow + " Best Acc=" + (1-bestScore));
}