Java Code Examples for weka.core.Instance#setValue()

The following examples show how to use weka.core.Instance#setValue(). They are taken from open source projects; the source file, project, and license are listed above each example.
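
As a quick orientation before the project examples, here is a minimal, self-contained sketch of the common setValue() overloads: setValue(int, double) addresses an attribute by index, setValue(Attribute, double) addresses it via an Attribute object, and setValue(Attribute, String) sets a nominal or string attribute by label. It assumes Weka 3.7+ (where Instance is an interface implemented by DenseInstance and SparseInstance); the class name SetValueSketch and the attribute names are illustrative and not taken from any of the projects below.

import java.util.ArrayList;

import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;

public class SetValueSketch {
	public static void main(String[] args) {
		// Two numeric attributes plus a nominal class attribute.
		ArrayList<String> classValues = new ArrayList<>();
		classValues.add("yes");
		classValues.add("no");
		ArrayList<Attribute> attributes = new ArrayList<>();
		attributes.add(new Attribute("x"));
		attributes.add(new Attribute("y"));
		attributes.add(new Attribute("class", classValues));

		Instances dataset = new Instances("demo", attributes, 1);
		dataset.setClassIndex(dataset.numAttributes() - 1);

		Instance inst = new DenseInstance(dataset.numAttributes());
		inst.setDataset(dataset);                          // attach the header so the instance knows its attributes
		inst.setValue(0, 1.5);                             // by index
		inst.setValue(dataset.attribute("y"), 2.5);        // by Attribute, numeric value
		inst.setValue(dataset.attribute("class"), "yes");  // by Attribute, nominal label
		dataset.add(inst);

		System.out.println(dataset);
	}
}

Note that setValue(int, double) writes the internal double representation directly, so for a nominal attribute the double must be the index of the desired label. Example 2 below uses the older Weka 3.6 API, in which Instance was still a concrete class.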
Example 1
Source File: ExtendedRandomTreeTest.java    From AILibs with GNU Affero General Public License v3.0
public Instances getTrainingData() {
	List<Instance> instances = new ArrayList<>();
	for (double i = lowerBound; i < upperBound; i += stepSize) {
		Instance instance = new DenseInstance(2);
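		// setValue(int, double) sets the value at the given attribute index; the header is built and attached afterwards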
		instance.setValue(0, i);
		instance.setValue(1, this.fun.apply(i));
		instances.add(instance);
	}
	ArrayList<Attribute> attributes = new ArrayList<>();
	attributes.add(0, new Attribute("xVal"));
	attributes.add(1, new Attribute("yVal"));
	Instances inst = new Instances("test", attributes, instances.size());
	inst.addAll(instances);
	inst.setClassIndex(1);
	return inst;
}
 
Example 2
Source File: GridModelDataPredictVS.java    From gsn with GNU General Public License v3.0
private Instance instanceFromStream(StreamElement data) {
	try{
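	// Pre-3.7 Weka API: Instance is a concrete class here; in Weka 3.7+ this would be new DenseInstance(numAttributes)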
	Instance i = new Instance(data.getFieldNames().length);
	for(int j=0;j<data.getFieldNames().length;j++){
		i.setValue(j, ((Double)data.getData()[j]));
	}
	//scaling specific to opensense data!! should be put in the parameters?
	i.setValue(0, i.value(0)/1400.0);
	i.setValue(2, i.value(2)/50);
	i.setValue(3, i.value(3)/100.0);
	i.setValue(4, i.value(4)/100.0 - 4);	
	return i;
	}catch(Exception e){
		return null;
	}
}
 
Example 3
Source File: Analyzer.java    From NLIWOD with GNU Affero General Public License v3.0
/**
 * Analyzes the question and extracts all features that were set for this Analyzer.
 * @param q question string
 * @return feature vector for the input question
 */
public Instance analyze(String q) {
	Instance tmpInstance = new DenseInstance(fvWekaAttributes.size());
	
	for (IAnalyzer analyzer : analyzers) {
		//special case for PartOfSpeechTags, need to set 36 attributes
		if(analyzer instanceof PartOfSpeechTags) {
			analyzePOS(tmpInstance, (PartOfSpeechTags) analyzer, q);
			continue;
		}		
		
		//special case for Dependencies, need to set 18 attributes
		if(analyzer instanceof Dependencies) {
			analyzeDeps(tmpInstance, (Dependencies) analyzer, q);
			continue;
		}
		
		Attribute attribute = analyzer.getAttribute();
		if (attribute.isNumeric()) {
			tmpInstance.setValue(attribute, (double) analyzer.analyze(q));
		} else if (attribute.isNominal() || attribute.isString()) {
			String value = (String) analyzer.analyze(q);
			tmpInstance.setValue(attribute,value);
			tmpInstance.setDataset(null);
		}
	}
	return tmpInstance;
}
 
Example 4
Source File: EntityTypeTest.java    From NLIWOD with GNU Affero General Public License v3.0
@Test
public void organizationTest2() {
	EntityOrganization organa = new EntityOrganization();
	ArrayList<Attribute> fvWekaAttributes = new ArrayList<Attribute>();
	fvWekaAttributes.add(organa.getAttribute());
	new Instances("Test", fvWekaAttributes, 1);
	Instance testinstance = new DenseInstance(fvWekaAttributes.size());
	testinstance.setValue(organa.getAttribute(), (String) organa.analyze("Bart is a person."));
	assertTrue(testinstance.stringValue(organa.getAttribute()).equals("NoOrganization"));
}
 
Example 5
Source File: CnnTextFilesEmbeddingInstanceIteratorTest.java    From wekaDeeplearning4j with GNU General Public License v3.0
public Instances makeData() throws Exception {
  final Instances data = TestUtil.makeTestDataset(42,
      100,
      0,
      0,
      1,
      0,
      0,
      1,
      Attribute.NUMERIC,
      1,
      false);

  WordVectors wordVectors = WordVectorSerializer
      .loadStaticModel(DatasetLoader.loadGoogleNewsVectors());
  String[] words = (String[]) wordVectors.vocab().words().toArray(new String[0]);

  Random rand = new Random(42);
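  // overwrite the string attribute at index 0 of each instance with a random 10-word sentence drawn from the embedding vocabulary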
  for (Instance inst : data) {
    StringBuilder sentence = new StringBuilder();
    for (int i = 0; i < 10; i++) {
      final int idx = rand.nextInt(words.length);
      sentence.append(" ").append(words[idx]);
    }
    inst.setValue(0, sentence.toString());
  }
  return data;
}
 
Example 6
Source File: YeoJohnson.java    From tsml with GNU General Public License v3.0
@Override
public Instances invert(Instances data){
	Instance inst;
	int responsePos=data.numAttributes()-1;
	double[] response=data.attributeToDoubleArray(responsePos);
	double v;
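	// invert the Yeo-Johnson transform and write the values back over the response attribute in place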
	double[] newVals=invert(bestLambda,response);
	
	for(int i=0;i<data.numInstances();i++)
	{
		inst=data.instance(i);
		inst.setValue(responsePos,newVals[i]);
	}
	return data;
}
 
Example 7
Source File: YeoJohnson.java    From tsml with GNU General Public License v3.0
static public void transformResponse(Instances data, double lambda, double[] response)
{
	Instance inst;
	int responsePos=data.classIndex();
	double[] newData=transform(lambda,response);
	for(int i=0;i<response.length;i++)
	{
		inst=data.instance(i);
		inst.setValue(responsePos,newData[i]);
	}
}
 
Example 8
Source File: EntityTypeTest.java    From NLIWOD with GNU Affero General Public License v3.0
@Test
public void dateTest1() {
	EntityDate dateana = new EntityDate();
	ArrayList<Attribute> fvWekaAttributes = new ArrayList<Attribute>();
	fvWekaAttributes.add(dateana.getAttribute());
	new Instances("Test", fvWekaAttributes, 1);
	Instance testinstance = new DenseInstance(fvWekaAttributes.size());
	testinstance.setValue(dateana.getAttribute(), (String) dateana.analyze("The olympic games in 1992 were the best."));
	assertTrue(testinstance.stringValue(dateana.getAttribute()).equals("Date"));
}
 
Example 9
Source File: KMeans.java    From Java-Data-Analysis with MIT License
private static Instances load(double[][] data) {
    ArrayList<Attribute> attributes = new ArrayList<Attribute>();
    attributes.add(new Attribute("X"));
    attributes.add(new Attribute("Y"));
    Instances dataset = new Instances("Dataset", attributes, M);
    for (double[] datum : data) {
        Instance instance = new SparseInstance(2);
        instance.setValue(0, datum[0]);
        instance.setValue(1, datum[1]);
        dataset.add(instance);
    }
    return dataset;
}
 
Example 10
Source File: RelExTool.java    From Criteria2Query with Apache License 2.0
public String predict(String en1, String en2,String cb, Double e1e, Double e2s, Double dis, Double shortestdeppath)
		throws Exception {
	List entity1_type = Arrays.asList(GlobalSetting.primaryEntities);
	List entity2_type = Arrays.asList(GlobalSetting.atrributes);
	List combo = Arrays.asList(GlobalSetting.combo);
	List rel = Arrays.asList(GlobalSetting.relations);
	Attribute entity1_end_index = new Attribute("entity1_end_index");
	Attribute entity2_start_index = new Attribute("entity2_start_index");
	Attribute distance = new Attribute("distance");
	Attribute shortestdep = new Attribute("shortestdep");
	Attribute entity1_type_attr = new Attribute("entity1_type", entity1_type);
	Attribute entity2_type_attr = new Attribute("entity2_type", entity2_type);
	Attribute combo_attr = new Attribute("combo", combo);
	Attribute rel_attr = new Attribute("rel", rel);

	ArrayList<Attribute> atts = new ArrayList<Attribute>();
	atts.add(entity1_type_attr);
	atts.add(entity2_type_attr);
	atts.add(combo_attr);
	atts.add(entity1_end_index);
	atts.add(entity2_start_index);
	atts.add(distance);
	atts.add(shortestdep);
	atts.add(rel_attr);
	Instances adataset = new Instances("TestDataSet", atts, 1);
	Instance inst = new DenseInstance(8);
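	// fill in the features; the class attribute rel (index 7) is left missing and is predicted below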
	inst.setValue(entity1_type_attr, en1);
	inst.setValue(entity2_type_attr, en2);
	inst.setValue(combo_attr, cb);
	inst.setValue(entity2_start_index, e2s);
	inst.setValue(entity1_end_index, e1e);
	inst.setValue(distance, dis);
	inst.setValue(shortestdep, shortestdeppath);
	// inst.setValue(rel_attr, "has-relation");
	inst.setDataset(adataset);
	adataset.setClassIndex(7);
	Double d = classifier.classifyInstance(inst);
	// System.out.println("?="+d);
	return (String) rel.get(d.intValue());
}
 
Example 11
Source File: WekaHierarchicalClustering2.java    From Java-Data-Analysis with MIT License
private static Instances load(double[][] data) {
    ArrayList<Attribute> attributes = new ArrayList<Attribute>();
    attributes.add(new Attribute("X"));
    attributes.add(new Attribute("Y"));
    Instances dataset = new Instances("Dataset", attributes, M);
    for (double[] datum : data) {
        Instance instance = new SparseInstance(2);
        instance.setValue(0, datum[0]);
        instance.setValue(1, datum[1]);
        dataset.add(instance);
    }
    return dataset;
}
 
Example 12
Source File: EntityTypeTest.java    From NLIWOD with GNU Affero General Public License v3.0
@Test
public void organizationTest1() {
	EntityOrganization organa = new EntityOrganization();
	ArrayList<Attribute> fvWekaAttributes = new ArrayList<Attribute>();
	fvWekaAttributes.add(organa.getAttribute());
	new Instances("Test", fvWekaAttributes, 1);
	Instance testinstance = new DenseInstance(fvWekaAttributes.size());
	testinstance.setValue(organa.getAttribute(), (String) organa.analyze("The United Nations are an organization."));
	assertTrue(testinstance.stringValue(organa.getAttribute()).equals("Organization"));
}
 
Example 13
Source File: FTNode.java    From tsml with GNU General Public License v3.0
/**
 * Returns the class probabilities for an instance given by the Functional Tree.
 * @param instance the instance
 * @return the array of probabilities
 */
public double[] distributionForInstance(Instance instance) throws Exception {
  double[] probs;

  if (m_isLeaf && m_hasConstr) { //leaf
    //leaf: use majority class or constructor model
    probs = modelDistributionForInstance(instance);
  } else { 
    if (m_isLeaf && !m_hasConstr)
      {
        probs=new double[instance.numClasses()];
        probs[m_leafclass]=(double)1;
      }else{
             
      probs = modelDistributionForInstance(instance);
      //Build an auxiliary split instance
      Instance instanceSplit=new DenseInstance(instance.numAttributes()+instance.numClasses());
      instanceSplit.setDataset(instance.dataset());
         
      // Insert the class-probability attributes and their values
      for(int i=0; i< instance.numClasses();i++)
        {
          instanceSplit.dataset().insertAttributeAt( new Attribute("N"+ (instance.numClasses()-i)), 0);
          instanceSplit.setValue(i,probs[i]);
        }
      for(int i=0; i< instance.numAttributes();i++)
        instanceSplit.setValue(i+instance.numClasses(),instance.value(i));
         
      //chooses best branch           
      int branch = m_localModel.whichSubset(instanceSplit); //split
         
      //delete added attributes
      for(int i=0; i< instance.numClasses();i++)
        instanceSplit.dataset().deleteAttributeAt(0);
          
      probs = m_sons[branch].distributionForInstance(instance);
    }
  }
  return probs;
	
}
 
Example 14
Source File: ExtendedRandomForestTest.java    From AILibs with GNU Affero General Public License v3.0
/**
 * Test the classifier without any cross-validation
 * @throws IOException
 */
@Test
public void testPredict() throws IOException {
	for (int dataset_index = 0; dataset_index < dataset_count; dataset_index++) {
		for (int noise_index = 0; noise_index < noise_count; noise_index++) {
			for (int seed = 0; seed < seedNum; seed++) {
				String testfile_name = this.getTestFileName(dataset_index);
				try (BufferedReader reader = Files.newBufferedReader(Paths.get(testfile_name),
						StandardCharsets.UTF_8)) {
					ArffReader arffReader = new ArffReader(reader);
					Instances data = arffReader.getData();
					List<Double> predictedLowers = new ArrayList<>();
					List<Double> actualLowers = new ArrayList<>();
					List<Double> predictedUppers = new ArrayList<>();
					List<Double> actualUppers = new ArrayList<>();
					for (Instance instance : data) {
						// construct the real interval
						double lower = instance.value(data.numAttributes() - 2);
						double upper = instance.value(data.numAttributes() - 1);
						Instance strippedInstance = new DenseInstance(data.numAttributes() - 2);
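						// copy all features except the two interval-bound attributes (the last two) into the stripped instance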
						for (int i = 0; i < data.numAttributes() - 2; i++) {
							strippedInstance.setValue(i, instance.value(i));
						}
						Interval actualInterval = new Interval(lower, upper);
						Interval predictedInterval = this.classifier[dataset_index][noise_index][seed]
								.predictInterval(strippedInstance);

						predictedLowers.add(predictedInterval.getInf());
						predictedUppers.add(predictedInterval.getSup());
						actualLowers.add(lower);
						actualUppers.add(upper);
					}

					double l1LossLower = L1Loss(predictedLowers, actualLowers);
					double l1LossUpper = L1Loss(predictedUppers, actualUppers);

					l1Lower[dataset_index][noise_index][seed] = l1LossLower;
					l1Upper[dataset_index][noise_index][seed] = l1LossUpper;

				}
			}
			double avgLower = Arrays.stream(l1Lower[dataset_index][noise_index]).average().getAsDouble();
			double avgUpper = Arrays.stream(l1Upper[dataset_index][noise_index]).average().getAsDouble();
			double l1Loss = (avgLower + avgUpper) / 2;
			System.out.println(datasets[dataset_index] + " " + noise[noise_index] + " " + l1Loss);
		}
	}
}
 
Example 15
Source File: RemoveWithValues.java    From tsml with GNU General Public License v3.0
/**
  * Input an instance for filtering. Ordinarily the instance is processed
  * and made available for output immediately. Some filters require all
  * instances be read before producing output.
  *
  * @param instance the input instance
  * @return true if the filtered instance may now be
  * collected with output().
  * @throws IllegalStateException if no input format has been set.
  */
 public boolean input(Instance instance) {

   if (getInputFormat() == null) {
     throw new IllegalStateException("No input instance format defined");
   }
   if (m_NewBatch) {
     resetQueue();
     m_NewBatch = false;
   }
   
   if (isFirstBatchDone() && m_dontFilterAfterFirstBatch) {
     push((Instance)instance.copy());
     return true;
   }
   
   if (instance.isMissing(m_AttIndex.getIndex())) {
     if (!getMatchMissingValues()) {
       push((Instance)instance.copy());
       return true;
     } else {
       return false;
     }
   }
   if (isNumeric()) {
     if (!m_Values.getInvert()) {
       if (instance.value(m_AttIndex.getIndex()) < m_Value) {
         push((Instance)instance.copy());
         return true;
       }
     } else {
       if (instance.value(m_AttIndex.getIndex()) >= m_Value) {
         push((Instance)instance.copy());
         return true;
       }
     }
   }
   if (isNominal()) {
     if (m_Values.isInRange((int)instance.value(m_AttIndex.getIndex()))) {
       Instance temp = (Instance)instance.copy();
       if (getModifyHeader()) {
         temp.setValue(m_AttIndex.getIndex(),
                       m_NominalMapping[(int)instance.value(m_AttIndex.getIndex())]);
       }
       push(temp);
       return true;
     }
   }
   return false;
 }
 
Example 16
Source File: UnionFilter.java    From AILibs with GNU Affero General Public License v3.0
static DataSet union(final DataSet coll1, final DataSet coll2) {
	if (coll1 == null || coll2 == null) {
		throw new IllegalArgumentException("Parameters 'coll1' and 'coll2' must not be null!");
	}

	if (coll1.getIntermediateInstances() == null || coll2.getIntermediateInstances() == null) {
		// Merge Weka instances
		Instances instances1 = coll1.getInstances();
		Instances instances2 = coll2.getInstances();

		if (instances1.numInstances() != instances2.numInstances()) {
			throw new IllegalArgumentException("Data sets to be united must have the same amount of instances!");
		}

		ArrayList<Attribute> attributes = new ArrayList<>(
				coll1.getInstances().numAttributes() + coll2.getInstances().numAttributes() - 1);
		for (int i = 0; i < instances1.numAttributes() - 1; i++) {
			attributes.add(instances1.attribute(i).copy(instances1.attribute(i).name() + "u1"));
		}
		for (int i = 0; i < instances2.numAttributes() - 1; i++) {
			attributes.add(instances2.attribute(i).copy(instances2.attribute(i).name() + "u2"));
		}

		// Add class attribute
		List<String> classValues = IntStream.range(0, instances1.classAttribute().numValues()).asDoubleStream()
				.mapToObj(String::valueOf).collect(Collectors.toList());
		Attribute classAtt = new Attribute("classAtt", classValues);
		attributes.add(classAtt);

		Instances unitedInstances = new Instances("UnitedInstances", attributes, instances1.numInstances());
		unitedInstances.setClassIndex(unitedInstances.numAttributes() - 1);

		for (int i = 0; i < instances1.numInstances(); i++) {
			Instance instance = new DenseInstance(attributes.size());
			instance.setDataset(unitedInstances);

			// Copy values
			int runningIndex = 0;
			for (int j = 0; j < instances1.numAttributes() - 1; j++) {
				instance.setValue(runningIndex++, instances1.get(i).value(j));
			}
			for (int j = 0; j < instances2.numAttributes() - 1; j++) {
				instance.setValue(runningIndex++, instances2.get(i).value(j));
			}
			instance.setClassValue(instances1.get(i).classValue());

			unitedInstances.add(instance);
		}

		return new DataSet(unitedInstances, null);
	} else {
		if (coll1.getIntermediateInstances().isEmpty() || coll2.getIntermediateInstances().isEmpty()) {
			throw new IllegalArgumentException("There must be intermediate instances if the collection is set.");
		}

		// Merge intermediate instances
		List<INDArray> intermediateInsts1 = coll1.getIntermediateInstances();
		List<INDArray> intermediateInsts2 = coll2.getIntermediateInstances();

		List<INDArray> unitedIntermediateInsts = new ArrayList<>(
				(int) (intermediateInsts1.get(0).length() + intermediateInsts2.get(0).length()));
		for (int i = 0; i < intermediateInsts1.size(); i++) {
			INDArray intermediateInst = Nd4j.hstack(intermediateInsts1.get(i).ravel(),
					intermediateInsts2.get(i).ravel());
			unitedIntermediateInsts.add(intermediateInst);
		}

		return new DataSet(coll1.getInstances(), unitedIntermediateInsts);
	}
}
 
Example 17
Source File: GridModelDataPredictVS.java    From gsn with GNU General Public License v3.0
public void dataAvailable(String inputStreamName, StreamElement data) { 

//mapping the input stream to an instance and setting its dataset
String[] dfn = data.getFieldNames().clone();
Byte[] dft = data.getFieldTypes().clone();
Serializable[] da = data.getData().clone();
data = new StreamElement(dfn, dft, da, data.getTimeStamp());
Instance i = instanceFromStream(data);
if (att.size() == 0){
	att = attFromStream(data);
}
dataset = new Instances("input",att,0);
dataset.setClassIndex(classIndex);
if(i != null){
	dataset.add(i);
	i = dataset.firstInstance();
	
	boolean success = true;
	
	//extracting latitude/longitude
	Double center_lat = i.value(1);
	Double center_long = i.value(2);
	
	//filling the grid with predictions/extrapolations
	Double[][] rawData = new Double[gridSize][gridSize];
	for (int j=0;j<gridSize;j++){
		for(int k=0;k<gridSize;k++){
			i.setValue(1, center_lat - (cellSize*gridSize/2) + cellSize * j);
			i.setValue(2, center_long - (cellSize*gridSize/2) + cellSize * k);
			rawData[j][k] = ms.predict(i);
			success = success && (rawData[j][k] != null);
		}
	}

	//preparing the output
	
	Serializable[] stream = new Serializable[7];
       try {

           ByteArrayOutputStream bos = new ByteArrayOutputStream();
           ObjectOutputStream oos = new ObjectOutputStream(bos);
           oos.writeObject(rawData);
           oos.flush();
           oos.close();
           bos.close();

           stream[0] = new Integer(gridSize);
           stream[1] = new Integer(gridSize);
           stream[2] = new Double(center_lat - (cellSize*gridSize/2));
           stream[3] = new Double(center_long - (cellSize*gridSize/2));
           stream[4] = new Double(cellSize);
           stream[5] = new Double(0);
           stream[6] = bos.toByteArray();

       } catch (IOException e) {
           logger.warn(e.getMessage(), e);
           success = false;
       }
       
       if(success){
       	StreamElement se = new StreamElement(getOutputFormat(), stream, data.getTimeStamp());
       	dataProduced(se);
       }else{
		logger.warn("Prediction error. Something went wrong with the prediction.");
	}

}else{
	logger.warn("Predicting instance has wrong attributes, please check the model and the inputs.");
}
  }
 
Example 18
Source File: ArffFileFromRun.java    From NLIWOD with GNU Affero General Public License v3.0
public static void main(String[] args) throws Exception {
HAWK hawk = new HAWK();
SINA sina = new SINA();
QAKIS qakis = new QAKIS();
YODA yoda = new YODA();

/*
 * For multilabel classification:
 */

ArrayList<String> fvhawk = new ArrayList<String>();
fvhawk.add("1");
fvhawk.add("0");
Attribute hawkatt = new Attribute("hawk", fvhawk);

ArrayList<String> fvqakis = new ArrayList<String>();
fvqakis.add("1");
fvqakis.add("0");
Attribute qakisatt = new Attribute("qakis", fvqakis);

ArrayList<String> fvyoda = new ArrayList<String>();
fvyoda.add("1");
fvyoda.add("0");
Attribute yodaatt = new Attribute("yoda", fvyoda);

ArrayList<String> fvsina = new ArrayList<String>();
fvsina.add("1");
fvsina.add("0");
Attribute sinaatt = new Attribute("sina", fvsina);


/*
 * 
 */

// 1. Learn on the training data for each system a classifier to find
// out which system can answer which question

// 1.1 load the questions and how good each system answers
log.debug("Load the questions and how good each system answers");
List<IQuestion> trainQuestions = LoaderController.load(Dataset.QALD6_Train_Multilingual);
List<ASystem> systems = Lists.newArrayList(hawk, sina, qakis, yoda);
JSONArray traindata = RunProducer.loadRunData(Dataset.QALD6_Train_Multilingual);

// 1.2 calculate the features per question and system
log.debug("Calculate the features per question and system");
Analyzer analyzer = new Analyzer();
ArrayList<Attribute> fvfinal = analyzer.fvWekaAttributes;

fvfinal.add(0, hawkatt);
fvfinal.add(0, yodaatt);
fvfinal.add(0, sinaatt);
fvfinal.add(0,qakisatt);


Instances trainingSet = new Instances("training_classifier: -C 4" , fvfinal, trainQuestions.size());
log.debug("Start collection of training data for each system");

	
for (int i = 0; i < traindata.size(); i++) {
	JSONObject questiondata = (JSONObject) traindata.get(i);
	JSONObject allsystemsdata = (JSONObject) questiondata.get("answers");
	String question = (String) questiondata.get("question");
	Instance tmp = analyzer.analyze(question);

	tmp.setValue(hawkatt, 0);
	tmp.setValue(yodaatt, 0);
	tmp.setValue(sinaatt, 0);
	tmp.setValue(qakisatt, 0);

	for(ASystem system: systems){
		JSONObject systemdata = (JSONObject) allsystemsdata.get(system.name());
		if(new Double(systemdata.get("fmeasure").toString()) > 0)
			switch (system.name()){
			case "hawk": tmp.setValue(hawkatt, 1); break;
			case "yoda": tmp.setValue(yodaatt, 1); break;
			case "sina": tmp.setValue(sinaatt, 1); break;
			case "qakis": tmp.setValue(qakisatt, 1); break;
			}
		}

	trainingSet.add(tmp);
	}
log.debug(trainingSet.toString());

try (FileWriter file = new FileWriter("./src/main/resources/old/Train.arff")) {
	file.write(trainingSet.toString());
} catch (IOException e) {
	e.printStackTrace();
}				
}
 
Example 19
Source File: CNode.java    From meka with GNU General Public License v3.0
public void updateTransform(Instance t_, double ypred[]) throws Exception {
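	// copy the predicted values of the parent labels (paY) into their mapped attribute positions in the transformed instance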
	for(int pa : this.paY) {
		t_.setValue(this.map[pa],ypred[pa]);
	}
}