org.neuroph.core.data.DataSet Java Examples

The following examples show how to use org.neuroph.core.data.DataSet. They are taken from open-source projects; the source file and license are noted above each example.
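As a quick orientation before the examples, here is a minimal sketch of the core DataSet API they rely on: constructing a data set with fixed input and output sizes, adding DataSetRow entries, and iterating over the rows. The class name DataSetBasics and the sample rows are illustrative only.

import org.neuroph.core.data.DataSet;
import org.neuroph.core.data.DataSetRow;

import java.util.Arrays;

public class DataSetBasics {

    public static void main(String[] args) {
        // a data set whose rows have 2 inputs and 1 desired output
        DataSet dataSet = new DataSet(2, 1);
        dataSet.add(new DataSetRow(new double[]{0, 0}, new double[]{0}));
        dataSet.add(new DataSetRow(new double[]{1, 1}, new double[]{1}));

        // iterate over the rows and print inputs and desired outputs
        for (DataSetRow row : dataSet.getRows()) {
            System.out.println(Arrays.toString(row.getInput())
                    + " -> " + Arrays.toString(row.getDesiredOutput()));
        }
    }
}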
Example #1
Source File: Ionosphere.java    From NeurophFramework with Apache License 2.0
public void evaluate(NeuralNetwork neuralNet, DataSet dataSet) {

        System.out.println("Calculating performance indicators for neural network.");

        Evaluation evaluation = new Evaluation();
        evaluation.addEvaluator(new ErrorEvaluator(new MeanSquaredError()));

        evaluation.addEvaluator(new ClassifierEvaluator.Binary(0.5));
        evaluation.evaluate(neuralNet, dataSet);

        ClassifierEvaluator evaluator = evaluation.getEvaluator(ClassifierEvaluator.Binary.class);
        ConfusionMatrix confusionMatrix = evaluator.getResult();
        System.out.println("Confusion matrrix:\r\n");
        System.out.println(confusionMatrix.toString() + "\r\n\r\n");
        System.out.println("Classification metrics\r\n");
        ClassificationMetrics[] metrics = ClassificationMetrics.createFromMatrix(confusionMatrix);
        ClassificationMetrics.Stats average = ClassificationMetrics.average(metrics);
        for (ClassificationMetrics cm : metrics) {
            System.out.println(cm.toString() + "\r\n");
        }
        System.out.println(average.toString());
    }
 
Example #2
Source File: SunSpots.java    From NeurophFramework with Apache License 2.0
public void run() {

    // uncomment the following line to use regular Neuroph (non-flat) processing
    //Neuroph.getInstance().setFlattenNetworks(false);
    // create neural network
    NeuralNetwork network = new MultiLayerPerceptron(TransferFunctionType.SIGMOID, WINDOW_SIZE, 10, 1);

    // normalize training data
    normalizeSunspots(0.1, 0.9);

    network.getLearningRule().addListener(this);

    // create training set
    DataSet trainingSet = generateTrainingData();
    network.learn(trainingSet);
    predict(network);

    Neuroph.getInstance().shutdown();
}
 
Example #3
Source File: IrisFlowers.java    From NeurophFramework with Apache License 2.0
/**
 * Evaluates classification performance of a neural network.
 * Contains calculation of Confusion matrix for classification tasks or Mean Squared Error and Mean Absolute Error for regression tasks.
 *
 * @param neuralNet
 * @param dataSet
 */
public void evaluate(NeuralNetwork neuralNet, DataSet dataSet) {

    System.out.println("Calculating performance indicators for neural network.");

    Evaluation evaluation = new Evaluation();
    evaluation.addEvaluator(new ErrorEvaluator(new MeanSquaredError()));

    String[] classLabels = new String[]{"Virginica", "Setosa", "Versicolor"};
    evaluation.addEvaluator(new ClassifierEvaluator.MultiClass(classLabels));
    evaluation.evaluate(neuralNet, dataSet);

    ClassifierEvaluator evaluator = evaluation.getEvaluator(ClassifierEvaluator.MultiClass.class);
    ConfusionMatrix confusionMatrix = evaluator.getResult();
    System.out.println("Confusion matrrix:\r\n");
    System.out.println(confusionMatrix.toString() + "\r\n\r\n");
    System.out.println("Classification metrics\r\n");
    ClassificationMetrics[] metrics = ClassificationMetrics.createFromMatrix(confusionMatrix);
    ClassificationMetrics.Stats average = ClassificationMetrics.average(metrics);
    for (ClassificationMetrics cm : metrics) {
        System.out.println(cm.toString() + "\r\n");
    }
    System.out.println(average.toString());
}
 
Example #4
Source File: Banknote.java    From NeurophFramework with Apache License 2.0
public void evaluate(NeuralNetwork neuralNet, DataSet dataSet) {

        System.out.println("Calculating performance indicators for neural network.");

        Evaluation evaluation = new Evaluation();
        evaluation.addEvaluator(new ErrorEvaluator(new MeanSquaredError()));

        evaluation.addEvaluator(new ClassifierEvaluator.Binary(0.5));
        evaluation.evaluate(neuralNet, dataSet);

        ClassifierEvaluator evaluator = evaluation.getEvaluator(ClassifierEvaluator.Binary.class);
        ConfusionMatrix confusionMatrix = evaluator.getResult();
        System.out.println("Confusion matrrix:\r\n");
        System.out.println(confusionMatrix.toString() + "\r\n\r\n");
        System.out.println("Classification metrics\r\n");
        ClassificationMetrics[] metrics = ClassificationMetrics.createFromMatrix(confusionMatrix);
        ClassificationMetrics.Stats average = ClassificationMetrics.average(metrics);
        for (ClassificationMetrics cm : metrics) {
            System.out.println(cm.toString() + "\r\n");
        }
        System.out.println(average.toString());
    }
 
Example #5
Source File: BostonHousePrice.java    From NeurophFramework with Apache License 2.0
public void evaluate(NeuralNetwork neuralNet, DataSet dataSet) {

        System.out.println("Calculating performance indicators for neural network.");

        MeanSquaredError mse = new MeanSquaredError();
        MeanAbsoluteError mae = new MeanAbsoluteError();

        for (DataSetRow testSetRow : dataSet.getRows()) {

            neuralNet.setInput(testSetRow.getInput());
            neuralNet.calculate();
            double[] networkOutput = neuralNet.getOutput();
            double[] desiredOutput = testSetRow.getDesiredOutput();
            mse.addPatternError(networkOutput, desiredOutput);
            mae.addPatternError(networkOutput, desiredOutput);
        }

        System.out.println("Mean squared error is: " + mse.getTotalError());
        System.out.println("Mean absolute error is: " + mae.getTotalError());
    }
 
Example #6
Source File: KohonenLearning.java    From NeurophFramework with Apache License 2.0
@Override
public void learn(DataSet trainingSet) {

    for (int phase = 0; phase < 2; phase++) {
        for (int k = 0; k < iterations[phase]; k++) {
            Iterator<DataSetRow> iterator = trainingSet.iterator();
            while (iterator.hasNext() && !isStopped()) {
                DataSetRow trainingSetRow = iterator.next();
                learnPattern(trainingSetRow, nR[phase]);
            } // while
            currentIteration = k;
            fireLearningEvent(new LearningEvent(this, LearningEvent.Type.EPOCH_ENDED));
            if (isStopped()) return;
        } // for k
        learningRate = learningRate * 0.5;
    } // for phase
}
 
Example #7
Source File: DigitsRecognition.java    From NeurophFramework with Apache License 2.0
/**
 * Prints network output for each element of the specified test set.
 *
 * @param neuralNet neural network
 * @param testSet test data set
 */
public static void testNeuralNetwork(NeuralNetwork neuralNet, DataSet testSet) {

    System.out.println("--------------------------------------------------------------------");
    System.out.println("***********************TESTING NEURAL NETWORK***********************");
    for (DataSetRow testSetRow : testSet.getRows()) {
        neuralNet.setInput(testSetRow.getInput());
        neuralNet.calculate();

        int outputIdx = maxOutput(neuralNet.getOutput());

        String[] inputDigit = DigitData.convertDataIntoImage(testSetRow.getInput());

        for (int i = 0; i < inputDigit.length; i++) {
            if (i != inputDigit.length - 1) {
                System.out.println(inputDigit[i]);
            } else {
                System.out.println(inputDigit[i] + "----> " + outputIdx);
            }
        }
        System.out.println("");
    }
}
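The maxOutput helper used above (and in several samples below) is not part of this listing; a minimal argmax sketch, under the assumption that it simply returns the index of the largest output value:

// Assumed shape of the maxOutput helper: index of the largest network output.
public static int maxOutput(double[] output) {
    int maxIndex = 0;
    for (int i = 1; i < output.length; i++) {
        if (output[i] > output[maxIndex]) {
            maxIndex = i;
        }
    }
    return maxIndex;
}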
 
Example #8
Source File: WheatSeeds.java    From NeurophFramework with Apache License 2.0
public void evaluate(NeuralNetwork neuralNet, DataSet dataSet) {

        System.out.println("Calculating performance indicators for neural network.");

        Evaluation evaluation = new Evaluation();
        evaluation.addEvaluator(new ErrorEvaluator(new MeanSquaredError()));

        String[] classLabels = new String[]{"1", "2", "3"};
        evaluation.addEvaluator(new ClassifierEvaluator.MultiClass(classLabels));
        evaluation.evaluate(neuralNet, dataSet);

        ClassifierEvaluator evaluator = evaluation.getEvaluator(ClassifierEvaluator.MultiClass.class);
        ConfusionMatrix confusionMatrix = evaluator.getResult();
        System.out.println("Confusion matrrix:\r\n");
        System.out.println(confusionMatrix.toString() + "\r\n\r\n");
        System.out.println("Classification metrics\r\n");
        ClassificationMetrics[] metrics = ClassificationMetrics.createFromMatrix(confusionMatrix);
        ClassificationMetrics.Stats average = ClassificationMetrics.average(metrics);
        for (ClassificationMetrics cm : metrics) {
            System.out.println(cm.toString() + "\r\n");
        }
        System.out.println(average.toString());
    }
 
Example #9
Source File: StockTrainingSetImport.java    From NeurophFramework with Apache License 2.0
/**
 * Creates and returns training set for stock market prediction using the provided data from array
 * @param values an array containing stock data
 * @param inputsCount training element (neural net) inputs count
 * @param outputsCount training element (neural net) outputs count
 * @return training set with stock data
 */
public static DataSet importFromArray(double[] values, int inputsCount, int outputsCount) {
    DataSet trainingSet = new DataSet(inputsCount, outputsCount);
    for (int i = 0; i < values.length - inputsCount; i++) {
        ArrayList<Double> inputs = new ArrayList<Double>();
        for (int j = i; j < i + inputsCount; j++) {
            inputs.add(values[j]);
        }
        ArrayList<Double> outputs = new ArrayList<Double>();
        if (outputsCount > 0 && i + inputsCount + outputsCount <= values.length) {
            for (int j = i + inputsCount; j < i + inputsCount + outputsCount; j++) {
                outputs.add(values[j]);
            }
            if (outputsCount > 0) {
                trainingSet.add(new DataSetRow(inputs, outputs));
            } else {
                trainingSet.add(new DataSetRow(inputs));
            }
        }
    }
    return trainingSet;
}
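A hypothetical call to this helper, assuming values holds a series of closing prices and using an illustrative window of 5 inputs and 1 output per row:

// Hypothetical usage of importFromArray; the price series and window sizes are illustrative.
double[] closingPrices = {10.2, 10.5, 10.4, 10.9, 11.1, 11.0, 11.3, 11.2};
DataSet stockTrainingSet = StockTrainingSetImport.importFromArray(closingPrices, 5, 1);
for (DataSetRow row : stockTrainingSet.getRows()) {
    System.out.println(Arrays.toString(row.getInput())
            + " -> " + Arrays.toString(row.getDesiredOutput()));
}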
 
Example #10
Source File: SwedishAutoInsurance.java    From NeurophFramework with Apache License 2.0
public void evaluate(NeuralNetwork neuralNet, DataSet dataSet) {

        System.out.println("Calculating performance indicators for neural network.");

        MeanSquaredError mse = new MeanSquaredError();
        MeanAbsoluteError mae = new MeanAbsoluteError();

        for (DataSetRow testSetRow : dataSet.getRows()) {
            neuralNet.setInput(testSetRow.getInput());
            neuralNet.calculate();
            double[] networkOutput = neuralNet.getOutput();
            double[] desiredOutput = testSetRow.getDesiredOutput();
            mse.addPatternError(networkOutput, desiredOutput);
            mae.addPatternError(networkOutput, desiredOutput);
        }

        System.out.println("Mean squared error is: " + mse.getTotalError());
        System.out.println("Mean absolute error is: " + mae.getTotalError());
    }
 
Example #11
Source File: Sonar.java    From NeurophFramework with Apache License 2.0
public void evaluate(NeuralNetwork neuralNet, DataSet dataSet) {

        System.out.println("Calculating performance indicators for neural network.");

        Evaluation evaluation = new Evaluation();
        evaluation.addEvaluator(new ErrorEvaluator(new MeanSquaredError()));

        evaluation.addEvaluator(new ClassifierEvaluator.Binary(0.5));
        evaluation.evaluate(neuralNet, dataSet);

        ClassifierEvaluator evaluator = evaluation.getEvaluator(ClassifierEvaluator.Binary.class);
        ConfusionMatrix confusionMatrix = evaluator.getResult();
        System.out.println("Confusion matrrix:\r\n");
        System.out.println(confusionMatrix.toString() + "\r\n\r\n");
        System.out.println("Classification metrics\r\n");
        ClassificationMetrics[] metrics = ClassificationMetrics.createFromMatrix(confusionMatrix);
        ClassificationMetrics.Stats average = ClassificationMetrics.average(metrics);
        for (ClassificationMetrics cm : metrics) {
            System.out.println(cm.toString() + "\r\n");
        }
        System.out.println(average.toString());
    }
 
Example #12
Source File: TestTimeSeries.java    From NeurophFramework with Apache License 2.0
public void train() {
    // get the path to file with data
    String inputFileName = "C:\\timeseries\\BSW15";
    
    // create MultiLayerPerceptron neural network
    neuralNet = new MultiLayerPerceptron(TransferFunctionType.TANH, 5, 10, 1);
    MomentumBackpropagation learningRule = (MomentumBackpropagation)neuralNet.getLearningRule();
    learningRule.setLearningRate(0.2);
    learningRule.setMomentum(0.5);
    // learningRule.addObserver(this);
    learningRule.addListener(this);        
    
    // create training set from file
    trainingSet = DataSet.createFromFile(inputFileName, 5, 1, "\t", false);
    // train the network with training set
    neuralNet.learn(trainingSet);         
          
    System.out.println("Done training.");          
}
 
Example #13
Source File: MlpNetworkTrainer.java    From developerWorks with Apache License 2.0
/**
 * Train the specified MLP network using the specified training data, and store metrics in
 * the {@link NetworkMetrics} object associated with the network (looked up from the
 * network metrics cache).
 * 
 * @param trainingData
 *          The data used to train the network.
 * @param network
 *          The MLP network to be trained.
 */
private void trainNetwork(DataSet trainingData, MultiLayerPerceptron network) {
  //
  // Shuffle the training data. Adds an element of randomness to the data.
  trainingData.shuffle();
  //
  // Now learn, you!
  network.learn(trainingData);
  //
  // Learning complete. Set metrics.
  NetworkMetrics metrics = networkMetricsCache.get(network);
  metrics.setIterationLearnTime(System.currentTimeMillis() - metrics.getLearnStartTime());
  metrics.setTotalLearnTime(metrics.getTotalLearnTime() + metrics.getIterationLearnTime());
  metrics.setNumberOfAsymmetricWinsThisIteration(0);
  metrics.setNumberOfSymmetricWinsThisIteration(0);
  metrics.setNumberOfGamesThisIteration(0);
}
 
Example #14
Source File: PerceptronSample.java    From NeurophFramework with Apache License 2.0
/**
 * Runs this sample
 */
public static void main(String args[]) {

        // create training set (logical AND function)
        DataSet trainingSet = new DataSet(2, 1);
        trainingSet.add(new DataSetRow(new double[]{0, 0}, new double[]{0}));
        trainingSet.add(new DataSetRow(new double[]{0, 1}, new double[]{0}));
        trainingSet.add(new DataSetRow(new double[]{1, 0}, new double[]{0}));
        trainingSet.add(new DataSetRow(new double[]{1, 1}, new double[]{1}));

        // create perceptron neural network
        NeuralNetwork myPerceptron = new Perceptron(2, 1);
        // learn the training set
        myPerceptron.learn(trainingSet);
        // test perceptron
        System.out.println("Testing trained perceptron");
        testNeuralNetwork(myPerceptron, trainingSet);
        // save trained perceptron
        myPerceptron.save("mySamplePerceptron.nnet");
        // load saved neural network
        NeuralNetwork loadedPerceptron = NeuralNetwork.load("mySamplePerceptron.nnet");
        // test loaded neural network
        System.out.println("Testing loaded perceptron");
        testNeuralNetwork(loadedPerceptron, trainingSet);
}
 
Example #15
Source File: ZeroMeanNormalizerTest.java    From NeurophFramework with Apache License 2.0
@Ignore
public void testTwoRowsWithOneElementNormalization() {
    double[] inputRow1 = new double[] { 2 };
    double[] inputRow2 = new double[] { 4 };
    DataSetRow row1 = createDataRow(inputRow1);
    DataSetRow row2 = createDataRow(inputRow2);
    DataSet dataSet = createDataSetFromRows(row1, row2);
    normalizer.normalize(dataSet);

    DataSetStatistics stats = new DataSetStatistics(dataSet);

    double[] columnMeans = stats.inputsMean();
    for (int i = 0; i < dataSet.getInputSize(); i++) {
        double currentColumnMean = columnMeans[i];
        assertEquals(0.0, currentColumnMean, 0.01);
    }
}
 
Example #16
Source File: JMLNeurophSample.java    From NeurophFramework with Apache License 2.0
public static void main(String[] args) {
    try {
        //create jml dataset
        Dataset jmlDataset = FileHandler.loadDataset(new File("datasets/iris.data"), 4, ",");

        // normalize dataset
        NormalizeMidrange nmr = new NormalizeMidrange(0, 1);
        nmr.build(jmlDataset);
        nmr.filter(jmlDataset);
        
        //print data as read from file
        System.out.println(jmlDataset);

        //convert jml dataset to neuroph
        DataSet neurophDataset = JMLDataSetConverter.convertJMLToNeurophDataset(jmlDataset, 4, 3);
        
        //convert neuroph dataset to jml
        Dataset jml = JMLDataSetConverter.convertNeurophToJMLDataset(neurophDataset);

        //print out both to compare them
        System.out.println("Java-ML data set read from file");
        printDataset(jmlDataset);
        System.out.println("Neuroph data set converted from Java-ML data set");
        printDataset(neurophDataset);
        System.out.println("Java-ML data set reconverted from Neuroph data set");
        printDataset(jml);

        System.out.println("JMLNeuroph classifier test");
        //test NeurophJMLClassifier
        testJMLNeurophClassifier(jmlDataset);

    } catch (Exception ex) {
        Logger.getLogger(JMLNeurophSample.class.getName()).log(Level.SEVERE, null, ex);
    }

}
 
Example #17
Source File: ForestFiresSample.java    From NeurophFramework with Apache License 2.0
public void testNeuralNetwork(NeuralNetwork neuralNet, DataSet testSet) {

        for (DataSetRow testSetRow : testSet.getRows()) {
            neuralNet.setInput(testSetRow.getInput());
            neuralNet.calculate();
            double[] networkOutput = neuralNet.getOutput();

            System.out.print("Input: " + Arrays.toString(testSetRow.getInput()));
            System.out.println(" Output: " + Arrays.toString(networkOutput));
        }
    }
 
Example #18
Source File: StatisticsTest.java    From NeurophFramework with Apache License 2.0
@Test
public void testMinOneRowOneColumn() {
    double value = 3;
    DataSet dataSet = createDataSetWithOneRowAndOneColumn(value);

    DataSetStatistics stats = new DataSetStatistics(dataSet);
    double[] minByColumns = stats.inputsMin();
    assertEquals(value, minByColumns[0], LARGEST_MIN_MAX_ERROR);
}
 
Example #19
Source File: IterativeLearning.java    From NeurophFramework with Apache License 2.0
@Override
public final void learn(DataSet trainingSet) {
    setTrainingSet(trainingSet); // set this field here so subclasses can access it
    onStart();

    while (!isStopped()) {
        beforeEpoch();
        doLearningEpoch(trainingSet);
        this.currentIteration++;
        afterEpoch();

        // now check if stop condition is satisfied
        if (hasReachedStopCondition()) {
            stopLearning();
        } else if (!iterationsLimited && (currentIteration == Integer.MAX_VALUE)) {
            // if counter has reached max value and iteration number is not limited restart iteration counter
            this.currentIteration = 1;
        }

        // notify listeners that epoch has ended
        fireLearningEvent(new LearningEvent(this, LearningEvent.Type.EPOCH_ENDED));

        // Thread safe pause when learning is paused
        if (this.pausedLearning) {
            synchronized (this) {
                while (this.pausedLearning) {
                    try {
                        this.wait();
                    } catch (Exception e) {
                    }
                }
            }
        }

    }
    onStop();
    fireLearningEvent(new LearningEvent(this, LearningEvent.Type.LEARNING_STOPPED));
}
 
Example #20
Source File: UnsupervisedLearning.java    From NeurophFramework with Apache License 2.0
/**
 * This method does one learning epoch for the unsupervised learning rules.
 * It iterates through the training set and trains network weights for each
 * element.
 *
 * @param trainingSet
 *            training set for training network
 */
@Override
public void doLearningEpoch(DataSet trainingSet) {
    Iterator<DataSetRow> iterator = trainingSet.iterator();
    while (iterator.hasNext() && !isStopped()) {
        DataSetRow trainingSetRow = iterator.next();
        learnPattern(trainingSetRow);
    }
}
 
Example #21
Source File: BackPropagationTest.java    From NeurophFramework with Apache License 2.0
@Before
public void setUp() {
    instance = new BackPropagation();
    xorDataSet = new DataSet(2, 1);
    xorDataSet.add(new DataSetRow(new double[]{0, 0}, new double[]{0}));
    xorDataSet.add(new DataSetRow(new double[]{0, 1}, new double[]{1}));
    xorDataSet.add(new DataSetRow(new double[]{1, 0}, new double[]{1}));
    xorDataSet.add(new DataSetRow(new double[]{1, 1}, new double[]{0}));
    maxError = 0.01;
    instance.setLearningRate(0.5);
    instance.setMaxError(maxError);
    String inputFileName = "src/test/resources/iris_normalized.txt";
    irisDataSet = DataSet.createFromFile(inputFileName, 4, 3, ",", false);

}
 
Example #22
Source File: PerceptronLearningTest.java    From NeurophFramework with Apache License 2.0
@Before
public void setUp() {
    instance = new PerceptronLearning();
    dataSet = new DataSet(2, 1);
    dataSet.add(new DataSetRow(new double[]{0, 0}, new double[]{0}));
    dataSet.add(new DataSetRow(new double[]{0, 1}, new double[]{1}));
    dataSet.add(new DataSetRow(new double[]{1, 0}, new double[]{1}));
    dataSet.add(new DataSetRow(new double[]{1, 1}, new double[]{0}));

    maxError = 0.4;
    instance.setMaxError(maxError);
}
 
Example #23
Source File: BinaryDeltaRuleTest.java    From NeurophFramework with Apache License 2.0
@Before
public void setUp() {
    instance = new BinaryDeltaRule();
    dataSet = new DataSet(2, 1);
    dataSet.add(new DataSetRow(new double[]{0, 0}, new double[]{0}));
    dataSet.add(new DataSetRow(new double[]{0, 1}, new double[]{1}));
    dataSet.add(new DataSetRow(new double[]{1, 0}, new double[]{1}));
    dataSet.add(new DataSetRow(new double[]{1, 1}, new double[]{0}));

    maxError = 0.4;
    instance.setMaxError(maxError);
}
 
Example #24
Source File: BrestCancerSample.java    From NeurophFramework with Apache License 2.0
public void testNeuralNetwork(NeuralNetwork neuralNet, DataSet testSet) {

        System.out.println("**************************************************");
        System.out.println("**********************RESULT**********************");
        System.out.println("**************************************************");
        for (DataSetRow testSetRow : testSet.getRows()) {
            neuralNet.setInput(testSetRow.getInput());
            neuralNet.calculate();

            //Finding network output
            double[] networkOutput = neuralNet.getOutput();
            int predicted = maxOutput(networkOutput);

            //Finding actual output
            double[] networkDesiredOutput = testSetRow.getDesiredOutput();
            int ideal = maxOutput(networkDesiredOutput);

            //Collecting data for network evaluation
            keepScore(predicted, ideal);
        }

        System.out.println("Total cases: " + this.count[2] + ". ");
        System.out.println("Correctly predicted cases: " + this.correct[2] + ". ");
        System.out.println("Incorrectly predicted cases: " + (this.count[2] - this.correct[2] - unpredicted) + ". ");
        System.out.println("Unrecognized cases: " + unpredicted + ". ");
        double percentTotal = (double) this.correct[2] * 100 / (double) this.count[2];
        System.out.println("Predicted correctly: " + formatDecimalNumber(percentTotal) + "%. ");

        double percentM = (double) this.correct[0] * 100.0 / (double) this.count[0];
        System.out.println("Prediction for 'M (malignant)' => (Correct/total): "
                + this.correct[0] + "/" + count[0] + "(" + formatDecimalNumber(percentM) + "%). ");

        double percentB = (double) this.correct[1] * 100.0 / (double) this.count[1];
        System.out.println("Prediction for 'B (benign)' => (Correct/total): "
                + this.correct[1] + "/" + count[1] + "(" + formatDecimalNumber(percentB) + "%). ");
    }
 
Example #25
Source File: PimaIndiansDiabetes.java    From NeurophFramework with Apache License 2.0
public void testNeuralNetwork(NeuralNetwork neuralNet, DataSet testSet) {

        System.out.println("Showing inputs, desired output and neural network output for every row in test set.");

        for (DataSetRow testSetRow : testSet.getRows()) {
            neuralNet.setInput(testSetRow.getInput());
            neuralNet.calculate();
            double[] networkOutput = neuralNet.getOutput();

            System.out.println("Input: " + Arrays.toString(testSetRow.getInput()));
            System.out.println("Output: " + Arrays.toString(networkOutput));
            System.out.println("Desired output" + Arrays.toString(testSetRow.getDesiredOutput()));
        }
    }
 
Example #26
Source File: JMLNeurophSample.java    From NeurophFramework with Apache License 2.0
/**
 * Prints Neuroph data set
 *
 * @param neurophDataset Dataset Neuroph data set
 */
public static void printDataset(DataSet neurophDataset) {
    System.out.println("Neuroph dataset");
    Iterator<DataSetRow> iterator = neurophDataset.iterator();

    while (iterator.hasNext()) {
        DataSetRow row = iterator.next();
        System.out.println("inputs");
        System.out.println(Arrays.toString(row.getInput()));
        if (row.getDesiredOutput().length > 0) {
            System.out.println("outputs");
            System.out.println(Arrays.toString(row.getDesiredOutput()));
        }
    }
}
 
Example #27
Source File: Abalone.java    From NeurophFramework with Apache License 2.0
public void testNeuralNetwork(NeuralNetwork neuralNet, DataSet testSet) {

        System.out.println("Showing inputs, desired output and neural network output for every row in test set.");

        for (DataSetRow testSetRow : testSet.getRows()) {
            neuralNet.setInput(testSetRow.getInput());
            neuralNet.calculate();
            double[] networkOutput = neuralNet.getOutput();

            System.out.println("Input: " + Arrays.toString(testSetRow.getInput()));
            System.out.println("Output: " + Arrays.toString(networkOutput));
            System.out.println("Desired output" + Arrays.toString(testSetRow.getDesiredOutput()));
        }
    }
 
Example #28
Source File: IonosphereSample.java    From NeurophFramework with Apache License 2.0
public void testNeuralNetwork(NeuralNetwork neuralNet, DataSet testSet) {

        System.out.println("**************************************************");
        System.out.println("**********************RESULT**********************");
        System.out.println("**************************************************");
        for (DataSetRow testSetRow : testSet.getRows()) {
            neuralNet.setInput(testSetRow.getInput());
            neuralNet.calculate();

            //Finding network output
            double[] networkOutput = neuralNet.getOutput();
            int predicted = maxOutput(networkOutput);

            //Finding actual output
            double[] networkDesiredOutput = testSetRow.getDesiredOutput();
            int ideal = maxOutput(networkDesiredOutput);

            //Collecting data for network evaluation
            keepScore(predicted, ideal);
        }

        System.out.println("Total cases: " + this.count[2] + ". ");
        System.out.println("Correctly predicted cases: " + this.correct[2] + ". ");
        System.out.println("Incorrectly predicted cases: " + (this.count[2] - this.correct[2] - unpredicted) + ". ");
        System.out.println("Unrecognized cases: " + unpredicted + ". ");
        double percentTotal = (double) this.correct[2] * 100 / (double) this.count[2];
        System.out.println("Predicted correctly: " + formatDecimalNumber(percentTotal) + "%. ");

        double percentM = (double) this.correct[0] * 100.0 / (double) this.count[0];
        System.out.println("Prediction for 'Good' => (Correct/total): "
                + this.correct[0] + "/" + count[0] + "(" + formatDecimalNumber(percentM) + "%). ");

        double percentB = (double) this.correct[1] * 100.0 / (double) this.count[1];
        System.out.println("Prediction for 'Bad' => (Correct/total): "
                + this.correct[1] + "/" + count[1] + "(" + formatDecimalNumber(percentB) + "%). ");
        this.count = new int[3];
        this.correct = new int[3];
        unpredicted = 0;
    }
 
Example #29
Source File: ZeroMeanNormalizer.java    From NeurophFramework with Apache License 2.0
@Override
public void normalize(DataSet dataSet) {

    for (DataSetRow row : dataSet.getRows()) {
        double[] normalizedInput = row.getInput();

        for (int i = 0; i < dataSet.getInputSize(); i++) {
            normalizedInput[i] = (normalizedInput[i] - meanInput[i]) / stdInput[i];
        }
        row.setInput(normalizedInput);
    }
}
 
Example #30
Source File: RunExampleEvaluation.java    From NeurophFramework with Apache License 2.0
/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    NeuralNetwork nnet = NeuralNetwork.createFromFile("irisNet.nnet");
    DataSet dataSet =  DataSet.createFromFile("data_sets/iris_data_normalised.txt", 4, 3, ",");
    
    Evaluation.runFullEvaluation(nnet, dataSet);
     
}
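Putting the pieces together, the calls shown in these examples can be combined into a simple train-and-evaluate flow. The sketch below is assembled only from APIs that appear above (createFromFile, MultiLayerPerceptron, learn, Evaluation.runFullEvaluation, save); the file name, layer sizes, and import paths follow common Neuroph conventions and may need adjusting for your project and framework version.

import org.neuroph.core.NeuralNetwork;
import org.neuroph.core.data.DataSet;
import org.neuroph.eval.Evaluation;
import org.neuroph.nnet.MultiLayerPerceptron;
import org.neuroph.util.TransferFunctionType;

public class IrisTrainAndEvaluate {

    public static void main(String[] args) {
        // load a normalized, comma-delimited data set with 4 inputs and 3 outputs per row
        DataSet dataSet = DataSet.createFromFile("data_sets/iris_data_normalised.txt", 4, 3, ",");

        // create and train a multi layer perceptron (layer sizes are illustrative)
        NeuralNetwork nnet = new MultiLayerPerceptron(TransferFunctionType.SIGMOID, 4, 16, 3);
        nnet.learn(dataSet);

        // run the full evaluation and save the trained network
        Evaluation.runFullEvaluation(nnet, dataSet);
        nnet.save("irisNet.nnet");
    }
}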