Java Code Examples for org.neuroph.core.data.DataSet#getOutputSize()

The following examples show how to use org.neuroph.core.data.DataSet#getOutputSize() . You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: ImageRecognitionHelper.java    From NeurophFramework with Apache License 2.0 6 votes vote down vote up
/**
 * Creates a training set from the specified image labels and rgb data.
 *
 * @param imageLabels image labels, one per output column
 * @param rgbDataMap map collection of rgb data keyed by image label
 * @return training set for the specified image data
 */
public static DataSet createRGBTrainingSet(List<String> imageLabels, Map<String, FractionRgbData> rgbDataMap) {
    // input vector length is taken from the first rgb sample; one output per label
    int numInputs = rgbDataMap.values().iterator().next().getFlattenedRgbValues().length;
    int numOutputs = imageLabels.size();
    DataSet trainingSet = new DataSet(numInputs, numOutputs);

    for (Entry<String, FractionRgbData> imageEntry : rgbDataMap.entrySet()) {
        double[] inputValues = imageEntry.getValue().getFlattenedRgbValues();
        double[] desiredOutput = createResponse(imageEntry.getKey(), imageLabels);
        trainingSet.add(new DataSetRow(inputValues, desiredOutput));
    }

    // label each output column with its corresponding image label
    int firstOutputCol = trainingSet.getInputSize();
    for (int col = 0; col < trainingSet.getOutputSize(); col++) {
        trainingSet.setColumnName(firstOutputCol + col, imageLabels.get(col));
    }

    return trainingSet;
}
 
Example 2
Source File: ImageRecognitionHelper.java    From NeurophFramework with Apache License 2.0 6 votes vote down vote up
/**
 * Creates a training set from the specified image labels and hsl data.
 *
 * @param imageLabels image labels, one per output column
 * @param hslDataMap map collection of hsl data keyed by image label
 * @return training set for the specified image data
 */
public static DataSet createHSLTrainingSet(List<String> imageLabels, Map<String, FractionHSLData> hslDataMap) {
    // input vector length is taken from the first hsl sample; one output per label
    int numInputs = hslDataMap.values().iterator().next().getFlattenedHSLValues().length;
    int numOutputs = imageLabels.size();
    DataSet trainingSet = new DataSet(numInputs, numOutputs);

    for (Entry<String, FractionHSLData> imageEntry : hslDataMap.entrySet()) {
        double[] inputValues = imageEntry.getValue().getFlattenedHSLValues();
        double[] desiredOutput = createResponse(imageEntry.getKey(), imageLabels);
        trainingSet.add(new DataSetRow(inputValues, desiredOutput));
    }

    // label each output column with its corresponding image label
    int firstOutputCol = trainingSet.getInputSize();
    for (int col = 0; col < trainingSet.getOutputSize(); col++) {
        trainingSet.setColumnName(firstOutputCol + col, imageLabels.get(col));
    }

    return trainingSet;
}
 
Example 3
Source File: ImageRecognitionHelper.java    From NeurophFramework with Apache License 2.0 6 votes vote down vote up
/**
 * Creates a binary black-and-white training set for the specified image labels and rgb data.
 * Pixel encoding: white = 0, black = 1.
 *
 * @param imageLabels image labels, one per output column
 * @param rgbDataMap map collection of rgb data keyed by image label
 * @return binary black and white training set for the specified image data
 * @throws VectorSizeMismatchException if an rgb vector cannot be converted
 */
public static DataSet createBlackAndWhiteTrainingSet(List<String> imageLabels, Map<String, FractionRgbData> rgbDataMap) throws VectorSizeMismatchException {
    // TODO: use a proper binarization image filter; currently this averages RGB values
    // each rgb triple collapses to a single black/white input, hence the division by 3
    int numInputs = rgbDataMap.values().iterator().next().getFlattenedRgbValues().length / 3;
    int numOutputs = imageLabels.size();
    DataSet trainingSet = new DataSet(numInputs, numOutputs);

    for (Entry<String, FractionRgbData> imageEntry : rgbDataMap.entrySet()) {
        double[] rgbInput = imageEntry.getValue().getFlattenedRgbValues();
        double[] bwInput = FractionRgbData.convertRgbInputToBinaryBlackAndWhite(rgbInput);
        double[] desiredOutput = createResponse(imageEntry.getKey(), imageLabels);
        trainingSet.add(new DataSetRow(bwInput, desiredOutput));
    }

    // label each output column with its corresponding image label
    int firstOutputCol = trainingSet.getInputSize();
    for (int col = 0; col < trainingSet.getOutputSize(); col++) {
        trainingSet.setColumnName(firstOutputCol + col, imageLabels.get(col));
    }

    return trainingSet;
}
 
Example 4
Source File: KFoldCrossValidation.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
/**
 * Sums a list of confusion matrices element-wise into a single confusion matrix.
 *
 * @param cmList  confusion matrices to sum; must be non-empty, each sized to the
 *                data set's output count
 * @param dataSet data set whose output size defines the matrix dimension
 * @return a confusion matrix holding the element-wise sums
 */
public ConfusionMatrix sumConfusionMatrix(List<ConfusionMatrix> cmList, DataSet dataSet) {
    // class labels are taken from the first matrix; all matrices are assumed to share them
    ConfusionMatrix cm = new ConfusionMatrix(cmList.get(0).getClassLabels());
    // hoist the loop-invariant dimension instead of re-querying it on every loop test
    final int classCount = dataSet.getOutputSize();
    int[][] ar = new int[classCount][classCount];
    for (ConfusionMatrix c : cmList) {
        for (int i = 0; i < classCount; ++i) {
            for (int j = 0; j < classCount; ++j) {
                ar[i][j] += c.get(i, j);
            }
        }
    }

    cm.setValues(ar);
    return cm;
}
 
Example 5
Source File: DataSetStatistics.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
public DataSetStatistics(DataSet dataSet) {
    this.dataSet = dataSet;
    this.rowLength = dataSet.getInputSize() + dataSet.getOutputSize();
    this.rowsCount = dataSet.getRows().size();
    this.mean = new double[this.rowLength];
    this.max = new double[this.rowLength];
    this.min = new double[this.rowLength];
    this.sum = new double[this.rowLength];
    this.variance = new double[this.rowLength];
    this.stdDeviation = new double[this.rowLength];
    this.frequency = new double[this.rowLength];
    this.setDefaultValues();
}
 
Example 6
Source File: DecimalScaleNormalizer.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
/**
 * Finds max values for all columns in dataset (inputs and outputs)
 * Sets max column values to maxIn and maxOut fields
 *
 * @param dataSet data set to scan
 */
private void findMaxVectors(DataSet dataSet) {
    int inputSize = dataSet.getInputSize();
    int outputSize = dataSet.getOutputSize();

    // Init with negative infinity so the first real value always wins.
    // (Double.MIN_VALUE is the smallest POSITIVE double, so the previous init
    // left the max wrong for columns containing only negative values.)
    maxIn = new double[inputSize];
    for (int i = 0; i < inputSize; i++) {
        maxIn[i] = Double.NEGATIVE_INFINITY;
    }

    maxOut = new double[outputSize];
    for (int i = 0; i < outputSize; i++) {
        maxOut[i] = Double.NEGATIVE_INFINITY;
    }

    // single pass over all rows, tracking per-column maxima
    for (DataSetRow dataSetRow : dataSet.getRows()) {
        double[] input = dataSetRow.getInput();
        for (int i = 0; i < inputSize; i++) {
            if (input[i] > maxIn[i]) {
                maxIn[i] = input[i];
            }
        }

        double[] output = dataSetRow.getDesiredOutput();
        for (int i = 0; i < outputSize; i++) {
            if (output[i] > maxOut[i]) {
                maxOut[i] = output[i];
            }
        }
    }
}
 
Example 7
Source File: MaxNormalizer.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
/**
 * Finds max absolute values for columns in input and output vector for given data set.
 * Sets results into the maxIn and maxOut fields.
 *
 * @param dataSet data set to scan
 */
private void init(DataSet dataSet) {
    int inputSize = dataSet.getInputSize();
    int outputSize = dataSet.getOutputSize();

    // Maxima are taken over ABSOLUTE values, so 0 is the correct neutral start.
    // (The previous Double.MIN_VALUE init is the smallest positive double and
    // would survive as a bogus maximum for an all-zero column.)
    maxIn = new double[inputSize];   // Java zero-initializes arrays
    maxOut = new double[outputSize]; // Java zero-initializes arrays

    // single pass over all rows, tracking per-column absolute maxima
    for (DataSetRow dataSetRow : dataSet.getRows()) {
        double[] input = dataSetRow.getInput();
        for (int i = 0; i < inputSize; i++) {
            if (Math.abs(input[i]) > maxIn[i]) {
                maxIn[i] = Math.abs(input[i]);
            }
        }

        double[] output = dataSetRow.getDesiredOutput();
        for (int i = 0; i < outputSize; i++) {
            if (Math.abs(output[i]) > maxOut[i]) {
                maxOut[i] = Math.abs(output[i]);
            }
        }
    }
}
 
Example 8
Source File: SubSampling.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
/**
 * Splits the given data set into numSubSets randomly sampled subsets.
 * Subset sizes follow the configured fractional sizes; when only a part count
 * was configured, all subsets get an equal share.
 *
 * @param dataSet data set to sample from (shuffled in place)
 * @return the sampled subsets
 */
@Override
public DataSet[] sample(DataSet dataSet) {
    // when constructed with just numParts, derive equal fractional sizes for all subsets
    if (subSetSizes == null) {
        final double equalShare = 1.0d / numSubSets;
        subSetSizes = new double[numSubSets];
        for (int i = 0; i < numSubSets; i++) {
            subSetSizes[i] = equalShare;
        }
    }

    // randomize row order so every subset gets an unbiased sample
    dataSet.shuffle();

    List<DataSet> subSets = new ArrayList<>();
    int nextRowIdx = 0; // current position in the shuffled source data set

    for (int subSetIdx = 0; subSetIdx < numSubSets; subSetIdx++) {
        DataSet subSet = new DataSet(dataSet.getInputSize(), dataSet.getOutputSize());
        // copy column names if there are any
        subSet.setColumnNames(dataSet.getColumnNames());

        // number of rows this subset receives, proportional to its configured share;
        // stop early if the source runs out of rows due to rounding
        long rowsWanted = Math.round(subSetSizes[subSetIdx] * dataSet.size());
        for (long i = 0; i < rowsWanted && nextRowIdx < dataSet.size(); i++) {
            subSet.add(dataSet.getRowAt(nextRowIdx));
            nextRowIdx++;
        }

        subSets.add(subSet);
    }

    return subSets.toArray(new DataSet[numSubSets]);
}
 
Example 9
Source File: AbstractSampling.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
/**
 * Creates one fold with the same input/output dimensions as the given data set
 * and fills it with the next getSampleSize() rows from the sampling sequence.
 *
 * @param dataSet data set whose dimensions the fold copies
 * @return the newly created fold
 */
private DataSet createDataSetFold(DataSet dataSet) {
    DataSet foldSet = new DataSet(dataSet.getInputSize(), dataSet.getOutputSize());
    for (int rowNo = 0; rowNo < getSampleSize(); rowNo++) {
        foldSet.add(getNextDataSetRow());
    }
    return foldSet;
}
 
Example 10
Source File: AbstractTraining.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
/**
 * Create instance of training with new neural network
 *
 * @param dataset
 * @param settings
 */
public AbstractTraining(DataSet dataset, TrainingSettings settings) {
    this.dataset = dataset;
    this.settings = settings;
    this.stats = new TrainingStatistics();
    this.neuralNet = new MultiLayerPerceptron(TransferFunctionType.SIGMOID, dataset.getInputSize(), settings.getHiddenNeurons(), dataset.getOutputSize());
}
 
Example 11
Source File: RangeNormalizer.java    From NeurophFramework with Apache License 2.0 4 votes vote down vote up
/**
 * Find min and max values for each position in the input and output vectors.
 * Results are stored in the maxIn/minIn and maxOut/minOut fields.
 *
 * @param dataSet data set to scan
 */
private void findMaxAndMinVectors(DataSet dataSet) {
    int inputSize = dataSet.getInputSize();
    int outputSize = dataSet.getOutputSize();

    maxIn = new double[inputSize];
    minIn = new double[inputSize];

    // Init max with negative infinity and min with positive infinity so the first
    // real value always wins. (Double.MIN_VALUE is the smallest POSITIVE double,
    // so the previous max init was wrong for columns with only negative values.)
    for (int i = 0; i < inputSize; i++) {
        maxIn[i] = Double.NEGATIVE_INFINITY;
        minIn[i] = Double.POSITIVE_INFINITY;
    }

    maxOut = new double[outputSize];
    minOut = new double[outputSize];

    for (int i = 0; i < outputSize; i++) {
        maxOut[i] = Double.NEGATIVE_INFINITY;
        minOut[i] = Double.POSITIVE_INFINITY;
    }

    // single pass over all rows, tracking per-column minima and maxima
    for (DataSetRow dataSetRow : dataSet.getRows()) {
        double[] input = dataSetRow.getInput();
        for (int i = 0; i < inputSize; i++) {
            if (input[i] > maxIn[i]) {
                maxIn[i] = input[i];
            }
            if (input[i] < minIn[i]) {
                minIn[i] = input[i];
            }
        }

        double[] output = dataSetRow.getDesiredOutput();
        for (int i = 0; i < outputSize; i++) {
            if (output[i] > maxOut[i]) {
                maxOut[i] = output[i];
            }
            if (output[i] < minOut[i]) {
                minOut[i] = output[i];
            }
        }
    }
}
 
Example 12
Source File: MaxMinNormalizer.java    From NeurophFramework with Apache License 2.0 4 votes vote down vote up
/**
 * Initialize normalizer: finds min and max of the ABSOLUTE values for all
 * columns in the data set, storing them in maxIn/minIn and maxOut/minOut.
 *
 * NOTE(review): unlike RangeNormalizer this compares Math.abs of the values,
 * so "min" here is the smallest magnitude, not the smallest value — confirm
 * this is the intended semantics.
 *
 * @param dataSet data set to scan
 */
private void init(DataSet dataSet) {
    int numInputs = dataSet.getInputSize();
    int numOutputs = dataSet.getOutputSize();

    maxIn = new double[numInputs];
    minIn = new double[numInputs];

    // Absolute values are never negative, so 0 is the correct neutral start for
    // the maxima. (The previous Double.MIN_VALUE init is the smallest positive
    // double and would survive as a bogus maximum for an all-zero column.)
    for (int i = 0; i < numInputs; i++) {
        maxIn[i] = 0.0;
        minIn[i] = Double.MAX_VALUE;
    }

    maxOut = new double[numOutputs];
    minOut = new double[numOutputs];

    for (int i = 0; i < numOutputs; i++) {
        maxOut[i] = 0.0;
        minOut[i] = Double.MAX_VALUE;
    }

    // single pass over all rows, tracking per-column min/max of absolute values
    for (DataSetRow dataSetRow : dataSet.getRows()) {
        double[] input = dataSetRow.getInput();
        for (int i = 0; i < numInputs; i++) {
            if (Math.abs(input[i]) > maxIn[i]) {
                maxIn[i] = Math.abs(input[i]);
            }
            if (Math.abs(input[i]) < minIn[i]) {
                minIn[i] = Math.abs(input[i]);
            }
        }

        double[] output = dataSetRow.getDesiredOutput();
        for (int i = 0; i < numOutputs; i++) {
            if (Math.abs(output[i]) > maxOut[i]) {
                maxOut[i] = Math.abs(output[i]);
            }
            if (Math.abs(output[i]) < minOut[i]) {
                minOut[i] = Math.abs(output[i]);
            }
        }
    }
}
 
Example 13
Source File: AutoTrainer.java    From NeurophFramework with Apache License 2.0 4 votes vote down vote up
/**
 * Trains a fresh MultiLayerPerceptron for every generated combination of
 * training settings, optionally repeating each combination and aggregating
 * per-combination statistics. Call getResults() to retrieve the results.
 *
 * @param dataSet data set used for training (and testing, when split is enabled)
 */
    public void train(DataSet dataSet) {// TODO: consider returning the TrainingSettings that works best for the given data set
        generateTrainingSettings();
        List<TrainingResult> statResults = null;
        DataSet trainingSet, testSet; // validationSet;

        if (splitTrainTest) {
            DataSet[] dataSplit = dataSet.split(splitPercentage, 100-splitPercentage); // original note (translated): "Maven doesn't work again for neuroph 2.92"
            trainingSet = dataSplit[0];
            testSet = dataSplit[1];
        } else {
            // no split configured: train and test on the same data set
            trainingSet = dataSet;
            testSet = dataSet;
        }

        if (generateStatistics) {
            statResults = new ArrayList<>();
        }

        int trainingNo = 0;
        for (TrainingSettings trainingSetting : trainingSettingsList) {
            System.out.println("-----------------------------------------------------------------------------------");
            trainingNo++;
            System.out.println("##TRAINING: " + trainingNo);
            trainingSetting.setTrainingSet(splitPercentage);
            trainingSetting.setTestSet(100 - splitPercentage);

            // repeat each setting several times to average out random weight initialization
            for (int subtrainNo = 1; subtrainNo <= repeat; subtrainNo++) {
                System.out.println("#SubTraining: " + subtrainNo);

                // fresh network per run; topology derived from data set and current setting
                MultiLayerPerceptron neuralNet
                        = new MultiLayerPerceptron(dataSet.getInputSize(), trainingSetting.getHiddenNeurons(), dataSet.getOutputSize());

                BackPropagation bp = neuralNet.getLearningRule();

                bp.setLearningRate(trainingSetting.getLearningRate());
                bp.setMaxError(trainingSetting.getMaxError());
                bp.setMaxIterations(trainingSetting.getMaxIterations());

                neuralNet.learn(trainingSet);
//                  testNeuralNetwork(neuralNet, testSet); // not implemented
                // NOTE(review): placeholder confusion matrix with a single empty label;
                // evaluation against testSet is not implemented yet, so testSet is unused
                ConfusionMatrix cm = new ConfusionMatrix(new String[]{""});
                TrainingResult result = new TrainingResult(trainingSetting, bp.getTotalNetworkError(), bp.getCurrentIteration(),cm);
                System.out.println(subtrainNo + ") iterations: " + bp.getCurrentIteration());

                if (generateStatistics) {
                    statResults.add(result);
                } else {
                    results.add(result);
                }

            }

            if (generateStatistics) {
                // aggregate the repeated runs for this setting into one statistical result
                TrainingResult trainingStats = calculateTrainingStatistics(trainingSetting, statResults);
                results.add(trainingStats);
                statResults.clear();
            }

        }

    }