org.neuroph.nnet.learning.MomentumBackpropagation Java Examples

The following examples show how to use org.neuroph.nnet.learning.MomentumBackpropagation. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: NeuralNetworkFactory.java    From NeurophFramework with Apache License 2.0 6 votes vote down vote up
/**
 * Creates and returns a new instance of Multi Layer Perceptron.
 *
 * @param layersStr space separated number of neurons in layers
 * @param transferFunctionType transfer function type for neurons
 * @param learningRule class of the learning rule to attach to the network
 * @param useBias whether neurons should use a bias input
 * @param connectIO whether to connect the input layer directly to the output layer
 * @return instance of Multi Layer Perceptron
 */
public static MultiLayerPerceptron createMLPerceptron(String layersStr, TransferFunctionType transferFunctionType, Class learningRule,  boolean useBias, boolean connectIO) {
    ArrayList<Integer> layerSizes = VectorParser.parseInteger(layersStr);
    NeuronProperties neuronProperties = new NeuronProperties(transferFunctionType, useBias);
    MultiLayerPerceptron network = new MultiLayerPerceptron(layerSizes, neuronProperties);

    // Select the learning rule by fully-qualified class name - TODO: use reflection here.
    // An unrecognized class leaves the network's default rule in place.
    String ruleClassName = learningRule.getName();
    if (BackPropagation.class.getName().equals(ruleClassName)) {
        network.setLearningRule(new BackPropagation());
    } else if (MomentumBackpropagation.class.getName().equals(ruleClassName)) {
        network.setLearningRule(new MomentumBackpropagation());
    } else if (DynamicBackPropagation.class.getName().equals(ruleClassName)) {
        network.setLearningRule(new DynamicBackPropagation());
    } else if (ResilientPropagation.class.getName().equals(ruleClassName)) {
        network.setLearningRule(new ResilientPropagation());
    }

    // Optionally wire inputs straight through to outputs.
    if (connectIO) {
        network.connectInputsToOutputs();
    }

    return network;
}
 
Example #2
Source File: TestTimeSeries.java    From NeurophFramework with Apache License 2.0 6 votes vote down vote up
public void train() {
    // Location of the raw time series data on disk.
    String inputFileName = "C:\\timeseries\\BSW15";

    // Build a 5-10-1 perceptron with tanh activations.
    neuralNet = new MultiLayerPerceptron(TransferFunctionType.TANH, 5, 10, 1);

    // Tune the default momentum backpropagation rule and subscribe to training events.
    MomentumBackpropagation momentumRule = (MomentumBackpropagation) neuralNet.getLearningRule();
    momentumRule.setLearningRate(0.2);
    momentumRule.setMomentum(0.5);
    momentumRule.addListener(this);

    // Load the tab-separated training data: 5 input columns, 1 output column, no header row.
    trainingSet = DataSet.createFromFile(inputFileName, 5, 1, "\t", false);

    // Train the network on the loaded data.
    neuralNet.learn(trainingSet);

    System.out.println("Done training.");
}
 
Example #3
Source File: MlpNetworkTrainer.java    From developerWorks with Apache License 2.0 6 votes vote down vote up
/**
 * Logs a summary of the network configuration and current training state.
 *
 * @param metrics
 *          The ubiquitous {@link NetworkMetrics} object.
 * @param neuronProperties
 *          The Neuroph neuron properties metadata object
 * @param learningRule
 *          The learning rule in use
 */
private static void logNetworkInfo(NetworkMetrics metrics, NeuronProperties neuronProperties,
    MomentumBackpropagation learningRule) {
  String useBias = neuronProperties.getProperty("useBias").toString();
  String learningRate = Double.toString(learningRule.getLearningRate());
  String maxError = Double.toString(learningRule.getMaxError());
  String momentum = Double.toString(learningRule.getMomentum());
  log.info("*** NETWORK INFO ***");

  // Build the multi-line report with chained appends; the original concatenated
  // strings inside append(), which defeats the purpose of the builder.
  StringBuilder sb = new StringBuilder();
  sb.append("Network Info:\n")
      .append("\tUse Bias            : ").append(useBias).append("\n")
      .append("\tLearning Rate       : ").append(learningRate).append("\n")
      .append("\tMax Error           : ").append(maxError).append("\n")
      .append("\tMomentum            : ").append(momentum).append("\n")
      .append("\tLayer Structure     : ").append(metrics.getLayerStructure()).append("\n")
      // Total error is reported as a percentage rounded half-up to 2 decimal places.
      .append("\tTotal Network Error : ")
      .append(BigDecimal.valueOf(learningRule.getTotalNetworkError() * 100.0).setScale(2, RoundingMode.HALF_UP));
  log.info(sb.toString());
}
 
Example #4
Source File: SingleImageTrainer.java    From FakeImageDetection with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Trains the network on a single image, labelled according to the isReal flag,
 * using the trainer's configured momentum backpropagation parameters.
 */
@Override
public void doRun() {
    HashMap<String, BufferedImage> imagesMap = new HashMap<String, BufferedImage>();
    String fileName = "";
    // NOTE(review): this branch looks inverted - the image is labelled "real" when
    // isReal is false and "faked" when it is true. Confirm this is intentional.
    if (!isReal) {
        fileName = "real";
    } else {
        fileName = "faked";
    }

    System.out.println("Teaching as " + fileName);
    // Build a one-row RGB training set from the single image.
    imagesMap.put(fileName, image);
    Map<String, FractionRgbData> imageRgbData = ImageUtilities.getFractionRgbDataForImages(imagesMap);
    DataSet learningData = ImageRecognitionHelper.createRGBTrainingSet(labels, imageRgbData);
    // Configure the network's momentum backpropagation rule from this trainer's fields.
    MomentumBackpropagation mBackpropagation = (MomentumBackpropagation) nnet.getLearningRule();
    mBackpropagation.setLearningRate(learningRate);
    mBackpropagation.setMaxError(maxError);
    mBackpropagation.setMomentum(momentum);

    System.out.println("Network Information\nLabel = " + nnet.getLabel()
            + "\n Input Neurons = " + nnet.getInputsCount()
            + "\n Number of layers = " + nnet.getLayersCount()
    );

    // Subscribe for learning events, then run a training pass on the one-row set.
    mBackpropagation.addListener(this);
    System.out.println("Starting training......");
    nnet.learn(learningData, mBackpropagation);

    //Mark nnet as dirty. Write on close
    isDirty = true;
}
 
Example #5
Source File: BreastCancerSample.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
public void run() {

    System.out.println("Creating training and test set from file...");
    final String dataSetFile = "data_sets/breast_cancer.txt";
    final int numInputs = 30;
    final int numOutputs = 1;

    // Load the comma-separated data, then split 70/30 into training and test sets.
    DataSet dataSet = DataSet.createFromFile(dataSetFile, numInputs, numOutputs, ",");
    DataSet[] split = dataSet.split(0.7, 0.3);
    DataSet trainingSet = split[0];
    DataSet testSet = split[1];

    // Scale both partitions with maxima computed from the training set only.
    Normalizer normalizer = new MaxNormalizer(trainingSet);
    normalizer.normalize(trainingSet);
    normalizer.normalize(testSet);

    // 30-16-1 multilayer perceptron.
    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(numInputs, 16, numOutputs);

    // Configure momentum backpropagation and subscribe to learning events.
    MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule();
    learningRule.addListener(this);
    learningRule.setLearningRate(0.3);
    learningRule.setMaxError(0.01);
    learningRule.setMaxIterations(500);

    System.out.println("Training network...");
    neuralNet.learn(trainingSet);

    System.out.println("Testing network...");
    testNeuralNetwork(neuralNet, testSet);

}
 
Example #6
Source File: MyBenchmarkTask.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
/**
 * Benchmark preparation consists of training set and neural network creation.
 * Generates a training set with 100 rows, where every row has 10 input and 5 output
 * elements. The neural network has two hidden layers with 8 and 7 neurons, and its
 * learning rule is capped at 2000 iterations.
 */
@Override
public void prepareTest() {
    final int rows = 100;
    final int inputSize = 10;
    final int outputSize = 5;

    this.trainingSet = new DataSet(inputSize, outputSize);

    // Fill the set with uniformly random input/output pairs.
    for (int row = 0; row < rows; row++) {
        double[] input = new double[inputSize];
        double[] output = new double[outputSize];
        for (int j = 0; j < inputSize; j++) {
            input[j] = Math.random();
        }
        for (int j = 0; j < outputSize; j++) {
            output[j] = Math.random();
        }
        trainingSet.add(new DataSetRow(input, output));
    }

    // 10-8-7-5 perceptron; limit learning to 2000 iterations.
    network = new MultiLayerPerceptron(inputSize, 8, 7, outputSize);
    ((MomentumBackpropagation) network.getLearningRule()).setMaxIterations(2000);
}
 
Example #7
Source File: MomentumTraining.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
/**
 * Sets up the learning rule with the configured settings, trains the network on
 * the dataset, and records the results of the run in the statistics collector.
 */
@Override
public void testNeuralNet() {
    MomentumBackpropagation rule = (MomentumBackpropagation) setParameters();
    getNeuralNet().setLearningRule(rule);
    getNeuralNet().learn(getDataset());

    // Record iteration count, final error and the confusion matrix for this run.
    TrainingResult result = new TrainingResult(rule.getCurrentIteration(), rule.getTotalNetworkError(), createMatrix());
    this.getStats().addData(result);
    this.getStats().calculateParameters();
}
 
Example #8
Source File: DiabetesSample.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
public void run() {
    final String dataSetFile = "data_sets/diabetes.txt";
    final int inputsCount = 8;
    final int outputsCount = 1;

    // Load the comma-separated data, then split 70/30 into training and test sets.
    DataSet dataSet = DataSet.createFromFile(dataSetFile, inputsCount, outputsCount, ",");
    DataSet[] split = dataSet.split(0.7, 0.3);
    DataSet trainingSet = split[0];
    DataSet testSet = split[1];

    // Scale both partitions with maxima computed from the training set only.
    Normalizer normalizer = new MaxNormalizer(trainingSet);
    normalizer.normalize(trainingSet);
    normalizer.normalize(testSet);

    System.out.println("Creating neural network...");
    // 8-20-10-1 multilayer perceptron.
    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 20, 10, outputsCount);

    // Configure momentum backpropagation and subscribe to learning events.
    MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule();
    learningRule.addListener(this);
    learningRule.setLearningRate(0.6);
    learningRule.setMaxError(0.07);
    learningRule.setMaxIterations(100000);

    System.out.println("Training network...");
    neuralNet.learn(trainingSet);

    System.out.println("Testing network...");
    testNeuralNetwork(neuralNet, testSet);

}
 
Example #9
Source File: WineQuality.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
public void run() throws InterruptedException, ExecutionException {
    System.out.println("Creating training set...");
    final String dataSetFile = "data_sets/wine.txt";
    final int inputsCount = 11;
    final int outputsCount = 10;

    // Load the tab-separated data (with header row), max-normalize, then shuffle.
    DataSet dataSet = DataSet.createFromFile(dataSetFile, inputsCount, outputsCount, "\t", true);
    Normalizer normalizer = new MaxNormalizer(dataSet);
    normalizer.normalize(dataSet);
    dataSet.shuffle();

    System.out.println("Creating neural network...");
    // 11-20-15-10 multilayer perceptron with an explicit momentum backpropagation rule.
    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 20, 15, outputsCount);
    neuralNet.setLearningRule(new MomentumBackpropagation());

    MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule();
    learningRule.setLearningRate(0.1);
    learningRule.setMaxIterations(10);

    // Label the output neurons with the quality classes 1..10.
    String[] classLabels = new String[]{"1", "2", "3", "4", "5", "6", "7", "8", "9", "10"};
    neuralNet.setOutputLabels(classLabels);

    // 10-fold cross-validation.
    // NOTE(review): both results below are computed but never reported or used.
    KFoldCrossValidation crossVal = new KFoldCrossValidation(neuralNet, dataSet, 10);
    EvaluationResult totalResult = crossVal.run();
    List<FoldResult> resultsByFold = crossVal.getResultsByFolds();
}
 
Example #10
Source File: MomentumTraining.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
/**
 * Builds a momentum backpropagation learning rule configured from the stored settings.
 *
 * @return returns learning rule with predefined parameters
 */
@Override
public LearningRule setParameters() {
    MomentumBackpropagation rule = new MomentumBackpropagation();
    // Copy every relevant knob from the settings object.
    rule.setBatchMode(getSettings().isBatchMode());
    rule.setLearningRate(getSettings().getLearningRate());
    rule.setMaxError(getSettings().getMaxError());
    rule.setMaxIterations(getSettings().getMaxIterations());
    rule.setMomentum(getSettings().getMomentum());
    return rule;
}
 
Example #11
Source File: TrainNetwork.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a multilayer perceptron sized from the configuration and saves it to
 * the configured file.
 *
 * NOTE(review): the learning-rule parameters are set but learn() is never
 * called here, so the saved file contains an untrained network (with the
 * configured rule). Confirm training is performed elsewhere before use.
 */
public void createNeuralNetwork() {
    System.out.println("Creating neural network... ");
    // Layer sizes: input, two hidden layers, output - all taken from config.
    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(config.getInputCount(), config.getFirstHiddenLayerCount(), config.getSecondHiddenLayerCount(), config.getOutputCount());
    // Configure the momentum backpropagation rule attached by default.
    MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule();
    learningRule.setLearningRate(0.01);
    learningRule.setMaxError(0.1);
    learningRule.setMaxIterations(1000);
    System.out.println("Saving neural network to file... ");
    neuralNet.save(config.getTrainedNetworkFileName());
    System.out.println("Neural network successfully saved!");
}
 
Example #12
Source File: WheatSeeds.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
public void run() throws InterruptedException, ExecutionException {
    System.out.println("Creating training set...");
    final String dataSetFile = "data_sets/seeds.txt";
    final int inputsCount = 7;
    final int outputsCount = 3;

    // Load the tab-separated data and shuffle it before cross-validation.
    DataSet dataSet = DataSet.createFromFile(dataSetFile, inputsCount, outputsCount, "\t");
    dataSet.shuffle();

    System.out.println("Creating neural network...");
    // 7-15-2-3 multilayer perceptron with an explicit momentum backpropagation rule.
    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 15, 2, outputsCount);
    neuralNet.setLearningRule(new MomentumBackpropagation());

    MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule();
    learningRule.setLearningRate(0.1);
    learningRule.setMaxError(0.01);
    learningRule.setMaxIterations(1000);

    // Label the three output neurons with the wheat variety names.
    String[] classLabels = new String[]{"Cama", "Rosa", "Canadian"};
    neuralNet.setOutputLabels(classLabels);

    // 10-fold cross-validation.
    // NOTE(review): both results below are computed but never reported or used.
    KFoldCrossValidation crossVal = new KFoldCrossValidation(neuralNet, dataSet, 10);
    EvaluationResult totalResult = crossVal.run();
    List<FoldResult> resultsByFold = crossVal.getResultsByFolds();
}
 
Example #13
Source File: ConcreteStrenghtTestSample.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
public void run() {

    System.out.println("Creating training set...");
    final String dataSetFile = "data_sets/concrete_strenght_test_data.txt";
    final int inputsCount = 8;
    final int outputsCount = 1;

    // Load the comma-separated data (8 inputs, 1 output, no header row).
    DataSet dataSet = DataSet.createFromFile(dataSetFile, inputsCount, outputsCount, ",", false);

    System.out.println("Creating neural network...");
    // 8-22-1 multilayer perceptron.
    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 22, outputsCount);

    // Configure momentum backpropagation and subscribe to learning events.
    MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule();
    learningRule.addListener(this);
    learningRule.setLearningRate(0.2);
    learningRule.setMaxError(0.01);

    System.out.println("Training network...");
    neuralNet.learn(dataSet);

    System.out.println("Training completed.");
    System.out.println("Testing network...");
    testNeuralNetwork(neuralNet, dataSet);

    System.out.println("Saving network");
    // Persist the trained network to disk.
    neuralNet.save("MyNeuralConcreteStrenght.nnet");

    System.out.println("Done.");
}
 
Example #14
Source File: BalanceScaleSample.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
public void run() {

    System.out.println("Creating training set...");
    final String dataSetFile = "data_sets/balance_scale_data.txt";
    final int inputsCount = 20;
    final int outputsCount = 3;

    // Load the tab-separated data (20 inputs, 3 outputs, no header row).
    DataSet dataSet = DataSet.createFromFile(dataSetFile, inputsCount, outputsCount, "\t", false);

    System.out.println("Creating neural network...");
    // 20-22-3 multilayer perceptron.
    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 22, outputsCount);

    // Configure momentum backpropagation and subscribe to learning events.
    MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule();
    learningRule.addListener(this);
    learningRule.setLearningRate(0.2);
    learningRule.setMaxError(0.01);

    System.out.println("Training network...");
    neuralNet.learn(dataSet);

    System.out.println("Training completed.");
    System.out.println("Testing network...");
    testNeuralNetwork(neuralNet, dataSet);

    System.out.println("Saving network");
    // Persist the trained network to disk.
    neuralNet.save("MyNeuralNetBalanceScale.nnet");

    System.out.println("Done.");
}
 
Example #15
Source File: WineClassificationSample.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
public void run() {

    System.out.println("Creating training set...");
    final String dataSetFile = "data_sets/wine_classification_data.txt";
    final int inputsCount = 13;
    final int outputsCount = 3;

    // Load the tab-separated data (13 inputs, 3 outputs, no header row).
    DataSet dataSet = DataSet.createFromFile(dataSetFile, inputsCount, outputsCount, "\t", false);

    System.out.println("Creating neural network...");
    // 13-22-3 multilayer perceptron.
    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 22, outputsCount);

    // Configure momentum backpropagation and subscribe to learning events.
    MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule();
    learningRule.addListener(this);
    learningRule.setLearningRate(0.2);
    learningRule.setMaxError(0.01);

    System.out.println("Training network...");
    neuralNet.learn(dataSet);

    System.out.println("Training completed.");
    System.out.println("Testing network...");
    testNeuralNetwork(neuralNet, dataSet);

    System.out.println("Saving network");
    // Persist the trained network to disk.
    neuralNet.save("MyNeuralNetWineClassification.nnet");

    System.out.println("Done.");
}
 
Example #16
Source File: PredictingPokerHandsSample.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
public void run() {

    System.out.println("Creating training set...");
    final String dataSetFile = "data_sets/predicting_poker_hands_data.txt";
    final int inputsCount = 85;
    final int outputsCount = 9;

    // Load the tab-separated data (85 inputs, 9 outputs, no header row).
    DataSet dataSet = DataSet.createFromFile(dataSetFile, inputsCount, outputsCount, "\t", false);

    System.out.println("Creating neural network...");
    // 85-65-9 multilayer perceptron.
    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 65, outputsCount);

    // Configure momentum backpropagation and subscribe to learning events.
    MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule();
    learningRule.addListener(this);
    learningRule.setLearningRate(0.2);
    learningRule.setMaxError(0.01);

    System.out.println("Training network...");
    neuralNet.learn(dataSet);

    System.out.println("Training completed.");
    System.out.println("Testing network...");
    testNeuralNetwork(neuralNet, dataSet);

    System.out.println("Saving network");
    // Persist the trained network to disk.
    neuralNet.save("MyNeuralNetPokerHands.nnet");

    System.out.println("Done.");
}
 
Example #17
Source File: PredictingPerformanceOfCPUSample.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
public void run() {

    System.out.println("Creating training set...");
    final String dataSetFile = "data_sets/cpu_data.txt";
    final int inputsCount = 7;
    final int outputsCount = 1;

    // Load the CSV data and max-normalize it in place.
    DataSet dataSet = DataSets.readFromCsv(dataSetFile, inputsCount, outputsCount);
    DataSets.normalizeMax(dataSet);

    System.out.println("Creating neural network...");
    // 7-16-1 multilayer perceptron.
    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 16, outputsCount);

    // Configure momentum backpropagation and subscribe to learning events.
    MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule();
    learningRule.addListener(this);
    learningRule.setLearningRate(0.2);
    learningRule.setMaxError(0.01);

    System.out.println("Training network...");
    neuralNet.learn(dataSet);

    System.out.println("Training completed.");
    System.out.println("Testing network...");
    testNeuralNetwork(neuralNet, dataSet);

    System.out.println("Saving network");
    // Persist the trained network to disk.
    neuralNet.save("MyNeuralNetCPU.nnet");

    System.out.println("Done.");
}
 
Example #18
Source File: PredictingTheReligionSample.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
public void run() {

    System.out.println("Creating training set...");
    final String dataSetFile = "data_sets/religion_data.txt";
    final int inputsCount = 54;
    final int outputsCount = 5;

    // Load the tab-separated data (54 inputs, 5 outputs, no header row).
    DataSet dataSet = DataSet.createFromFile(dataSetFile, inputsCount, outputsCount, "\t", false);

    System.out.println("Creating neural network...");
    // 54-22-5 multilayer perceptron.
    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 22, outputsCount);

    // Configure momentum backpropagation and subscribe to learning events.
    MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule();
    learningRule.addListener(this);
    learningRule.setLearningRate(0.2);
    learningRule.setMaxError(0.01);

    System.out.println("Training network...");
    neuralNet.learn(dataSet);

    System.out.println("Training completed.");
    System.out.println("Testing network...");
    testNeuralNetwork(neuralNet, dataSet);

    System.out.println("Saving network");
    // Persist the trained network to disk.
    neuralNet.save("MyNeuralNetReligion.nnet");

    System.out.println("Done.");
}
 
Example #19
Source File: AnimalsClassificationSample.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
public void run() {

    System.out.println("Creating training set...");
    final String dataSetFile = "data_sets/animals_data.txt";
    final int inputsCount = 20;
    final int outputsCount = 7;

    // Load the tab-separated data (20 inputs, 7 outputs, header row present).
    DataSet dataSet = DataSet.createFromFile(dataSetFile, inputsCount, outputsCount, "\t", true);

    System.out.println("Creating neural network...");
    // 20-22-7 multilayer perceptron.
    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 22, outputsCount);

    // Configure momentum backpropagation and subscribe to learning events.
    MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule();
    learningRule.addListener(this);
    learningRule.setLearningRate(0.2);
    learningRule.setMaxError(0.01);

    System.out.println("Training network...");
    neuralNet.learn(dataSet);

    System.out.println("Training completed.");
    System.out.println("Testing network...");
    testNeuralNetwork(neuralNet, dataSet);

    System.out.println("Saving network");
    // Persist the trained network to disk.
    neuralNet.save("MyNeuralNetAnimals.nnet");

    System.out.println("Done.");
}
 
Example #20
Source File: RectifierNeuralNetwork.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
/**
 * Builds a fully-connected network: a linear input layer, rectified-linear hidden
 * layers, and a sigmoid output layer, trained with momentum backpropagation.
 *
 * @param neuronsInLayers neuron count per layer, first entry is the input layer
 *        and the last entry is the output layer
 */
public RectifierNeuralNetwork(List<Integer> neuronsInLayers) {
    // Input layer: linear input neurons.
    NeuronProperties inputNeuronProperties = new NeuronProperties(InputNeuron.class, Linear.class);
    Layer inputLayer = LayerFactory.createLayer(neuronsInLayers.get(0), inputNeuronProperties);
    this.addLayer(inputLayer);

    // Hidden layers: rectified linear units, each fully connected to its predecessor.
    Layer previousLayer = inputLayer;
    for (int layerIdx = 1; layerIdx < neuronsInLayers.size() - 1; layerIdx++) {
        Layer hiddenLayer = LayerFactory.createLayer(neuronsInLayers.get(layerIdx), RectifiedLinear.class);
        this.addLayer(hiddenLayer);
        ConnectionFactory.fullConnect(previousLayer, hiddenLayer);
        previousLayer = hiddenLayer;
    }

    // Output layer: sigmoid neurons, fully connected to the last hidden layer.
    int outputNeuronsCount = neuronsInLayers.get(neuronsInLayers.size() - 1);
    Layer outputLayer = LayerFactory.createLayer(outputNeuronsCount, Sigmoid.class);
    this.addLayer(outputLayer);
    ConnectionFactory.fullConnect(previousLayer, outputLayer);

    NeuralNetworkFactory.setDefaultIO(this); // set input and output cells for network
    this.setLearningRule(new MomentumBackpropagation());
    this.randomizeWeights(new HeZhangRenSunUniformWeightsRandomizer());
}
 
Example #21
Source File: CarEvaluationSample.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
public void run() {

    System.out.println("Creating training set...");
    final String dataSetFile = "data_sets/car_evaluation_data.txt";
    final int inputsCount = 21;
    final int outputsCount = 4;

    // Load the tab-separated data (21 inputs, 4 outputs, no header row).
    DataSet dataSet = DataSet.createFromFile(dataSetFile, inputsCount, outputsCount, "\t", false);

    System.out.println("Creating neural network...");
    // 21-22-4 multilayer perceptron.
    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 22, outputsCount);

    // Configure momentum backpropagation and subscribe to learning events.
    MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule();
    learningRule.addListener(this);
    learningRule.setLearningRate(0.2);
    learningRule.setMaxError(0.01);

    System.out.println("Training network...");
    neuralNet.learn(dataSet);

    System.out.println("Training completed.");
    System.out.println("Testing network...");
    testNeuralNetwork(neuralNet, dataSet);

    System.out.println("Saving network");
    // Persist the trained network to disk.
    neuralNet.save("MyNeuralNetCarEvaluation.nnet");

    System.out.println("Done.");
}
 
Example #22
Source File: ImageRecognitionHelper.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
/**
 * Creates and returns a new neural network for image recognition.
 *
 * The input layer is sized from the sampling resolution and color mode (three
 * values per pixel for RGB/HSL, one for black and white), and the output layer
 * gets one neuron per image label.
 *
 * @param label neural network label
 * @param samplingResolution sampling resolution (image size)
 * @param colorMode color mode
 * @param imageLabels image labels
 * @param layersNeuronsCount neuron counts in hidden layers; NOTE(review): this
 *        list is mutated (input and output layer sizes are inserted), so callers
 *        should not reuse it afterwards - confirm this is intentional
 * @param transferFunctionType type of transfer function to use for neurons in network
 * @return image recognition network with a momentum backpropagation learning rule
 */
public static NeuralNetwork createNewNeuralNetwork(String label, Dimension samplingResolution, ColorMode colorMode, List<String> imageLabels,  List<Integer> layersNeuronsCount, TransferFunctionType transferFunctionType) {

    // Full-color modes feed three values (channels) per sampled pixel.
    int inputNeuronsCount;
    if ((colorMode == ColorMode.COLOR_RGB) || (colorMode == ColorMode.COLOR_HSL)) {
        inputNeuronsCount = 3 * samplingResolution.getWidth() * samplingResolution.getHeight();
    } else {
        // Black and white: one value per pixel.
        inputNeuronsCount = samplingResolution.getWidth() * samplingResolution.getHeight();
    }

    int outputNeuronsCount = imageLabels.size();

    // Surround the hidden-layer counts with the input and output layer sizes.
    layersNeuronsCount.add(0, inputNeuronsCount);
    layersNeuronsCount.add(outputNeuronsCount);

    System.out.println("Neuron layer size counts vector = " + layersNeuronsCount);

    NeuralNetwork neuralNetwork = new MultiLayerPerceptron(layersNeuronsCount, transferFunctionType);
    neuralNetwork.setLabel(label);

    // Attach the image recognition plugin and name the output neurons.
    PluginBase imageRecognitionPlugin = new ImageRecognitionPlugin(samplingResolution, colorMode);
    neuralNetwork.addPlugin(imageRecognitionPlugin);
    assignLabelsToOutputNeurons(neuralNetwork, imageLabels);

    neuralNetwork.setLearningRule(new MomentumBackpropagation());

    return neuralNetwork;
}
 
Example #23
Source File: RGBImageRecognitionTrainingSample.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
public static void main(String[] args) throws IOException {

    // Directory containing the training images.
    String imageDir = "/home/zoran/Downloads/MihailoHSLTest/trening";

    // Image names double as output neuron labels.
    List<String> imageLabels = new ArrayList();
    imageLabels.add("bird");
    imageLabels.add("cat");
    imageLabels.add("dog");

    // Build the RGB training set from 20x20 downsampled images.
    Map<String, FractionRgbData> rgbDataMap = ImageRecognitionHelper.getFractionRgbDataForDirectory(new File(imageDir), new Dimension(20, 20));
    DataSet dataSet = ImageRecognitionHelper.createRGBTrainingSet(imageLabels, rgbDataMap);

    // Recognition network with a single 12-neuron hidden layer and sigmoid activations.
    List<Integer> hiddenLayers = new ArrayList<>();
    hiddenLayers.add(12);
    NeuralNetwork nnet = ImageRecognitionHelper.createNewNeuralNetwork("someNetworkName", new Dimension(20, 20), ColorMode.COLOR_RGB, imageLabels, hiddenLayers, TransferFunctionType.SIGMOID);

    // Configure momentum backpropagation.
    // NOTE(review): maxError 0.9 and momentum 1 are unusual values - confirm intentional.
    MomentumBackpropagation rule = (MomentumBackpropagation) nnet.getLearningRule();
    rule.setLearningRate(0.2);
    rule.setMaxError(0.9);
    rule.setMomentum(1);

    // Train the network.
    System.out.println("NNet start learning...");
    nnet.learn(dataSet);
    System.out.println("NNet learned");

}
 
Example #24
Source File: HSLImageRecognitionTrainingSample.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
/**
 * Trains an image-recognition network on HSL pixel data taken from a directory
 * of sample images. Mirrors the RGB variant but uses {@code ColorMode.COLOR_HSL}
 * and {@code FractionHSLData}.
 *
 * @param args unused
 * @throws IOException if the image directory cannot be read
 */
public static void main (String [] args) throws IOException {

    // path to image directory containing the training samples
    String imageDir ="/home/zoran/Downloads/MihailoHSLTest/trening";

    // image names - used for output neuron labels
    List<String> imageLabels = new ArrayList<>();   // diamond operator instead of raw type
    imageLabels.add("bird");
    imageLabels.add("cat");
    imageLabels.add("dog");


    // create dataset: each image is downsampled to 20x20 and converted to HSL fractions
    Map<String,FractionHSLData> map = ImageRecognitionHelper.getFractionHSLDataForDirectory (new File(imageDir), new Dimension(20, 20));
    DataSet dataSet = ImageRecognitionHelper.createHSLTrainingSet(imageLabels, map);

    // create neural network with a single hidden layer of 12 neurons
    List <Integer> hiddenLayers = new ArrayList<>();
    hiddenLayers.add(12);
    NeuralNetwork nnet = ImageRecognitionHelper.createNewNeuralNetwork("someNetworkName", new Dimension(20,20), ColorMode.COLOR_HSL, imageLabels, hiddenLayers, TransferFunctionType.SIGMOID);

    // set learning rule parameters
    MomentumBackpropagation mb = (MomentumBackpropagation)nnet.getLearningRule();
    mb.setLearningRate(0.2);
    // NOTE(review): maxError 0.9 stops training almost immediately; typical demo
    // values are around 0.01 — confirm this threshold is intentional
    mb.setMaxError(0.9);
    // NOTE(review): momentum of 1 keeps the full previous weight change each step
    // and can prevent convergence; values in (0, 1) are usual — confirm
    mb.setMomentum(1);

    // train network
    System.out.println("NNet start learning...");
    nnet.learn(dataSet);
    System.out.println("NNet learned");
}
 
Example #25
Source File: Model.java    From o2oa with GNU Affero General Public License v3.0 5 votes vote down vote up
/**
 * Builds a fully connected multi-layer perceptron: an input layer with bias,
 * a configurable number of hidden sigmoid layers with bias, and a sigmoid
 * output layer. The learning rule is a MomentumBackpropagation configured from
 * this model's property map, and weights are randomized before returning.
 *
 * @param inValueCount     number of input neurons
 * @param outValueCount    number of output neurons
 * @param hiddenLayerCount number of hidden layers
 * @return the assembled, randomized network
 */
public NeuralNetwork<MomentumBackpropagation> createNeuralNetwork(Integer inValueCount, Integer outValueCount,
		Integer hiddenLayerCount) {
	// neuron templates for each layer role
	NeuronProperties inputProps = new NeuronProperties(InputNeuron.class, Linear.class);
	NeuronProperties hiddenProps = new NeuronProperties(InputOutputNeuron.class, WeightedSum.class,
			Sigmoid.class);
	NeuronProperties outputProps = new NeuronProperties(InputOutputNeuron.class, WeightedSum.class,
			Sigmoid.class);

	NeuralNetwork<MomentumBackpropagation> network = new NeuralNetwork<>();
	network.setNetworkType(NeuralNetworkType.MULTI_LAYER_PERCEPTRON);

	// input layer gets an extra bias neuron
	Layer firstLayer = LayerFactory.createLayer(inValueCount, inputProps);
	firstLayer.addNeuron(new BiasNeuron());
	network.addLayer(firstLayer);

	// each hidden layer also carries a bias neuron
	for (Integer neuronCount : this.hiddenNeurons(inValueCount, outValueCount, hiddenLayerCount)) {
		Layer hiddenLayer = LayerFactory.createLayer(neuronCount, hiddenProps);
		hiddenLayer.addNeuron(new BiasNeuron());
		network.addLayer(hiddenLayer);
	}

	// output layer has no bias neuron
	network.addLayer(LayerFactory.createLayer(outValueCount, outputProps));

	// fully connect every consecutive pair of layers
	for (int i = 1; i < network.getLayersCount(); i++) {
		ConnectionFactory.fullConnect(network.getLayers().get(i - 1), network.getLayers().get(i));
	}

	// learning rule parameters come from the configured property map, with defaults
	network.setLearningRule(this.createMomentumBackpropagation(
			MapTools.getDouble(this.getPropertyMap(), PROPERTY_MLP_MAXERROR, DEFAULT_MLP_MAXERROR),
			MapTools.getInteger(this.getPropertyMap(), PROPERTY_MLP_MAXITERATION, DEFAULT_MLP_MAXITERATION),
			MapTools.getDouble(this.getPropertyMap(), PROPERTY_MLP_LEARNINGRATE, DEFAULT_MLP_LEARNINGRATE),
			MapTools.getDouble(this.getPropertyMap(), PROPERTY_MLP_MOMENTUM, DEFAULT_MLP_MOMENTUM)));
	NeuralNetworkFactory.setDefaultIO(network);
	network.randomizeWeights();
	return network;
}
 
Example #26
Source File: Model.java    From o2oa with GNU Affero General Public License v3.0 5 votes vote down vote up
/**
 * Creates a MomentumBackpropagation learning rule configured with the given
 * stopping criteria and training parameters.
 *
 * @param maxError     target total network error at which training stops
 * @param maxIteration hard cap on training iterations
 * @param learningRate step size for weight updates
 * @param momentum     fraction of the previous weight change carried forward
 * @return the configured learning rule
 */
private MomentumBackpropagation createMomentumBackpropagation(Double maxError, Integer maxIteration,
		Double learningRate, Double momentum) {
	MomentumBackpropagation rule = new MomentumBackpropagation();
	rule.setMaxError(maxError);
	rule.setMaxIterations(maxIteration);
	rule.setLearningRate(learningRate);
	rule.setMomentum(momentum);
	return rule;
}
 
Example #27
Source File: BatchImageTrainer.java    From FakeImageDetection with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Training worker: builds an RGB training set from the images in
 * {@code srcDirectory}, loads a previously saved network from {@code nnFile},
 * applies the configured learning parameters, and trains the network,
 * notifying {@code listener} on completion.
 */
@Override
public void doRun() {
    try {
        System.out.println("Starting training thread....." + sampleDimension.toString() + " and " + imageLabels.toString());

        HashMap<String, BufferedImage> imagesMap = new HashMap<>();
        // listFiles() returns null when srcDirectory does not exist or is not
        // a readable directory — guard against the NPE the original would throw
        File[] sampleFiles = srcDirectory.listFiles();
        if (sampleFiles == null) {
            System.out.println("Cannot list training images in " + srcDirectory);
            return;
        }
        for (File file : sampleFiles) {
            imageLabels.add(FilenameUtils.removeExtension(file.getName()));
            if (sampleDimension.getWidth() > 0 && sampleDimension.getHeight() > 0) {
                Double w = sampleDimension.getWidth();
                Double h = sampleDimension.getHeight();
                imagesMap.put(file.getName(), ImageUtilities.resizeImage(ImageUtilities.loadImage(file), w.intValue(), h.intValue()));
            }
        }
        Map<String, FractionRgbData> imageRgbData = ImageUtilities.getFractionRgbDataForImages(imagesMap);
        DataSet learningData = ImageRecognitionHelper.createRGBTrainingSet(imageLabels, imageRgbData);

        // try-with-resources: the original leaked this stream
        try (FileInputStream nnStream = new FileInputStream(nnFile)) {
            nnet = NeuralNetwork.load(nnStream); //Load NNetwork
        }
        MomentumBackpropagation mBackpropagation = (MomentumBackpropagation) nnet.getLearningRule();
        mBackpropagation.setLearningRate(learningRate);
        mBackpropagation.setMaxError(maxError);
        mBackpropagation.setMomentum(momentum);

        System.out.println("Network Information\nLabel = " + nnet.getLabel()
                + "\n Input Neurons = " + nnet.getInputsCount()
                + "\n Number of layers = " + nnet.getLayersCount()
        );

        mBackpropagation.addListener(this);
        System.out.println("Starting training......");
        nnet.learn(learningData, mBackpropagation);
        //Training Completed
        listener.batchImageTrainingCompleted();
    } catch (IOException ex) {
        // broadened from FileNotFoundException so that the close() of the
        // try-with-resources stream is also handled
        System.out.println(ex.getMessage() + "\n" + ex.getLocalizedMessage());
    }

}
 
Example #28
Source File: GlassIdentificationSample.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
/**
 * End-to-end glass-identification demo: loads the tab-separated dataset,
 * trains a 9-22-7 multilayer perceptron with momentum backpropagation
 * (this object listening for learning events), tests the trained network,
 * and saves it to disk.
 */
public void run() {

    System.out.println("Creating training set...");

    String trainingFile = "data_sets/glass_identification_data.txt";
    int inputCount = 9;
    int outputCount = 7;

    // load the tab-separated training data (no header row)
    DataSet trainingSet = DataSet.createFromFile(trainingFile, inputCount, outputCount, "\t", false);
    //dataSet.normalize();

    System.out.println("Creating neural network...");
    // 9 inputs -> 22 hidden neurons -> 7 outputs
    MultiLayerPerceptron network = new MultiLayerPerceptron(inputCount, 22, outputCount);

    // register this object for learning-progress events
    MomentumBackpropagation backprop = (MomentumBackpropagation) network.getLearningRule();
    backprop.addListener(this);

    // training parameters
    backprop.setLearningRate(0.1);
    backprop.setMaxError(0.01);

    System.out.println("Training network...");
    network.learn(trainingSet);

    System.out.println("Training completed.");
    System.out.println("Testing network...");

    testNeuralNetwork(network, trainingSet);

    System.out.println("Saving network");
    network.save("MyNeuralGlassIdentification.nnet");

    System.out.println("Done.");
}
 
Example #29
Source File: Ionosphere.java    From NeurophFramework with Apache License 2.0 4 votes vote down vote up
/**
 * Logs training progress: prints the current iteration number and the total
 * network error of the learning rule that fired the event.
 */
@Override
public void handleLearningEvent(LearningEvent event) {
    MomentumBackpropagation rule = (MomentumBackpropagation) event.getSource();
    String progress = rule.getCurrentIteration() + ". iteration | Total network error: " + rule.getTotalNetworkError();
    System.out.println(progress);
}
 
Example #30
Source File: WheatSeeds.java    From NeurophFramework with Apache License 2.0 4 votes vote down vote up
@Override
public void handleLearningEvent(LearningEvent event) {
    MomentumBackpropagation bp = (MomentumBackpropagation) event.getSource();
    System.out.println(bp.getCurrentIteration() + ". iteration | Total network error: " + bp.getTotalNetworkError());
}