org.neuroph.nnet.learning.BackPropagation Java Examples

The following examples show how to use org.neuroph.nnet.learning.BackPropagation. Each example is taken from an open source project; the source file and license are noted above each snippet.
Example #1
Source File: NeurophXOR.java    From tutorials with MIT License
public static NeuralNetwork trainNeuralNetwork(NeuralNetwork ann) {
    int inputSize = 2;
    int outputSize = 1;
    DataSet ds = new DataSet(inputSize, outputSize);

    DataSetRow rOne = new DataSetRow(new double[] { 0, 1 }, new double[] { 1 });
    ds.addRow(rOne);
    DataSetRow rTwo = new DataSetRow(new double[] { 1, 1 }, new double[] { 0 });
    ds.addRow(rTwo);
    DataSetRow rThree = new DataSetRow(new double[] { 0, 0 }, new double[] { 0 });
    ds.addRow(rThree);
    DataSetRow rFour = new DataSetRow(new double[] { 1, 0 }, new double[] { 1 });
    ds.addRow(rFour);

    BackPropagation backPropagation = new BackPropagation();
    backPropagation.setMaxIterations(1000);

    ann.learn(ds, backPropagation);
    return ann;
}
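
Once trained, the returned network can be exercised with Neuroph's standard setInput/calculate/getOutput cycle. A minimal sketch, assuming the caller supplies the network shape (the 2-3-1 layout and variable names below are illustrative, not from the original tutorial):

NeuralNetwork ann = trainNeuralNetwork(new MultiLayerPerceptron(2, 3, 1));
ann.setInput(0, 1);
ann.calculate();
System.out.println(Arrays.toString(ann.getOutput())); // should approach { 1 } for XOR(0, 1)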
 
Example #2
Source File: NeuralNetworkFactory.java    From NeurophFramework with Apache License 2.0
/**
 * Creates and returns a new instance of Multi Layer Perceptron.
 * @param layersStr space separated number of neurons in layers
 * @param transferFunctionType transfer function type for neurons
 * @param learningRule learning rule class to instantiate for the network
 * @param useBias whether neurons should use a bias input
 * @param connectIO whether to connect the input layer directly to the output layer
 * @return instance of Multi Layer Perceptron
 */
public static MultiLayerPerceptron createMLPerceptron(String layersStr, TransferFunctionType transferFunctionType, Class learningRule, boolean useBias, boolean connectIO) {
    ArrayList<Integer> layerSizes = VectorParser.parseInteger(layersStr);
    NeuronProperties neuronProperties = new NeuronProperties(transferFunctionType, useBias);
    MultiLayerPerceptron nnet = new MultiLayerPerceptron(layerSizes, neuronProperties);

    // set learning rule - TODO: use reflection here
    if (learningRule.getName().equals(BackPropagation.class.getName())) {
        nnet.setLearningRule(new BackPropagation());
    } else if (learningRule.getName().equals(MomentumBackpropagation.class.getName())) {
        nnet.setLearningRule(new MomentumBackpropagation());
    } else if (learningRule.getName().equals(DynamicBackPropagation.class.getName())) {
        nnet.setLearningRule(new DynamicBackPropagation());
    } else if (learningRule.getName().equals(ResilientPropagation.class.getName())) {
        nnet.setLearningRule(new ResilientPropagation());
    }

    // connect inputs directly to outputs if requested
    if (connectIO) {
        nnet.connectInputsToOutputs();
    }

    return nnet;
}
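
A hedged usage sketch for the factory above; the layer string, transfer function, and flags below are illustrative values, not taken from the original project:

MultiLayerPerceptron mlp = NeuralNetworkFactory.createMLPerceptron(
        "4 16 3", TransferFunctionType.SIGMOID, BackPropagation.class, true, false);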
 
Example #3
Source File: NetworkUtils.java    From developerWorks with Apache License 2.0
/**
 * Returns a NxNxNxN style string showing the layer structure
 * of the specified network.
 *
 * @param network the network whose layer structure is to be described
 * @return the layer structure string, or an empty string if the network is null
 */
public static String getNetworkStructure(NeuralNetwork<BackPropagation> network) {
  StringBuilder sb = new StringBuilder();
  //
  // First the inputs
  if (network != null) {
    sb.append(network.getInputsCount());
    //
    // Now for the hidden layers
    for (Layer layer : network.getLayers()) {
      sb.append("x");
      sb.append(layer.getNeuronsCount());
    }
    //
    // Finally, the outputs
    sb.append("x");
    sb.append(network.getOutputsCount());
  }
  return sb.toString();
}
 
Example #4
Source File: MNISTExample.java    From NeurophFramework with Apache License 2.0
@Override
public void handleLearningEvent(LearningEvent event) {
    BackPropagation bp = (BackPropagation) event.getSource();
    LOG.info("Current iteration: " + bp.getCurrentIteration());
    LOG.info("Error: " + bp.getTotalNetworkError());
    LOG.info("Calculation time: " + (System.currentTimeMillis() - start) / 1000.0);
    // neuralNetwork.save(bp.getCurrentIteration() + "CNN_MNIST" + bp.getCurrentIteration() + ".nnet");
    start = System.currentTimeMillis();
    // NeuralNetworkEvaluationService.completeEvaluation(neuralNetwork, testSet);
}
 
Example #5
Source File: NetworkUtils.java    From developerWorks with Apache License 2.0
/**
 * Runs the specified network using the Neuroph API.
 *
 * @param network the network to run
 * @param input the input vector to feed to the network
 * @return the network's output vector
 */
public static <T extends NeuralNetwork<BackPropagation>> double[] runNetwork(T network, double[] input) {
  double[] ret;
  network.setInput(input);
  network.calculate();
  // Return value is the network's output
  ret = network.getOutput();
  if (log.isTraceEnabled()) {
    log.trace("Input : " + Arrays.toString(input));
    log.trace("Output: " + Arrays.toString(ret));
  }
  return ret;
}
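
For example, assuming trainedNet is the XOR network returned by trainNeuralNetwork in Example #1:

double[] output = NetworkUtils.runNetwork(trainedNet, new double[] { 0, 1 });
// output[0] should be close to 1.0 once training has converged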
 
Example #6
Source File: MlpNetworkTrainer.java    From developerWorks with Apache License 2.0
/**
 * Randomizes the network weights based on settings in {@link NetworkProperties}, rather than
 * letting Neuroph do it for us.
 * 
 * @param network
 *          The network (must support BackPropagation learning rule) for which its weights
 *          are to be randomized.
 */
private void randomizeNetworkWeights(NeuralNetwork<BackPropagation> network) {
  Random randWeight = new Random();
  // Draw the lower weight bound uniformly from [minMinWeight, minWeight]
  double minWeight =
      randWeight.nextDouble() * (NetworkProperties.getMinWeight() - NetworkProperties.getMinMinWeight())
          + NetworkProperties.getMinMinWeight();
  // Draw the upper weight bound uniformly from [minMaxWeight, maxWeight]
  double maxWeight =
      randWeight.nextDouble() * (NetworkProperties.getMaxWeight() - NetworkProperties.getMinMaxWeight())
          + NetworkProperties.getMinMaxWeight();
  log.info("Randomizing weights: min=" + minWeight + ", max=" + maxWeight);
  network.randomizeWeights(minWeight, maxWeight);
}
 
Example #7
Source File: Cifar10Example.java    From NeurophFramework with Apache License 2.0
public void handleLearningEvent(LearningEvent event) {
    BackPropagation bp = (BackPropagation) event.getSource();
    LOG.info("Epoch no#: [{}]. Error [{}]", bp.getCurrentIteration(), bp.getTotalNetworkError());
    LOG.info("Epoch execution time: {} sec", (System.currentTimeMillis() - start) / 1000.0);
    // neuralNetwork.save(bp.getCurrentIteration() + "_MNIST_CNN-MIC.nnet");

    start = System.currentTimeMillis();
    // if (bp.getCurrentIteration() % 5 == 0)
    //     Evaluation.runFullEvaluation(neuralNetwork, testSet);
}
 
Example #8
Source File: DigitsRecognition.java    From NeurophFramework with Apache License 2.0
public static void main(String[] args) {

        //create training set from Data.DIGITS
        DataSet dataSet = generateTrainingSet();

        int inputCount = DigitData.CHAR_HEIGHT * DigitData.CHAR_WIDTH;
        int outputCount = DigitData.DIGITS.length;
        int hiddenNeurons = 19;

        //create neural network
        MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputCount, hiddenNeurons, outputCount);
        //get backpropagation learning rule from network
        BackPropagation learningRule = neuralNet.getLearningRule();

        learningRule.setLearningRate(0.5);
        learningRule.setMaxError(0.001);
        learningRule.setMaxIterations(5000);

        //add learning listener in order to print out training info
        learningRule.addListener(new LearningEventListener() {
            @Override
            public void handleLearningEvent(LearningEvent event) {
                BackPropagation bp = (BackPropagation) event.getSource();
                if (event.getEventType().equals(LearningEvent.Type.LEARNING_STOPPED)) {
                    System.out.println();
                    System.out.println("Training completed in " + bp.getCurrentIteration() + " iterations");
                    System.out.println("With total error " + bp.getTotalNetworkError() + '\n');
                } else {
                    System.out.println("Iteration: " + bp.getCurrentIteration() + " | Network error: " + bp.getTotalNetworkError());
                }
            }
        });

        //train neural network
        neuralNet.learn(dataSet);

        //test the network with the training set
        testNeuralNetwork(neuralNet, dataSet);

    }
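
The testNeuralNetwork helper is not included in this excerpt. A minimal sketch of what such a helper could look like, using only standard Neuroph calls (this body is an assumption, not the original source):

public static void testNeuralNetwork(NeuralNetwork neuralNet, DataSet testSet) {
    // Run every row through the network and print actual vs. desired output
    for (DataSetRow row : testSet.getRows()) {
        neuralNet.setInput(row.getInput());
        neuralNet.calculate();
        System.out.println("Input: " + Arrays.toString(row.getInput())
                + " | Output: " + Arrays.toString(neuralNet.getOutput())
                + " | Desired: " + Arrays.toString(row.getDesiredOutput()));
    }
}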
 
Example #9
Source File: IonosphereSample.java    From NeurophFramework with Apache License 2.0
@Override
public void handleLearningEvent(LearningEvent event) {
    BackPropagation bp = (BackPropagation) event.getSource();
    if (event.getEventType().equals(LearningEvent.Type.LEARNING_STOPPED)) {
        double error = bp.getTotalNetworkError();
        System.out.println("Training completed in " + bp.getCurrentIteration() + " iterations, ");
        System.out.println("With total error: " + formatDecimalNumber(error));
    } else {
        System.out.println("Iteration: " + bp.getCurrentIteration() + " | Network error: " + bp.getTotalNetworkError());
    }
}
 
Example #10
Source File: IonosphereSample2.java    From NeurophFramework with Apache License 2.0
@Override
public void handleLearningEvent(LearningEvent event) {
    BackPropagation bp = (BackPropagation) event.getSource();
    if (event.getEventType().equals(LearningEvent.Type.LEARNING_STOPPED)) {
        double error = bp.getTotalNetworkError();
        System.out.println("Training completed in " + bp.getCurrentIteration() + " iterations, ");
        System.out.println("With total error: " + formatDecimalNumber(error));
    } else {
        System.out.println("Iteration: " + bp.getCurrentIteration() + " | Network error: " + bp.getTotalNetworkError());
    }
}
 
Example #11
Source File: DiabetesSample.java    From NeurophFramework with Apache License 2.0
@Override
public void handleLearningEvent(LearningEvent event) {
    BackPropagation bp = (BackPropagation) event.getSource();
    if (event.getEventType().equals(LearningEvent.Type.LEARNING_STOPPED)) {
        double error = bp.getTotalNetworkError();
        System.out.println("Training completed in " + bp.getCurrentIteration() + " iterations, ");
        System.out.println("With total error: " + formatDecimalNumber(error));
    } else {
        System.out.println("Iteration: " + bp.getCurrentIteration() + " | Network error: " + bp.getTotalNetworkError());
    }
}
 
Example #12
Source File: CnnMNIST.java    From NeurophFramework with Apache License 2.0
public void handleLearningEvent(LearningEvent event) {
    BackPropagation bp = (BackPropagation) event.getSource();
    LOG.info("Epoch no#: [{}]. Error [{}]", bp.getCurrentIteration(), bp.getTotalNetworkError());
    LOG.info("Epoch execution time: {} sec", (System.currentTimeMillis() - start) / 1000.0);
    // neuralNetwork.save(bp.getCurrentIteration() + "_MNIST_CNN-MIC.nnet");

    start = System.currentTimeMillis();
    // if (bp.getCurrentIteration() % 5 == 0)
    //     Evaluation.runFullEvaluation(neuralNetwork, testSet);
}
 
Example #13
Source File: GermanCreditDataSample.java    From NeurophFramework with Apache License 2.0
@Override
public void handleLearningEvent(LearningEvent event) {
    BackPropagation bp = (BackPropagation) event.getSource();
    if (event.getEventType().equals(LearningEvent.Type.LEARNING_STOPPED)) {
        double error = bp.getTotalNetworkError();
        System.out.println("Training completed in " + bp.getCurrentIteration() + " iterations, ");
        System.out.println("With total error: " + formatDecimalNumber(error));
    } else {
        System.out.println("Iteration: " + bp.getCurrentIteration() + " | Network error: " + bp.getTotalNetworkError());
    }
}
 
Example #14
Source File: IrisOptimization.java    From NeurophFramework with Apache License 2.0
public static void main(String[] args) {
    String inputFileName = "/iris_data.txt";

    DataSet irisDataSet = DataSet.createFromFile(inputFileName, 4, 3, ",", false);
    BackPropagation learningRule = createLearningRule();

    NeuralNetwork neuralNet = new MultilayerPerceptronOptimazer<>()
            .withLearningRule(learningRule)
            .createOptimalModel(irisDataSet);

    neuralNet.learn(irisDataSet);
    Evaluation.runFullEvaluation(neuralNet, irisDataSet);

}
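
createLearningRule() is defined elsewhere in IrisOptimization and not shown here. A plausible sketch, using only setters that appear in the other examples on this page (all parameter values below are assumptions):

private static BackPropagation createLearningRule() {
    BackPropagation learningRule = new BackPropagation();
    learningRule.setLearningRate(0.5);   // assumed value
    learningRule.setMaxError(0.01);      // assumed value
    learningRule.setMaxIterations(5000); // assumed value
    return learningRule;
}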
 
Example #15
Source File: BrestCancerSample.java    From NeurophFramework with Apache License 2.0
@Override
public void handleLearningEvent(LearningEvent event) {
    BackPropagation bp = (BackPropagation) event.getSource();
    if (event.getEventType().equals(LearningEvent.Type.LEARNING_STOPPED)) {
        double error = bp.getTotalNetworkError();
        System.out.println("Training completed in " + bp.getCurrentIteration() + " iterations, ");
        System.out.println("With total error: " + formatDecimalNumber(error));
    } else {
        System.out.println("Iteration: " + bp.getCurrentIteration() + " | Network error: " + bp.getTotalNetworkError());
    }
}
 
Example #16
Source File: BackpropagationTraining.java    From NeurophFramework with Apache License 2.0
/**
 * Method that set up learning rule with given settings, learns dataset and
 * creates statistics from results of the test
 */
@Override
public void testNeuralNet() {
    BackPropagation bp = (BackPropagation) this.setParameters();
    this.getNeuralNet().setLearningRule(bp);
    this.getNeuralNet().learn(this.getDataset());
    this.getStats().addData(new TrainingResult(bp.getCurrentIteration(), bp.getTotalNetworkError(), createMatrix()));
    this.getStats().calculateParameters();

}
 
Example #17
Source File: BackpropagationTraining.java    From NeurophFramework with Apache License 2.0
/**
 * Create instance of learning rule and setup given parameters
 * @return returns learning rule with predefined parameters
 */
@Override
public LearningRule setParameters() {
    BackPropagation bp = new BackPropagation();
    bp.setLearningRate(getSettings().getLearningRate());
    bp.setMaxError(getSettings().getMaxError());
    bp.setBatchMode(getSettings().isBatchMode());
    bp.setMaxIterations(getSettings().getMaxIterations());
    return bp;
}
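
Examples #16 and #17 come from the same BackpropagationTraining class: setParameters() builds the configured BackPropagation instance, and testNeuralNet() installs it on the network before training and collecting statistics.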
 
Example #18
Source File: BreastCancerSample.java    From NeurophFramework with Apache License 2.0
@Override
public void handleLearningEvent(LearningEvent event) {
    BackPropagation bp = (BackPropagation) event.getSource();
    if (event.getEventType().equals(LearningEvent.Type.LEARNING_STOPPED)) {
        double error = bp.getTotalNetworkError();
        System.out.println("Training completed in " + bp.getCurrentIteration() + " iterations, ");
        System.out.println("With total error: " + formatDecimalNumber(error));
    } else {
        System.out.println("Iteration: " + bp.getCurrentIteration() + " | Network error: " + bp.getTotalNetworkError());
    }
}
 
Example #19
Source File: IrisClassificationSample.java    From NeurophFramework with Apache License 2.0
/**
 *  Runs this sample
 */
public static void main(String[] args) {    
    // get the path to file with data
    String inputFileName = "data_sets/iris_data_normalised.txt";
    
    // create MultiLayerPerceptron neural network
    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(4, 16, 3);
    // create training set from file
    DataSet irisDataSet = DataSet.createFromFile(inputFileName, 4, 3, ",");

    // attach learning listener to print out info about error at each iteration
    neuralNet.getLearningRule().addListener((event)->{
        BackPropagation bp = (BackPropagation) event.getSource();
        System.out.println("Current iteration: " + bp.getCurrentIteration());
        System.out.println("Error: " + bp.getTotalNetworkError());        
    });
    
    neuralNet.getLearningRule().setLearningRate(0.5);
    neuralNet.getLearningRule().setMaxError(0.01);
    neuralNet.getLearningRule().setMaxIterations(30000);

    // train the network with training set
    neuralNet.learn(irisDataSet);

    neuralNet.save("irisNet.nnet");
    
    System.out.println("Done training.");
    System.out.println("Testing network...");
}
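
The example saves the trained model but exits before actually testing it. A minimal follow-up sketch, assuming the irisNet.nnet file written above; NeuralNetwork.createFromFile and the setInput/calculate/getOutput cycle are standard Neuroph API:

NeuralNetwork<BackPropagation> loadedNet = NeuralNetwork.createFromFile("irisNet.nnet");
for (DataSetRow row : irisDataSet.getRows()) {
    loadedNet.setInput(row.getInput());
    loadedNet.calculate();
    System.out.println("Output: " + Arrays.toString(loadedNet.getOutput()));
}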
 
Example #20
Source File: TrainNetwork.java    From NeurophFramework with Apache License 2.0
@Override
public void handleLearningEvent(LearningEvent event) {
    BackPropagation bp = (BackPropagation) event.getSource();
    if (event.getEventType().equals(LearningEvent.Type.LEARNING_STOPPED)) {
        double error = bp.getTotalNetworkError();
        System.out.println("Training completed in " + bp.getCurrentIteration() + " iterations, ");
        System.out.println("With total error: " + formatDecimalNumber(error));
    } else {
        System.out.println("Iteration: " + bp.getCurrentIteration() + " | Network error: " + bp.getTotalNetworkError());

    }
}
 
Example #21
Source File: PredictingTheReligionSample.java    From NeurophFramework with Apache License 2.0
public void handleLearningEvent(LearningEvent event) {
    BackPropagation bp = (BackPropagation) event.getSource();
    System.out.println(bp.getCurrentIteration() + ". iteration | Total network error: " + bp.getTotalNetworkError());
}
 
Example #22
Source File: ForestFiresSample.java    From NeurophFramework with Apache License 2.0
public void handleLearningEvent(LearningEvent event) {
    BackPropagation bp = (BackPropagation) event.getSource();
    System.out.println(bp.getCurrentIteration() + ". iteration | Total network error: " + bp.getTotalNetworkError());
}
 
Example #23
Source File: ConceptLearningAndClassificationSample.java    From NeurophFramework with Apache License 2.0
@Override
public void handleLearningEvent(LearningEvent event) {
    BackPropagation bp = (BackPropagation) event.getSource();
    System.out.println(bp.getCurrentIteration() + ". iteration | Total network error: " + bp.getTotalNetworkError());
}
 
Example #24
Source File: AnimalsClassificationSample.java    From NeurophFramework with Apache License 2.0
@Override
public void handleLearningEvent(LearningEvent event) {
    BackPropagation bp = (BackPropagation) event.getSource();
    System.out.println(bp.getCurrentIteration() + ". iteration | Total network error: " + bp.getTotalNetworkError());
}
 
Example #25
Source File: ShuttleLandingControlSample.java    From NeurophFramework with Apache License 2.0
@Override
public void handleLearningEvent(LearningEvent event) {
    BackPropagation bp = (BackPropagation) event.getSource();
    System.out.println(bp.getCurrentIteration() + ". iteration | Total network error: " + bp.getTotalNetworkError());
}
 
Example #26
Source File: GlassIdentificationSample.java    From NeurophFramework with Apache License 2.0
@Override
public void handleLearningEvent(LearningEvent event) {
    BackPropagation bp = (BackPropagation) event.getSource();
    System.out.println(bp.getCurrentIteration() + ". iteration | Total network error: " + bp.getTotalNetworkError());
}
 
Example #27
Source File: CarEvaluationSample.java    From NeurophFramework with Apache License 2.0
@Override
public void handleLearningEvent(LearningEvent event) {
    BackPropagation bp = (BackPropagation) event.getSource();
    System.out.println(bp.getCurrentIteration() + ". iteration | Total network error: " + bp.getTotalNetworkError());
}
 
Example #28
Source File: LensesClassificationSample.java    From NeurophFramework with Apache License 2.0
@Override
public void handleLearningEvent(LearningEvent event) {
    BackPropagation bp = (BackPropagation) event.getSource();
    System.out.println(bp.getCurrentIteration() + ". iteration | Total network error: " + bp.getTotalNetworkError());
}
 
Example #29
Source File: AutoTrainer.java    From NeurophFramework with Apache License 2.0
/**
     * Trains a network for each generated combination of training settings.
     * Results can be retrieved by calling the getResults() method.
     *
     * @param dataSet the data set to train (and optionally test) on
     */
    public void train(DataSet dataSet) { // TODO: maybe return the TrainingSettings that works best for the given dataset?
        generateTrainingSettings();
        List<TrainingResult> statResults = null;
        DataSet trainingSet, testSet; // validationSet;

        if (splitTrainTest) {
            DataSet[] dataSplit = dataSet.split(splitPercentage, 100 - splitPercentage); // note: Maven still not working for neuroph 2.92
            trainingSet = dataSplit[0];
            testSet = dataSplit[1];
        } else {
            trainingSet = dataSet;
            testSet = dataSet;
        }

        if (generateStatistics) {
            statResults = new ArrayList<>();
        }

        int trainingNo = 0;
        for (TrainingSettings trainingSetting : trainingSettingsList) {
            System.out.println("-----------------------------------------------------------------------------------");
            trainingNo++;
            System.out.println("##TRAINING: " + trainingNo);
            trainingSetting.setTrainingSet(splitPercentage);
            trainingSetting.setTestSet(100 - splitPercentage);
            //int subtrainNo = 0;

            for (int subtrainNo = 1; subtrainNo <= repeat; subtrainNo++) {
                System.out.println("#SubTraining: " + subtrainNo);

                MultiLayerPerceptron neuralNet
                        = new MultiLayerPerceptron(dataSet.getInputSize(), trainingSetting.getHiddenNeurons(), dataSet.getOutputSize());

                BackPropagation bp = neuralNet.getLearningRule();

                bp.setLearningRate(trainingSetting.getLearningRate());
                bp.setMaxError(trainingSetting.getMaxError());
                bp.setMaxIterations(trainingSetting.getMaxIterations());

                neuralNet.learn(trainingSet);
//                  testNeuralNetwork(neuralNet, testSet); // not implemented
                ConfusionMatrix cm = new ConfusionMatrix(new String[]{""});
                TrainingResult result = new TrainingResult(trainingSetting, bp.getTotalNetworkError(), bp.getCurrentIteration(),cm);
                System.out.println(subtrainNo + ") iterations: " + bp.getCurrentIteration());

                if (generateStatistics) {
                    statResults.add(result);
                } else {
                    results.add(result);
                }

            }

            if (generateStatistics) {
                TrainingResult trainingStats = calculateTrainingStatistics(trainingSetting, statResults);
                results.add(trainingStats);
                statResults.clear();
            }

        }

    }
 
Example #30
Source File: SegmentChallengeSample.java    From NeurophFramework with Apache License 2.0
public void run() {

        System.out.println("Creating training and test set from file...");
        String dataSetFile = "data_sets/segment challenge.txt";
        String testSetFileName = "data_sets/segment test.txt";
        int inputsCount = 19;
        int outputsCount = 7;

        //Create training data set from file
        DataSet trainingSet = DataSet.createFromFile(dataSetFile, inputsCount, outputsCount, ",");
        System.out.println("Training set size: " + trainingSet.getRows().size());
        trainingSet.shuffle();

        //Normalizing training data set
        Normalizer normalizer = new MaxNormalizer(trainingSet);
        normalizer.normalize(trainingSet);

        //Create test data set from file
        DataSet testSet = DataSet.createFromFile(testSetFileName, inputsCount, outputsCount, ",");
        System.out.println("Test set size: " + testSet.getRows().size());
        System.out.println("--------------------------------------------------");
        testSet.shuffle();

        //Normalize the test data set using the same normalizer
        normalizer.normalize(testSet);

        System.out.println("Creating neural network...");
        //Create MultiLayerPerceptron neural network
        MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 17, 10, outputsCount);
        //attach listener to learning rule
        MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule();
        learningRule.addListener((event) -> {
            BackPropagation bp = (BackPropagation) event.getSource();
            if (event.getEventType().equals(LearningEvent.Type.LEARNING_STOPPED)) {
                double error = bp.getTotalNetworkError();
                System.out.println("Training completed in " + bp.getCurrentIteration() + " iterations, ");
                System.out.println("With total error: " + formatDecimalNumber(error));
            } else {
                System.out.println("Iteration: " + bp.getCurrentIteration() + " | Network error: " + bp.getTotalNetworkError());
            }
        });

        learningRule.setLearningRate(0.01);
        learningRule.setMaxError(0.001);
        learningRule.setMaxIterations(12000);

        System.out.println("Training network...");
        //train the network with training set
        neuralNet.learn(trainingSet);

        System.out.println("Testing network...\n\n");
        testNeuralNetwork(neuralNet, testSet);

        System.out.println("Done.");
        System.out.println("**************************************************");
    }