Java Code Examples for org.neuroph.nnet.MultiLayerPerceptron#getLearningRule()

The following examples show how to use org.neuroph.nnet.MultiLayerPerceptron#getLearningRule() . You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: BalanceScaleSample.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
public void run() {

    final String inputFile = "data_sets/balance_scale_data.txt";
    final int numInputs = 20;
    final int numOutputs = 3;

    System.out.println("Creating training set...");
    // load tab-separated rows (no header) into a Neuroph data set
    DataSet trainingData = DataSet.createFromFile(inputFile, numInputs, numOutputs, "\t", false);

    System.out.println("Creating neural network...");
    // topology: 20 inputs -> 22 hidden neurons -> 3 outputs
    MultiLayerPerceptron network = new MultiLayerPerceptron(numInputs, 22, numOutputs);

    // register this sample as listener so training progress can be observed
    MomentumBackpropagation backprop = (MomentumBackpropagation) network.getLearningRule();
    backprop.addListener(this);

    // training parameters: learning rate and stop-on-error threshold
    backprop.setLearningRate(0.2);
    backprop.setMaxError(0.01);

    System.out.println("Training network...");
    network.learn(trainingData);

    System.out.println("Training completed.");
    System.out.println("Testing network...");
    testNeuralNetwork(network, trainingData);

    System.out.println("Saving network");
    // persist the trained network to disk
    network.save("MyNeuralNetBalanceScale.nnet");

    System.out.println("Done.");
}
 
Example 2
Source File: CarEvaluationSample.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
public void run() {

    final String inputFile = "data_sets/car_evaluation_data.txt";
    final int numInputs = 21;
    final int numOutputs = 4;

    System.out.println("Creating training set...");
    // load tab-separated rows (no header) into a Neuroph data set
    DataSet trainingData = DataSet.createFromFile(inputFile, numInputs, numOutputs, "\t", false);

    System.out.println("Creating neural network...");
    // topology: 21 inputs -> 22 hidden neurons -> 4 outputs
    MultiLayerPerceptron network = new MultiLayerPerceptron(numInputs, 22, numOutputs);

    // observe training progress through this sample's listener callback
    MomentumBackpropagation backprop = (MomentumBackpropagation) network.getLearningRule();
    backprop.addListener(this);

    // training parameters
    backprop.setLearningRate(0.2);
    backprop.setMaxError(0.01);

    System.out.println("Training network...");
    network.learn(trainingData);

    System.out.println("Training completed.");
    System.out.println("Testing network...");
    testNeuralNetwork(network, trainingData);

    System.out.println("Saving network");
    // persist the trained network to disk
    network.save("MyNeuralNetCarEvaluation.nnet");

    System.out.println("Done.");
}
 
Example 3
Source File: WineClassificationSample.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
public void run() {

    final String inputFile = "data_sets/wine_classification_data.txt";
    final int numInputs = 13;
    final int numOutputs = 3;

    System.out.println("Creating training set...");
    // load tab-separated rows (no header) into a Neuroph data set
    DataSet trainingData = DataSet.createFromFile(inputFile, numInputs, numOutputs, "\t", false);

    System.out.println("Creating neural network...");
    // topology: 13 inputs -> 22 hidden neurons -> 3 outputs
    MultiLayerPerceptron network = new MultiLayerPerceptron(numInputs, 22, numOutputs);

    // observe training progress through this sample's listener callback
    MomentumBackpropagation backprop = (MomentumBackpropagation) network.getLearningRule();
    backprop.addListener(this);

    // training parameters
    backprop.setLearningRate(0.2);
    backprop.setMaxError(0.01);

    System.out.println("Training network...");
    network.learn(trainingData);

    System.out.println("Training completed.");
    System.out.println("Testing network...");
    testNeuralNetwork(network, trainingData);

    System.out.println("Saving network");
    // persist the trained network to disk
    network.save("MyNeuralNetWineClassification.nnet");

    System.out.println("Done.");
}
 
Example 4
Source File: AnimalsClassificationSample.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
public void run() {

    final String inputFile = "data_sets/animals_data.txt";
    final int numInputs = 20;
    final int numOutputs = 7;

    System.out.println("Creating training set...");
    // load tab-separated rows; last flag true -> file has column names to load
    DataSet trainingData = DataSet.createFromFile(inputFile, numInputs, numOutputs, "\t", true);

    System.out.println("Creating neural network...");
    // topology: 20 inputs -> 22 hidden neurons -> 7 outputs
    MultiLayerPerceptron network = new MultiLayerPerceptron(numInputs, 22, numOutputs);

    // observe training progress through this sample's listener callback
    MomentumBackpropagation backprop = (MomentumBackpropagation) network.getLearningRule();
    backprop.addListener(this);

    // training parameters
    backprop.setLearningRate(0.2);
    backprop.setMaxError(0.01);

    System.out.println("Training network...");
    network.learn(trainingData);

    System.out.println("Training completed.");
    System.out.println("Testing network...");
    testNeuralNetwork(network, trainingData);

    System.out.println("Saving network");
    // persist the trained network to disk
    network.save("MyNeuralNetAnimals.nnet");

    System.out.println("Done.");
}
 
Example 5
Source File: XorResilientPropagationSample.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
/**
 * Runs this sample: trains a sigmoid MLP on the XOR truth table using
 * resilient propagation and reports the iteration count.
 */
public  void run() {

    // the four rows of the logical XOR function (2 inputs, 1 output)
    DataSet xorData = new DataSet(2, 1);
    xorData.add(new DataSetRow(new double[]{0, 0}, new double[]{0}));
    xorData.add(new DataSetRow(new double[]{0, 1}, new double[]{1}));
    xorData.add(new DataSetRow(new double[]{1, 0}, new double[]{1}));
    xorData.add(new DataSetRow(new double[]{1, 1}, new double[]{0}));

    // sigmoid MLP with a single hidden layer of 3 neurons
    MultiLayerPerceptron network = new MultiLayerPerceptron(TransferFunctionType.SIGMOID, 2, 3, 1);

    // replace the default learning rule with resilient propagation
    network.setLearningRule(new ResilientPropagation());
    LearningRule rule = network.getLearningRule();
    rule.addListener(this);

    System.out.println("Training neural network...");
    network.learn(xorData);

    // report how many iterations the training took
    int iterations = ((SupervisedLearning) network.getLearningRule()).getCurrentIteration();
    System.out.println("Learned in "+iterations+" iterations");

    System.out.println("Testing trained neural network");
    testNeuralNetwork(network, xorData);

}
 
Example 6
Source File: Ionosphere.java    From NeurophFramework with Apache License 2.0 4 votes vote down vote up
public void run() {
    System.out.println("Creating training set...");
    final String dataFile = "data_sets/ionospheredata.txt";
    final int numInputs = 34;
    final int numOutputs = 1;

    // comma-separated file, no header row
    DataSet allData = DataSet.createFromFile(dataFile, numInputs, numOutputs, ",", false);

    // 60/40 train/test split
    DataSet[] parts = allData.split(0.6, 0.4);
    DataSet trainData = parts[0];
    DataSet testData = parts[1];

    // max-normalize both subsets using statistics from the training part only
    Normalizer normalizer = new MaxNormalizer(trainData);
    normalizer.normalize(trainData);
    normalizer.normalize(testData);

    System.out.println("Creating neural network...");
    // two hidden layers of 30 and 25 neurons
    MultiLayerPerceptron network = new MultiLayerPerceptron(numInputs, 30, 25, numOutputs);

    network.setLearningRule(new MomentumBackpropagation());
    MomentumBackpropagation backprop = (MomentumBackpropagation) network.getLearningRule();
    backprop.addListener(this);

    // training parameters
    backprop.setLearningRate(0.1);
    backprop.setMaxError(0.01);
    System.out.println("Training network...");
    network.learn(trainData);
    System.out.println("Training completed.");
    System.out.println("Testing network...");

    System.out.println("Network performance on the test set");
    evaluate(network, testData);

    System.out.println("Saving network");
    // persist the trained network to disk
    network.save("nn1.nnet");

    System.out.println("Done.");

    System.out.println();
    System.out.println("Network outputs for test set");
    testNeuralNetwork(network, testData);
}
 
Example 7
Source File: BrestCancerSample.java    From NeurophFramework with Apache License 2.0 4 votes vote down vote up
public void run() {

    System.out.println("Creating training and test set from file...");
    final String dataFile = "data_sets/breast cancer.txt";
    final int numInputs = 30;
    // NOTE(review): original author's note suggests this could use a single
    // output for binary classification by transforming the data set — TODO confirm
    final int numOutputs = 2;

    // comma-separated file
    DataSet allData = DataSet.createFromFile(dataFile, numInputs, numOutputs, ",");

    // 70/30 train/test split
    DataSet[] parts = allData.split(0.7, 0.3);
    DataSet trainData = parts[0];
    DataSet testData = parts[1];

    // max-normalize both subsets using statistics from the training part only
    Normalizer normalizer = new MaxNormalizer(trainData);
    normalizer.normalize(trainData);
    normalizer.normalize(testData);

    System.out.println("Creating neural network...");
    // topology: 30 inputs -> 16 hidden neurons -> 2 outputs
    MultiLayerPerceptron network = new MultiLayerPerceptron(numInputs, 16, numOutputs);

    // observe training progress through this sample's listener callback
    MomentumBackpropagation backprop = (MomentumBackpropagation) network.getLearningRule();
    backprop.addListener(this);

    // training parameters
    backprop.setLearningRate(0.3);
    backprop.setMaxError(0.001);
    backprop.setMaxIterations(5000);

    System.out.println("Training network...");
    network.learn(trainData);

    System.out.println("Testing network...\n\n");
    testNeuralNetwork(network, testData);

    System.out.println("Done.");

    System.out.println("**************************************************");

}
 
Example 8
Source File: GermanCreditDataSample.java    From NeurophFramework with Apache License 2.0 4 votes vote down vote up
public void run() {

    System.out.println("Creating training and test set from file...");
    final String dataFile = "data_sets/german credit data.txt";
    final int numInputs = 24;
    final int numOutputs = 2;

    // space-separated file; shuffle before splitting
    DataSet allData = DataSet.createFromFile(dataFile, numInputs, numOutputs, " ");
    allData.shuffle();

    // NOTE: normalization here uses the whole data set (before the split)
    Normalizer normalizer = new MaxNormalizer(allData);
    normalizer.normalize(allData);

    // 70/30 train/test split
    DataSet[] parts = allData.createTrainingAndTestSubsets(70, 30);
    DataSet trainData = parts[0];
    DataSet testData = parts[1];

    System.out.println("Creating neural network...");
    // two hidden layers of 12 and 6 neurons
    MultiLayerPerceptron network = new MultiLayerPerceptron(numInputs, 12, 6, numOutputs);

    // observe training progress through this sample's listener callback
    MomentumBackpropagation backprop = (MomentumBackpropagation) network.getLearningRule();
    backprop.addListener(this);

    // training parameters
    backprop.setLearningRate(0.01);
    backprop.setMaxError(0.001);
    backprop.setMaxIterations(10000);

    System.out.println("Training network...");
    network.learn(trainData);

    System.out.println("Testing network...\n\n");
    testNeuralNetwork(network, testData);

    System.out.println("Done.");

    System.out.println("**************************************************");

}
 
Example 9
Source File: Sonar.java    From NeurophFramework with Apache License 2.0 4 votes vote down vote up
public void run() {
    System.out.println("Creating training set...");
    final String dataFile = "data_sets/sonardata.txt";
    final int numInputs = 60;
    final int numOutputs = 1;

    // comma-separated file, no header row
    DataSet allData = DataSet.createFromFile(dataFile, numInputs, numOutputs, ",", false);

    // 60/40 train/test split
    DataSet[] parts = allData.split(0.6, 0.4);
    DataSet trainData = parts[0];
    DataSet testData = parts[1];

    // max-normalize both subsets using statistics from the training part only
    Normalizer normalizer = new MaxNormalizer(trainData);
    normalizer.normalize(trainData);
    normalizer.normalize(testData);

    System.out.println("Creating neural network...");
    // two hidden layers of 15 and 10 neurons
    MultiLayerPerceptron network = new MultiLayerPerceptron(numInputs, 15, 10, numOutputs);

    network.setLearningRule(new MomentumBackpropagation());
    MomentumBackpropagation backprop = (MomentumBackpropagation) network.getLearningRule();
    backprop.addListener(this);

    // training parameters
    backprop.setLearningRate(0.1);
    backprop.setMaxError(0.01);
    System.out.println("Training network...");
    network.learn(trainData);
    System.out.println("Training completed.");
    System.out.println("Testing network...");

    System.out.println("Network performance on the test set");
    evaluate(network, testData);

    System.out.println("Saving network");
    // persist the trained network to disk
    network.save("nn1.nnet");

    System.out.println("Done.");

    System.out.println();
    System.out.println("Network outputs for test set");
    testNeuralNetwork(network, testData);
}
 
Example 10
Source File: Abalone.java    From NeurophFramework with Apache License 2.0 4 votes vote down vote up
public void run() {
    System.out.println("Creating training set...");
    final String dataFile = "data_sets/abalonerings.txt";
    final int numInputs = 8;
    final int numOutputs = 29;

    // tab-separated file; last flag true -> file has column names to load
    DataSet allData = DataSet.createFromFile(dataFile, numInputs, numOutputs, "\t", true);

    // 60/40 train/test split
    DataSet[] parts = allData.split(0.6, 0.4);
    DataSet trainData = parts[0];
    DataSet testData = parts[1];

    // max-normalize both subsets using statistics from the training part only
    Normalizer normalizer = new MaxNormalizer(trainData);
    normalizer.normalize(trainData);
    normalizer.normalize(testData);

    System.out.println("Creating neural network...");
    // two hidden layers of 15 and 10 neurons
    MultiLayerPerceptron network = new MultiLayerPerceptron(numInputs, 15, 10, numOutputs);

    network.setLearningRule(new MomentumBackpropagation());
    MomentumBackpropagation backprop = (MomentumBackpropagation) network.getLearningRule();
    backprop.addListener(this);

    // training parameters: learning rate and iteration cap
    // (no max-error stop is configured here, unlike the other samples)
    backprop.setLearningRate(0.1);
    backprop.setMaxIterations(5000);

    System.out.println("Training network...");
    network.learn(trainData);
    System.out.println("Training completed.");
    System.out.println("Testing network...");

    System.out.println("Network performance on the test set");
    evaluate(network, testData);

    System.out.println("Saving network");
    // persist the trained network to disk
    network.save("nn1.nnet");

    System.out.println("Done.");

    System.out.println();
    System.out.println("Network outputs for test set");
    testNeuralNetwork(network, testData);
}
 
Example 11
Source File: Banknote.java    From NeurophFramework with Apache License 2.0 4 votes vote down vote up
public void run() {
    System.out.println("Creating training set...");
    final String dataFile = "data_sets/databanknote.txt";
    final int numInputs = 4;
    final int numOutputs = 1;

    // comma-separated file, no header row; 60/40 train/test split
    DataSet allData = DataSet.createFromFile(dataFile, numInputs, numOutputs, ",", false);
    DataSet[] parts = allData.split(0.6, 0.4);
    DataSet trainData = parts[0];
    DataSet testData = parts[1];

    // max-normalize both subsets using statistics from the training part only
    Normalizer normalizer = new MaxNormalizer(trainData);
    normalizer.normalize(trainData);
    normalizer.normalize(testData);

    System.out.println("Creating neural network...");
    // tanh MLP with a single hidden neuron
    MultiLayerPerceptron network = new MultiLayerPerceptron(TransferFunctionType.TANH, numInputs, 1, numOutputs);

    network.setLearningRule(new MomentumBackpropagation());
    MomentumBackpropagation backprop = (MomentumBackpropagation) network.getLearningRule();
    backprop.addListener(this);

    // training parameters
    backprop.setLearningRate(0.1);
    backprop.setMaxError(0.01);
    System.out.println("Training network...");
    network.learn(trainData);
    System.out.println("Training completed.");
    System.out.println("Testing network...");

    System.out.println("Network performance on the test set");
    evaluate(network, testData);

    System.out.println("Saving network");
    // persist the trained network to disk
    network.save("nn1.nnet");

    System.out.println("Done.");

    System.out.println();
    System.out.println("Network outputs for test set");
    testNeuralNetwork(network, testData);
}
 
Example 12
Source File: BostonHousePrice.java    From NeurophFramework with Apache License 2.0 4 votes vote down vote up
public void run() {
    System.out.println("Creating training set...");
    final String dataFile = "data_sets/bostonhouse.txt";
    final int numInputs = 13;
    final int numOutputs = 1;

    // comma-separated file; 60/40 train/test split
    DataSet allData = DataSet.createFromFile(dataFile, numInputs, numOutputs, ",");
    DataSet[] parts = allData.split(0.6, 0.4);
    DataSet trainData = parts[0];
    DataSet testData = parts[1];

    // max-normalize both subsets using statistics from the training part only
    Normalizer normalizer = new MaxNormalizer(trainData);
    normalizer.normalize(trainData);
    normalizer.normalize(testData);
    System.out.println("Creating neural network...");
    // tanh MLP with two hidden layers of 2 neurons each
    MultiLayerPerceptron network = new MultiLayerPerceptron(TransferFunctionType.TANH, numInputs, 2, 2, numOutputs);

    network.setLearningRule(new MomentumBackpropagation());
    MomentumBackpropagation backprop = (MomentumBackpropagation) network.getLearningRule();
    backprop.addListener(this);

    // training uses the MomentumBackpropagation defaults — no explicit
    // learning rate / max error is configured in this sample
    System.out.println("Training network...");
    network.learn(trainData);
    System.out.println("Training completed.");
    System.out.println("Testing network...");

    System.out.println("Network performance on the test set");
    evaluate(network, testData);

    System.out.println("Saving network");
    // persist the trained network to disk
    network.save("nn1.nnet");

    System.out.println("Done.");

    System.out.println();
    System.out.println("Network outputs for test set");
    testNeuralNetwork(network, testData);
}
 
Example 13
Source File: Banknote.java    From NeurophFramework with Apache License 2.0 4 votes vote down vote up
public void run() {
    System.out.println("Creating data set...");
    final String dataFile = "data_sets/ml10standard/databanknote.txt";
    final int numInputs = 4;
    final int numOutputs = 1;

    // comma-separated file, no header row; 60/40 train/test split
    DataSet allData = DataSet.createFromFile(dataFile, numInputs, numOutputs, ",", false);
    DataSet[] parts = allData.split(0.6, 0.4);
    DataSet trainData = parts[0];
    DataSet testData = parts[1];

    // max-normalize both subsets using statistics from the training part only
    Normalizer normalizer = new MaxNormalizer(trainData);
    normalizer.normalize(trainData);
    normalizer.normalize(testData);

    System.out.println("Creating neural network...");
    // tanh MLP with a single hidden neuron
    MultiLayerPerceptron network = new MultiLayerPerceptron(TransferFunctionType.TANH, numInputs, 1, numOutputs);

    network.setLearningRule(new MomentumBackpropagation());
    MomentumBackpropagation backprop = (MomentumBackpropagation) network.getLearningRule();
    // log iteration number and total error after each training iteration
    backprop.addListener((event) -> {
        MomentumBackpropagation source = (MomentumBackpropagation) event.getSource();
        System.out.println(source.getCurrentIteration() + ". iteration | Total network error: " + source.getTotalNetworkError());
    });

    // training parameters
    backprop.setLearningRate(0.1);
    backprop.setMaxError(0.01);
    System.out.println("Training network...");
    network.learn(trainData);
    System.out.println("Training completed.");
    System.out.println("Testing network...");

    System.out.println("Network performance on the test set");
    evaluate(network, testData);

    System.out.println("Saving network");
    // persist the trained network to disk
    network.save("nn1.nnet");

    System.out.println("Done.");
}
 
Example 14
Source File: BostonHousePrice.java    From NeurophFramework with Apache License 2.0 4 votes vote down vote up
public void run() {
    System.out.println("Creating data set...");
    final String dataFile = "data_sets/ml10standard/bostonhouse.txt";
    final int numInputs = 13;
    final int numOutputs = 1;

    // comma-separated file
    DataSet allData = DataSet.createFromFile(dataFile, numInputs, numOutputs, ",");

    // 60/40 train/test split
    DataSet[] parts = allData.split(0.6, 0.4);
    DataSet trainData = parts[0];
    DataSet testData = parts[1];

    // max-normalize both subsets using statistics from the training part only
    Normalizer normalizer = new MaxNormalizer(trainData);
    normalizer.normalize(trainData);
    normalizer.normalize(testData);

    System.out.println("Creating neural network...");
    // tanh MLP with two hidden layers of 2 neurons each
    MultiLayerPerceptron network = new MultiLayerPerceptron(TransferFunctionType.TANH, numInputs, 2, 2, numOutputs);

    network.setLearningRule(new MomentumBackpropagation());
    MomentumBackpropagation backprop = (MomentumBackpropagation) network.getLearningRule();
    // log iteration number and total error after each training iteration
    backprop.addListener(event -> {
        MomentumBackpropagation source = (MomentumBackpropagation) event.getSource();
        System.out.println(source.getCurrentIteration() + ". iteration | Total network error: " + source.getTotalNetworkError());
    });

    System.out.println("Training network...");
    network.learn(trainData);
    System.out.println("Training completed.");
    System.out.println("Testing network...");

    System.out.println("Network performance on the test set");
    evaluate(network, testData);

    System.out.println("Saving network");
    // persist the trained network to disk
    network.save("nn1.nnet");

    System.out.println("Done.");

}
 
Example 15
Source File: Abalone.java    From NeurophFramework with Apache License 2.0 4 votes vote down vote up
public void run() {
    System.out.println("Creating data set...");
    final String dataFile = "data_sets/ml10standard/abalonerings.txt";
    final int numInputs = 8;
    final int numOutputs = 29;

    // tab-separated file with column names; 70/30 train/test split
    DataSet allData = DataSet.createFromFile(dataFile, numInputs, numOutputs, "\t", true);
    DataSet[] parts = allData.split(0.7, 0.3);
    DataSet trainData = parts[0];
    DataSet testData = parts[1];

    // max-normalize both subsets using statistics from the training part only
    Normalizer normalizer = new MaxNormalizer(trainData);
    normalizer.normalize(trainData);
    normalizer.normalize(testData);

    System.out.println("Creating neural network...");
    // two hidden layers of 15 and 10 neurons
    MultiLayerPerceptron network = new MultiLayerPerceptron(numInputs, 15, 10, numOutputs);

    network.setLearningRule(new MomentumBackpropagation());
    MomentumBackpropagation backprop = (MomentumBackpropagation) network.getLearningRule();
    // log iteration number and total error after each training iteration
    backprop.addListener((event) -> {
        MomentumBackpropagation source = (MomentumBackpropagation) event.getSource();
        System.out.println(source.getCurrentIteration() + ". iteration | Total network error: " + source.getTotalNetworkError());        
    });

    // training parameters: learning rate and iteration cap (no max-error stop)
    backprop.setLearningRate(0.1);
    backprop.setMaxIterations(5000);

    System.out.println("Training network...");
    network.learn(trainData);
    System.out.println("Training completed.");
    System.out.println("Testing network...");

    System.out.println("Network performance on the test set");
    evaluate(network, testData);

    System.out.println("Saving network");
    // persist the trained network to disk
    network.save("nn1.nnet");

    System.out.println("Done.");
}
 
Example 16
Source File: SegmentChallengeSample.java    From NeurophFramework with Apache License 2.0 4 votes vote down vote up
public void run() {

    System.out.println("Creating training and test set from file...");
    final String trainFile = "data_sets/segment challenge.txt";
    final String testFile = "data_sets/segment test.txt";
    final int numInputs = 19;
    final int numOutputs = 7;

    // comma-separated training file; shuffle after loading
    DataSet trainData = DataSet.createFromFile(trainFile, numInputs, numOutputs, ",");
    System.out.println("Training set size: " + trainData.getRows().size());
    trainData.shuffle();

    // normalizer is fitted on the training data only …
    Normalizer normalizer = new MaxNormalizer(trainData);
    normalizer.normalize(trainData);

    // separate test file, also shuffled
    DataSet testData = DataSet.createFromFile(testFile, numInputs, numOutputs, ",");
    System.out.println("Test set size: " + testData.getRows().size());
    System.out.println("--------------------------------------------------");
    testData.shuffle();

    // … and then applied to the test data
    normalizer.normalize(testData);

    System.out.println("Creating neural network...");
    // two hidden layers of 17 and 10 neurons
    MultiLayerPerceptron network = new MultiLayerPerceptron(numInputs, 17, 10, numOutputs);

    // listener: per-iteration progress, plus a summary once training stops
    MomentumBackpropagation backprop = (MomentumBackpropagation) network.getLearningRule();
    backprop.addListener((event) -> {
        BackPropagation source = (BackPropagation) event.getSource();
        if (event.getEventType().equals(LearningEvent.Type.LEARNING_STOPPED)) {
            double error = source.getTotalNetworkError();
            System.out.println("Training completed in " + source.getCurrentIteration() + " iterations, ");
            System.out.println("With total error: " + formatDecimalNumber(error));
        } else {
            System.out.println("Iteration: " + source.getCurrentIteration() + " | Network error: " + source.getTotalNetworkError());
        }
    });

    // training parameters
    backprop.setLearningRate(0.01);
    backprop.setMaxError(0.001);
    backprop.setMaxIterations(12000);

    System.out.println("Training network...");
    network.learn(trainData);

    System.out.println("Testing network...\n\n");
    testNeuralNetwork(network, testData);

    System.out.println("Done.");
    System.out.println("**************************************************");
}
 
Example 17
Source File: AutoTrainer.java    From NeurophFramework with Apache License 2.0 4 votes vote down vote up
/**
     * Trains a network once for every configuration produced by
     * generateTrainingSettings(), repeating each configuration {@code repeat}
     * times. Per-run (or aggregated, when statistics are enabled) results are
     * accumulated internally; call getResults() to retrieve them.
     *
     * @param dataSet data set to train on (and, when splitting is enabled,
     *                to evaluate on)
     */
    public void train(DataSet dataSet) {// NOTE(review, translated): maybe return the TrainingSettings that works best for the given data set?
        generateTrainingSettings();
        // per-configuration results, only allocated when statistics are requested
        List<TrainingResult> statResults = null;
        DataSet trainingSet, testSet; // validationSet;

        if (splitTrainTest) {
            // split by the configured percentage
            DataSet[] dataSplit = dataSet.split(splitPercentage, 100-splitPercentage); // translated note: Maven still does not work for Neuroph 2.92
            trainingSet = dataSplit[0];
            testSet = dataSplit[1];
        } else {
            // no split requested: train and "test" on the same full data set
            trainingSet = dataSet;
            testSet = dataSet;
        }

        if (generateStatistics) {
            statResults = new ArrayList<>();
        }

        int trainingNo = 0;
        for (TrainingSettings trainingSetting : trainingSettingsList) {
            System.out.println("-----------------------------------------------------------------------------------");
            trainingNo++;
            System.out.println("##TRAINING: " + trainingNo);
            trainingSetting.setTrainingSet(splitPercentage);
            trainingSetting.setTestSet(100 - splitPercentage);
            //int subtrainNo = 0;

            // repeat each configuration to average out random weight initialization
            for (int subtrainNo = 1; subtrainNo <= repeat; subtrainNo++) {
                System.out.println("#SubTraining: " + subtrainNo);

                // fresh network per run, sized from the data set and the
                // configuration's hidden-layer spec
                MultiLayerPerceptron neuralNet
                        = new MultiLayerPerceptron(dataSet.getInputSize(), trainingSetting.getHiddenNeurons(), dataSet.getOutputSize());

                BackPropagation bp = neuralNet.getLearningRule();

                // apply the configuration's training hyper-parameters
                bp.setLearningRate(trainingSetting.getLearningRate());
                bp.setMaxError(trainingSetting.getMaxError());
                bp.setMaxIterations(trainingSetting.getMaxIterations());

                neuralNet.learn(trainingSet);
//                  testNeuralNetwork(neuralNet, testSet); // not implemented
                // NOTE(review): confusion matrix is a placeholder with no real class labels — confirm intended
                ConfusionMatrix cm = new ConfusionMatrix(new String[]{""});
                TrainingResult result = new TrainingResult(trainingSetting, bp.getTotalNetworkError(), bp.getCurrentIteration(),cm);
                System.out.println(subtrainNo + ") iterations: " + bp.getCurrentIteration());

                if (generateStatistics) {
                    statResults.add(result);
                } else {
                    results.add(result);
                }

            }

            if (generateStatistics) {
                // collapse the repeated runs into aggregate statistics for this configuration
                TrainingResult trainingStats = calculateTrainingStatistics(trainingSetting, statResults);
                results.add(trainingStats);
                statResults.clear();
            }

        }

    }
 
Example 18
Source File: WheatSeeds.java    From NeurophFramework with Apache License 2.0 4 votes vote down vote up
public void run() {
    System.out.println("Creating data set...");
    // Wheat-seeds data: 7 input attributes, 3 output classes, tab-separated.
    String dataFile = "data_sets/ml10standard/seeds.txt";
    int numInputs = 7;
    int numOutputs = 3;

    // Load the full data set from disk.
    DataSet allData = DataSet.createFromFile(dataFile, numInputs, numOutputs, "\t");

    // Partition into 60% training / 40% test.
    DataSet[] parts = allData.split(0.6, 0.4);
    DataSet train = parts[0];
    DataSet test = parts[1];

    System.out.println("Creating neural network...");
    // MLP with two hidden layers of 15 and 2 neurons.
    MultiLayerPerceptron network = new MultiLayerPerceptron(numInputs, 15, 2, numOutputs);

    // Train with momentum backpropagation; log progress every iteration.
    MomentumBackpropagation momentum = new MomentumBackpropagation();
    network.setLearningRule(momentum);
    momentum.addListener((event) -> {
        MomentumBackpropagation source = (MomentumBackpropagation) event.getSource();
        System.out.println(source.getCurrentIteration() + ". iteration | Total network error: " + source.getTotalNetworkError());
    });

    // Step size and stopping criteria.
    momentum.setLearningRate(0.1);
    momentum.setMaxError(0.01);
    momentum.setMaxIterations(5000);

    System.out.println("Training network...");
    // Fit the network on the training partition only.
    network.learn(train);
    System.out.println("Training completed.");
    System.out.println("Testing network...");

    System.out.println("Network performance on the test set");
    evaluate(network, test);

    System.out.println("Saving network");
    // Persist the trained network for later reuse.
    network.save("nn1.nnet");

    System.out.println("Done.");

    System.out.println();
    System.out.println("Network outputs for test set");
    testNeuralNetwork(network, test);
}
 
Example 19
Source File: PimaIndiansDiabetes.java    From NeurophFramework with Apache License 2.0 4 votes vote down vote up
public void run() {
    System.out.println("Creating training set...");
    // Pima Indians diabetes data: 8 inputs, 1 binary output, tab-separated, no header.
    String dataFile = "data_sets/pimadata.txt";
    int numInputs = 8;
    int numOutputs = 1;

    // Load the raw data set from disk.
    DataSet allData = DataSet.createFromFile(dataFile, numInputs, numOutputs, "\t", false);

    // Partition into 60% training / 40% test.
    DataSet[] parts = allData.split(0.6, 0.4);
    DataSet train = parts[0];
    DataSet test = parts[1];

    // Max-normalize both partitions using statistics from the training set.
    Normalizer normalizer = new MaxNormalizer(train);
    normalizer.normalize(train);
    normalizer.normalize(test);

    System.out.println("Creating neural network...");
    // Tanh MLP with two hidden layers of 15 and 5 neurons.
    MultiLayerPerceptron network = new MultiLayerPerceptron(TransferFunctionType.TANH, numInputs, 15, 5, numOutputs);

    // Train with momentum backpropagation; this object receives learning events.
    MomentumBackpropagation momentum = new MomentumBackpropagation();
    network.setLearningRule(momentum);
    momentum.addListener(this);

    // Step size and stopping criterion.
    momentum.setLearningRate(0.1);
    momentum.setMaxError(0.03);

    System.out.println("Training network...");
    // Fit the network on the normalized training partition.
    network.learn(train);
    System.out.println("Training completed.");
    System.out.println("Testing network...");

    System.out.println("Network performance on the test set");
    evaluate(network, test);

    System.out.println("Saving network");
    // Persist the trained network for later reuse.
    network.save("nn1.nnet");

    System.out.println("Done.");

    System.out.println();
    System.out.println("Network outputs for test set");
    testNeuralNetwork(network, test);
}
 
Example 20
Source File: LensesClassificationSample.java    From NeurophFramework with Apache License 2.0 3 votes vote down vote up
public void run() {
    // Lenses classification: 9 inputs, 3 output classes, space-separated, no header.
    System.out.println("Creating training set...");
    String dataSetFile = "data_sets/lenses_data.txt";
    int inputsCount = 9;
    int outputsCount = 3;

    // Load the data set from file. (Fixed: the "Creating training set..."
    // message was previously printed twice.)
    DataSet dataSet = DataSet.createFromFile(dataSetFile, inputsCount, outputsCount, " ", false);

    System.out.println("Creating neural network...");
    // MLP with a single hidden layer of 16 neurons; the default learning
    // rule for MultiLayerPerceptron is MomentumBackpropagation.
    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 16, outputsCount);

    // Attach this object as a listener so it receives learning events.
    MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule();
    learningRule.addListener(this);

    // Step size and stopping criterion.
    learningRule.setLearningRate(0.2);
    learningRule.setMaxError(0.01);

    System.out.println("Training network...");
    // Train on the full data set (no train/test split in this sample).
    neuralNet.learn(dataSet);

    System.out.println("Training completed.");
    System.out.println("Testing network...");

    testNeuralNetwork(neuralNet, dataSet);

    System.out.println("Saving network");
    // Persist the trained network for later reuse.
    neuralNet.save("MyNeuralNetLenses.nnet");

    System.out.println("Done.");
}