Java Code Examples for org.nd4j.linalg.dataset.api.preprocessor.DataNormalization#fit()

The following examples show how to use org.nd4j.linalg.dataset.api.preprocessor.DataNormalization#fit(). The examples are drawn from open source projects; you can go to the original project or source file by following the link above each example.
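
Whatever the surrounding setup, every example follows the same contract: fit() collects normalization statistics from the data without modifying it, and transform() (or a pre-processor hook registered via setPreProcessor()) applies those statistics to each DataSet. A minimal, self-contained sketch of the pattern, distilled from the examples below (the random data is illustrative only):

// Minimal fit/transform sketch; random data stands in for a real dataset
INDArray features = Nd4j.rand(100, 4);
INDArray labels = Nd4j.zeros(100, 3);
SplitTestAndTrain split = new DataSet(features, labels).splitTestAndTrain(0.8);
DataSet trainingData = split.getTrain();
DataSet testData = split.getTest();

DataNormalization normalizer = new NormalizerStandardize();
normalizer.fit(trainingData);       // collect mean/stdev from the training split only
normalizer.transform(trainingData); // normalize in place using those statistics
normalizer.transform(testData);     // apply the *training* statistics to the test split
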
Example 1
Source File: IrisFileDataSource.java    From FederatedAndroidTrainer with MIT License
private void createDataSource() throws IOException, InterruptedException {
    //First: get the dataset using the record reader. CSVRecordReader handles loading/parsing
    int numLinesToSkip = 0;
    String delimiter = ",";
    RecordReader recordReader = new CSVRecordReader(numLinesToSkip, delimiter);
    recordReader.initialize(new InputStreamInputSplit(dataFile));

    //Second: the RecordReaderDataSetIterator handles conversion to DataSet objects, ready for use in a neural network
    int labelIndex = 4;     //5 values in each row of the iris.txt CSV: 4 input features followed by an integer label (class) index. Labels are the 5th value (index 4) in each row
    int numClasses = 3;     //3 classes (types of iris flowers) in the iris data set. Classes have integer values 0, 1 or 2

    DataSetIterator iterator = new RecordReaderDataSetIterator(recordReader, batchSize, labelIndex, numClasses);
    DataSet allData = iterator.next();
    allData.shuffle();

    SplitTestAndTrain testAndTrain = allData.splitTestAndTrain(0.80);  //Use 80% of data for training

    trainingData = testAndTrain.getTrain();
    testData = testAndTrain.getTest();

    //We need to normalize our data. We'll use NormalizerStandardize (which gives us mean 0, unit variance):
    DataNormalization normalizer = new NormalizerStandardize();
    normalizer.fit(trainingData);           //Collect the statistics (mean/stdev) from the training data. This does not modify the input data
    normalizer.transform(trainingData);     //Apply normalization to the training data
    normalizer.transform(testData);         //Apply normalization to the test data. This is using statistics calculated from the *training* set
}
 
Example 2
Source File: NormalizerTests.java    From nd4j with Apache License 2.0
public float testItervsDataset(DataNormalization preProcessor) {
    DataSet dataCopy = data.copy();
    DataSetIterator dataIter = new TestDataSetIterator(dataCopy, batchSize);
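    // Path A: fit and transform the whole DataSet in one call each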
    preProcessor.fit(dataCopy);
    preProcessor.transform(dataCopy);
    INDArray transformA = dataCopy.getFeatures();

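    // Path B: fit from the iterator, then apply the same normalizer to each batch via the pre-processor hook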
    preProcessor.fit(dataIter);
    dataIter.setPreProcessor(preProcessor);
    DataSet next = dataIter.next();
    INDArray transformB = next.getFeatures();

    while (dataIter.hasNext()) {
        next = dataIter.next();
        INDArray transformb = next.getFeatures();
        transformB = Nd4j.vstack(transformB, transformb);
    }

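    // Maximum relative difference between the two paths (0 when they agree exactly)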
    return Transforms.abs(transformB.div(transformA).rsub(1)).maxNumber().floatValue();
}
 
Example 3
Source File: NormalizerMinMaxScalerTest.java    From nd4j with Apache License 2.0
@Test
public void testGivenMaxMinConstant() {
    double tolerancePerc = 1; // 1% of correct value
    int nSamples = 500;
    int nFeatures = 3;

    INDArray featureSet = Nd4j.rand(nSamples, nFeatures).mul(0.1).add(10);
    INDArray labelSet = Nd4j.zeros(nSamples, 1);
    DataSet sampleDataSet = new DataSet(featureSet, labelSet);

    double givenMin = -1000;
    double givenMax = 1000;
    DataNormalization myNormalizer = new NormalizerMinMaxScaler(givenMin, givenMax);
    DataSet transformed = sampleDataSet.copy();

    myNormalizer.fit(sampleDataSet);
    myNormalizer.transform(transformed);

    //feature set is basically all 10s -> should transform to the min
    INDArray expected = Nd4j.ones(nSamples, nFeatures).mul(givenMin);
    INDArray delta = Transforms.abs(transformed.getFeatures().sub(expected)).div(expected);
    double maxdeltaPerc = delta.max(0, 1).mul(100).getDouble(0, 0);
    assertTrue(maxdeltaPerc < tolerancePerc);
}
 
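NormalizerMinMaxScaler fits the per-feature minimum and maximum of the data and then maps each value linearly onto the configured [givenMin, givenMax] range. A hypothetical helper mirroring the standard min-max mapping (illustrative, not library code):

// x' = (x - dataMin) / (dataMax - dataMin) * (givenMax - givenMin) + givenMin
static double minMaxScale(double x, double dataMin, double dataMax,
                          double givenMin, double givenMax) {
    return (x - dataMin) / (dataMax - dataMin) * (givenMax - givenMin) + givenMin;
}
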
Example 4
Source File: DiabetesFileDataSource.java    From FederatedAndroidTrainer with MIT License
private void createDataSource() throws IOException, InterruptedException {
    //First: get the dataset using the record reader. CSVRecordReader handles loading/parsing
    int numLinesToSkip = 0;
    String delimiter = ",";
    RecordReader recordReader = new CSVRecordReader(numLinesToSkip, delimiter);
    recordReader.initialize(new InputStreamInputSplit(dataFile));

    //Second: the RecordReaderDataSetIterator handles conversion to DataSet objects, ready for use in a neural network
    int labelIndex = 11;

    DataSetIterator iterator = new RecordReaderDataSetIterator(recordReader, batchSize, labelIndex, labelIndex, true);
    DataSet allData = iterator.next();

    SplitTestAndTrain testAndTrain = allData.splitTestAndTrain(0.80);  //Use 80% of data for training

    trainingData = testAndTrain.getTrain();
    testData = testAndTrain.getTest();

    //We need to normalize our data. We'll use NormalizerStandardize (which gives us mean 0, unit variance):
    DataNormalization normalizer = new NormalizerStandardize();
    normalizer.fit(trainingData);           //Collect the statistics (mean/stdev) from the training data. This does not modify the input data
    normalizer.transform(trainingData);     //Apply normalization to the training data
    normalizer.transform(testData);         //Apply normalization to the test data. This is using statistics calculated from the *training* set
}
 
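Example 4 builds a regression iterator (the final boolean flags regression), so by default only the features are normalized. When the regression target should be standardized as well, DataNormalization offers fitLabel(); a sketch of that variant, reusing the trainingData/testData fields from the example:

//Optional for regression: also normalize the labels (targets)
DataNormalization normalizer = new NormalizerStandardize();
normalizer.fitLabel(true);          //Collect label statistics in addition to feature statistics
normalizer.fit(trainingData);
normalizer.transform(trainingData); //Now normalizes both features and labels
normalizer.transform(testData);
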
Example 5
Source File: RecordReaderDataSetiteratorTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testNormalizerPrefetchReset() throws Exception {
    //Check NPE fix for: https://github.com/deeplearning4j/deeplearning4j/issues/4214
    RecordReader csv = new CSVRecordReader();
    csv.initialize(new FileSplit(Resources.asFile("iris.txt")));

    int batchSize = 3;

    DataSetIterator iter = new RecordReaderDataSetIterator(csv, batchSize, 4, 4, true);

    DataNormalization normalizer = new NormalizerMinMaxScaler(0, 1);
    normalizer.fit(iter);
    iter.setPreProcessor(normalizer);

    iter.inputColumns();    //Prefetch
    iter.totalOutcomes();
    iter.hasNext();
    iter.reset();
    iter.next();
}
 
Example 6
Source File: ImageInstanceIterator.java    From wekaDeeplearning4j with GNU General Public License v3.0
/**
 * This method returns the iterator. Scales all intensity values: it divides them by 255.
 *
 * @param data the dataset to use
 * @param seed the seed for the random number generator
 * @param batchSize the batch size to use
 * @return the iterator
 */
@Override
public DataSetIterator getDataSetIterator(Instances data, int seed, int batchSize)
    throws Exception {

  batchSize = Math.min(data.numInstances(), batchSize);
  validate(data);
  ImageRecordReader reader = getImageRecordReader(data);

  // Required for supporting channels-last models (currently only EfficientNet)
  if (getChannelsLast())
    reader.setNchw_channels_first(false);

  final int labelIndex = 1; // Use explicit label index position
  final int numPossibleLabels = data.numClasses();
  DataSetIterator tmpIter =
      new RecordReaderDataSetIterator(reader, batchSize, labelIndex, numPossibleLabels);
  DataNormalization scaler = new ImagePreProcessingScaler(0, 1);
  scaler.fit(tmpIter);
  tmpIter.setPreProcessor(scaler);
  return tmpIter;
}
 
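Unlike the statistics-based normalizers, ImagePreProcessingScaler has its output range fixed up front (here 0 to 1, i.e. dividing 8-bit pixel values by 255), so fit() is effectively a no-op for it; the call above mainly keeps the familiar fit-then-setPreProcessor shape. A small sketch of applying the scaler to a raw pixel tensor directly (the random batch stands in for real image data):

// Scale a fake 8-bit NCHW image batch into [0, 1]; no statistics are estimated
INDArray pixels = Nd4j.rand(new int[] {1, 3, 32, 32}).muli(255);
DataNormalization scaler = new ImagePreProcessingScaler(0, 1);
scaler.transform(pixels); // divides by 255 in place
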
Example 7
Source File: HyperParameterTuningArbiterUiExample.java    From Java-Deep-Learning-Cookbook with MIT License
public DataSetIteratorSplitter dataSplit(DataSetIterator iterator) throws IOException, InterruptedException {
    DataNormalization dataNormalization = new NormalizerStandardize();
    dataNormalization.fit(iterator);
    iterator.setPreProcessor(dataNormalization);
    DataSetIteratorSplitter splitter = new DataSetIteratorSplitter(iterator,1000,0.8);
    return splitter;
}
 
Example 8
Source File: HyperParameterTuning.java    From Java-Deep-Learning-Cookbook with MIT License
public DataSetIteratorSplitter dataSplit(DataSetIterator iterator) throws IOException, InterruptedException {
    DataNormalization dataNormalization = new NormalizerStandardize();
    dataNormalization.fit(iterator);
    iterator.setPreProcessor(dataNormalization);
    DataSetIteratorSplitter splitter = new DataSetIteratorSplitter(iterator,1000,0.8);
    return splitter;
}
 
Example 9
Source File: BNGradientCheckTest.java    From deeplearning4j with Apache License 2.0
@Test
    public void testGradient2dFixedGammaBeta() {
        DataNormalization scaler = new NormalizerMinMaxScaler();
        DataSetIterator iter = new IrisDataSetIterator(150, 150);
        scaler.fit(iter);
        iter.setPreProcessor(scaler);
        DataSet ds = iter.next();
        INDArray input = ds.getFeatures();
        INDArray labels = ds.getLabels();

        for (boolean useLogStd : new boolean[]{true, false}) {
            MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().updater(new NoOp())
                    .dataType(DataType.DOUBLE)
                    .seed(12345L)
                    .dist(new NormalDistribution(0, 1)).list()
                    .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).activation(Activation.IDENTITY).build())
                    .layer(1, new BatchNormalization.Builder().useLogStd(useLogStd).lockGammaBeta(true).gamma(2.0).beta(0.5).nOut(3)
                            .build())
                    .layer(2, new ActivationLayer.Builder().activation(Activation.TANH).build())
                    .layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                            .activation(Activation.SOFTMAX).nIn(3).nOut(3).build());

            MultiLayerNetwork mln = new MultiLayerNetwork(builder.build());
            mln.init();

//            for (int j = 0; j < mln.getnLayers(); j++)
//                System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams());

            //Mean and variance vars are not gradient checkable; mean/variance "gradient" is used to implement running mean/variance calc
            //i.e., runningMean = decay * runningMean + (1-decay) * batchMean
            //However, numerical gradient will be 0 as forward pass doesn't depend on this "parameter"
            Set<String> excludeParams = new HashSet<>(Arrays.asList("1_mean", "1_var", "1_log10stdev"));
            boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(mln).input(input)
                    .labels(labels).excludeParams(excludeParams));

            assertTrue(gradOK);
            TestUtils.testModelSerialization(mln);
        }
    }
 
Example 10
Source File: PreProcessor3D4DTest.java    From deeplearning4j with Apache License 2.0
private void test3dRevert(DataNormalization SUT) {
    INDArray features = Nd4j.rand(new int[] {5, 2, 10}, 12345).muli(2).addi(1);
    DataSet data = new DataSet(features, Nd4j.zeros(5, 1, 10));
    DataSet dataCopy = data.copy();

    SUT.fit(data);

    SUT.preProcess(data);
    assertNotEquals(data, dataCopy);

    SUT.revert(data);
    assertEquals(dataCopy.getFeatures(), data.getFeatures());
    assertEquals(dataCopy.getLabels(), data.getLabels());
}
 
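The fit/preProcess/revert round-trip above is also the shape of an inference pipeline: normalize incoming features, run the model, then map normalized outputs back to the original scale. A sketch assuming an already-fitted normalizer and a trained network (names are illustrative):

// Hypothetical inference-time use of a fitted DataNormalization
normalizer.transform(features);               // normalize incoming features in place
INDArray prediction = model.output(features); // the network sees normalized inputs
normalizer.revertLabels(prediction);          // undo label scaling (only if fitLabel(true) was used)
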
Example 11
Source File: NormalizationTests.java    From DataVec with Apache License 2.0
@Test
public void testMeanStdZeros() {
    List<List<Writable>> data = new ArrayList<>();
    Schema.Builder builder = new Schema.Builder();
    int numColumns = 6;
    for (int i = 0; i < numColumns; i++)
        builder.addColumnDouble(String.valueOf(i));

    for (int i = 0; i < 5; i++) {
        List<Writable> record = new ArrayList<>(numColumns);
        data.add(record);
        for (int j = 0; j < numColumns; j++) {
            record.add(new DoubleWritable(1.0));
        }

    }

    INDArray arr = RecordConverter.toMatrix(data);

    Schema schema = builder.build();
    JavaRDD<List<Writable>> rdd = sc.parallelize(data);
    DataRowsFacade dataFrame = DataFrames.toDataFrame(schema, rdd);

    //assert equivalent to the ndarray pre-processing
    NormalizerStandardize standardScaler = new NormalizerStandardize();
    standardScaler.fit(new DataSet(arr.dup(), arr.dup()));
    INDArray standardScalered = arr.dup();
    standardScaler.transform(new DataSet(standardScalered, standardScalered));
    DataNormalization zeroToOne = new NormalizerMinMaxScaler();
    zeroToOne.fit(new DataSet(arr.dup(), arr.dup()));
    INDArray zeroToOnes = arr.dup();
    zeroToOne.transform(new DataSet(zeroToOnes, zeroToOnes));
    List<Row> rows = Normalization.stdDevMeanColumns(dataFrame, dataFrame.get().columns());
    INDArray assertion = DataFrames.toMatrix(rows);
    //compare standard deviation
    assertTrue(standardScaler.getStd().equalsWithEps(assertion.getRow(0), 1e-1));
    //compare mean
    assertTrue(standardScaler.getMean().equalsWithEps(assertion.getRow(1), 1e-1));

}
 
Example 12
Source File: NormalizeUciData.java    From SKIL_Examples with Apache License 2.0
public void run() throws Exception {
    File trainingOutputFile = new File(trainOutputPath);
    File testOutputFile = new File(testOutputPath);

    if (trainingOutputFile.exists() || testOutputFile.exists()) {
        System.out.println(String.format("Warning: overwriting output files (%s, %s)", trainOutputPath, testOutputPath));

        trainingOutputFile.delete();
        testOutputFile.delete();
    }

    System.out.format("downloading from %s\n", downloadUrl);
    System.out.format("writing training output to %s\n", trainOutputPath);
    System.out.format("writing testing output to %s\n", testOutputPath);

    URL url = new URL(downloadUrl);
    String data = IOUtils.toString(url);
    String[] lines = data.split("\n");
    List<INDArray> arrays = new LinkedList<INDArray>();
    List<Integer> labels = new LinkedList<Integer>();

    for (int i=0; i<lines.length; i++) {
        String line = lines[i];
        String[] cols = line.split("\\s+");

        int label = i / 100;
        INDArray array = Nd4j.zeros(1, 60);

        for (int j=0; j<cols.length; j++) {
            Double d = Double.parseDouble(cols[j]);
            array.putScalar(0, j, d);
        }

        arrays.add(array);
        labels.add(label);
    }

    // Shuffle with **known** seed
    Collections.shuffle(arrays, new Random(12345));
    Collections.shuffle(labels, new Random(12345));

    INDArray trainData = Nd4j.zeros(450, 60);
    INDArray testData = Nd4j.zeros(150, 60);

    for (int i=0; i<arrays.size(); i++) {
        INDArray arr = arrays.get(i);

        if (i < 450) { // Training
            trainData.putRow(i, arr);
        } else { // Test
            testData.putRow(i-450, arr);
        }
    }

    DataSet trainDs = new DataSet(trainData, trainData);
    DataSetIterator trainIt = new ListDataSetIterator(trainDs.asList());

    DataSet testDs = new DataSet(testData, testData);
    DataSetIterator testIt = new ListDataSetIterator(testDs.asList());

    // Fit normalizer on training data only!
    DataNormalization normalizer = dataNormalizer.getNormalizer();
    normalizer.fit(trainIt);

    // Print out basic summary stats
    switch (normalizer.getType()) {
        case STANDARDIZE:
            System.out.format("Normalizer - Standardize:\n  mean=%s\n  std= %s\n",
                    ((NormalizerStandardize)normalizer).getMean(),
                    ((NormalizerStandardize)normalizer).getStd());
    }

    // Use same normalizer for both
    trainIt.setPreProcessor(normalizer);
    testIt.setPreProcessor(normalizer);

    String trainOutput = toCsv(trainIt, labels.subList(0, 450), new int[]{1, 60});
    String testOutput = toCsv(testIt, labels.subList(450, 600), new int[]{1, 60});

    FileUtils.write(trainingOutputFile, trainOutput);
    System.out.format("wrote normalized training file to %s\n", trainingOutputFile);

    FileUtils.write(testOutputFile, testOutput);
    System.out.format("wrote normalized test file to %s\n", testOutputFile);

}
 
Example 13
Source File: ModelUtils.java    From gluon-samples with BSD 3-Clause "New" or "Revised" License
public void evaluateModel(MultiLayerNetwork model, boolean invertColors) throws IOException {
        LOGGER.info("******EVALUATE MODEL******");

        ParentPathLabelGenerator labelMaker = new ParentPathLabelGenerator();
        ImageRecordReader recordReader = new ImageRecordReader(height,width,channels,labelMaker);
//        recordReader.setListeners(new LogRecordListener());

        // Initialize the record reader
        // add a listener, to extract the name

        File testData = new File(DATA_PATH + "/mnist_png/testing");
        FileSplit test = new FileSplit(testData,NativeImageLoader.ALLOWED_FORMATS,randNumGen);

        // The model trained on the training dataset split
        // now that it has trained we evaluate against the
        // test data of images the network has not seen

        recordReader.initialize(test);
        DataNormalization scaler = new ImagePreProcessingScaler(invertColors ? 1 : 0, invertColors ? 0 : 1);
        DataSetIterator testIter = new RecordReaderDataSetIterator(recordReader,batchSize,1,outputNum);
        scaler.fit(testIter);
        testIter.setPreProcessor(scaler);

        /*
        Log the order of the labels for later use.
        In previous versions the label order was consistent but random;
        in current versions the label order is lexicographic.
        Preserving the RecordReader label order is no longer
        needed, but is left in for demonstration purposes.
        */
        LOGGER.info(recordReader.getLabels().toString());

        // Create Eval object with 10 possible classes
        Evaluation eval = new Evaluation(outputNum);


        // Evaluate the network
        while (testIter.hasNext()) {
            DataSet next = testIter.next();
            INDArray output = model.output(next.getFeatures());
            // Compare the model's output
            // with the labels from the RecordReader
            eval.eval(next.getLabels(), output);

        }

        LOGGER.info(eval.stats());
    }
 
Example 14
Source File: NormalizationTests.java    From deeplearning4j with Apache License 2.0
@Test
public void normalizationTests() {
    List<List<Writable>> data = new ArrayList<>();
    Schema.Builder builder = new Schema.Builder();
    int numColumns = 6;
    for (int i = 0; i < numColumns; i++)
        builder.addColumnDouble(String.valueOf(i));

    for (int i = 0; i < 5; i++) {
        List<Writable> record = new ArrayList<>(numColumns);
        data.add(record);
        for (int j = 0; j < numColumns; j++) {
            record.add(new DoubleWritable(1.0));
        }

    }

    INDArray arr = RecordConverter.toMatrix(DataType.DOUBLE, data);

    Schema schema = builder.build();
    JavaRDD<List<Writable>> rdd = sc.parallelize(data);
    assertEquals(schema, DataFrames.fromStructType(DataFrames.fromSchema(schema)));
    assertEquals(rdd.collect(), DataFrames.toRecords(DataFrames.toDataFrame(schema, rdd)).getSecond().collect());

    Dataset<Row> dataFrame = DataFrames.toDataFrame(schema, rdd);
    dataFrame.show();
    Normalization.zeromeanUnitVariance(dataFrame).show();
    Normalization.normalize(dataFrame).show();

    //assert equivalent to the ndarray pre-processing
    NormalizerStandardize standardScaler = new NormalizerStandardize();
    standardScaler.fit(new DataSet(arr.dup(), arr.dup()));
    INDArray standardScalered = arr.dup();
    standardScaler.transform(new DataSet(standardScalered, standardScalered));
    DataNormalization zeroToOne = new NormalizerMinMaxScaler();
    zeroToOne.fit(new DataSet(arr.dup(), arr.dup()));
    INDArray zeroToOnes = arr.dup();
    zeroToOne.transform(new DataSet(zeroToOnes, zeroToOnes));

    INDArray zeroMeanUnitVarianceDataFrame =
                    RecordConverter.toMatrix(DataType.DOUBLE, Normalization.zeromeanUnitVariance(schema, rdd).collect());
    INDArray zeroMeanUnitVarianceDataFrameZeroToOne =
                    RecordConverter.toMatrix(DataType.DOUBLE, Normalization.normalize(schema, rdd).collect());
    assertEquals(standardScalered, zeroMeanUnitVarianceDataFrame);
    assertTrue(zeroToOnes.equalsWithEps(zeroMeanUnitVarianceDataFrameZeroToOne, 1e-1));

}
 
Example 15
Source File: GradientCheckTests.java    From deeplearning4j with Apache License 2.0
@Test
    public void testAutoEncoder() {
        //As above (testGradientMLP2LayerIrisSimple()) but with L2, L1, and both L2/L1 applied
        //Need to run gradient through updater, so that L2 can be applied

        Activation[] activFns = {Activation.SIGMOID, Activation.TANH};
        boolean[] characteristic = {false, true}; //If true: run some backprop steps first

        LossFunction[] lossFunctions = {LossFunction.MCXENT, LossFunction.MSE};
        Activation[] outputActivations = {Activation.SOFTMAX, Activation.TANH};

        DataNormalization scaler = new NormalizerMinMaxScaler();
        DataSetIterator iter = new IrisDataSetIterator(150, 150);
        scaler.fit(iter);
        iter.setPreProcessor(scaler);
        DataSet ds = iter.next();
        INDArray input = ds.getFeatures();
        INDArray labels = ds.getLabels();

        NormalizerStandardize norm = new NormalizerStandardize();
        norm.fit(ds);
        norm.transform(ds);

        double[] l2vals = {0.2, 0.0, 0.2};
        double[] l1vals = {0.0, 0.3, 0.3}; //i.e., use l2vals[i] with l1vals[i]

        for (Activation afn : activFns) {
            for (boolean doLearningFirst : characteristic) {
                for (int i = 0; i < lossFunctions.length; i++) {
                    for (int k = 0; k < l2vals.length; k++) {
                        LossFunction lf = lossFunctions[i];
                        Activation outputActivation = outputActivations[i];
                        double l2 = l2vals[k];
                        double l1 = l1vals[k];

                        Nd4j.getRandom().setSeed(12345);
                        MultiLayerConfiguration conf =
                                        new NeuralNetConfiguration.Builder()
                                                        .dataType(DataType.DOUBLE)
                                                        .updater(new NoOp())
                                                        .l2(l2).l1(l1)
                                                        .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT)
                                                        .seed(12345L)
                                                        .dist(new NormalDistribution(0, 1))
                                                        .list().layer(0,
                                                                        new AutoEncoder.Builder().nIn(4).nOut(3)
                                                                                        .activation(afn).build())
                                                        .layer(1, new OutputLayer.Builder(lf).nIn(3).nOut(3)
                                                                        .activation(outputActivation).build())
                                                        .build();

                        MultiLayerNetwork mln = new MultiLayerNetwork(conf);
                        mln.init();

                        String msg;
                        if (doLearningFirst) {
                            //Run a number of iterations of learning
                            mln.setInput(ds.getFeatures());
                            mln.setLabels(ds.getLabels());
                            mln.computeGradientAndScore();
                            double scoreBefore = mln.score();
                            for (int j = 0; j < 10; j++)
                                mln.fit(ds);
                            mln.computeGradientAndScore();
                            double scoreAfter = mln.score();
                            //Can't test in 'characteristic mode of operation' if not learning
                            msg = "testGradMLP2LayerIrisSimple() - score did not (sufficiently) decrease during learning - activationFn="
                                            + afn + ", lossFn=" + lf + ", outputActivation=" + outputActivation
                                            + ", doLearningFirst=" + doLearningFirst + ", l2=" + l2 + ", l1=" + l1
                                            + " (before=" + scoreBefore + ", scoreAfter=" + scoreAfter + ")";
                            assertTrue(msg, scoreAfter < scoreBefore);
                        }

                        msg = "testGradMLP2LayerIrisSimple() - activationFn=" + afn + ", lossFn=" + lf
                                        + ", outputActivation=" + outputActivation + ", doLearningFirst="
                                        + doLearningFirst + ", l2=" + l2 + ", l1=" + l1;
                        if (PRINT_RESULTS) {
                            System.out.println(msg);
//                            for (int j = 0; j < mln.getnLayers(); j++)
//                                System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams());
                        }

                        boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR,
                                        DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels);
                        assertTrue(msg, gradOK);
                        TestUtils.testModelSerialization(mln);
                    }
                }
            }
        }
    }
 
Example 16
Source File: CustomerRetentionPredictionExample.java    From Java-Deep-Learning-Cookbook with MIT License
public static void main(String[] args) throws IOException, InterruptedException {

       final int labelIndex=11;
       final int batchSize=8;
       final int numClasses=2;
       final INDArray weightsArray = Nd4j.create(new double[]{0.57, 0.75});

       final RecordReader recordReader = generateReader(new ClassPathResource("Churn_Modelling.csv").getFile());
       final DataSetIterator dataSetIterator = new RecordReaderDataSetIterator.Builder(recordReader,batchSize)
                                                                .classification(labelIndex,numClasses)
                                                                .build();
       final DataNormalization dataNormalization = new NormalizerStandardize();
       dataNormalization.fit(dataSetIterator);
       dataSetIterator.setPreProcessor(dataNormalization);
       final DataSetIteratorSplitter dataSetIteratorSplitter = new DataSetIteratorSplitter(dataSetIterator,1250,0.8);

       log.info("Building Model------------------->>>>>>>>>");

        final MultiLayerConfiguration configuration = new NeuralNetConfiguration.Builder()
                                                                    .weightInit(WeightInit.RELU_UNIFORM)
                                                                    .updater(new Adam(0.015D))
                                                                    .list()
                                                                    .layer(new DenseLayer.Builder().nIn(11).nOut(6).activation(Activation.RELU).dropOut(0.9).build())
                                                                    .layer(new DenseLayer.Builder().nIn(6).nOut(6).activation(Activation.RELU).dropOut(0.9).build())
                                                                    .layer(new DenseLayer.Builder().nIn(6).nOut(4).activation(Activation.RELU).dropOut(0.9).build())
                                                                    .layer(new OutputLayer.Builder(new LossMCXENT(weightsArray)).nIn(4).nOut(2).activation(Activation.SOFTMAX).build())
                                                                    .build();

        final UIServer uiServer = UIServer.getInstance();
        final StatsStorage statsStorage = new InMemoryStatsStorage();

        final MultiLayerNetwork multiLayerNetwork = new MultiLayerNetwork(configuration);
        multiLayerNetwork.init();
        multiLayerNetwork.setListeners(new ScoreIterationListener(100),
                                       new StatsListener(statsStorage));
        uiServer.attach(statsStorage);
        multiLayerNetwork.fit(dataSetIteratorSplitter.getTrainIterator(),100);

        final Evaluation evaluation =  multiLayerNetwork.evaluate(dataSetIteratorSplitter.getTestIterator(),Arrays.asList("0","1"));
        System.out.println(evaluation.stats());

        final File file = new File("model.zip");
        ModelSerializer.writeModel(multiLayerNetwork,file,true);
        ModelSerializer.addNormalizerToModel(file,dataNormalization);


    }
 
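The example above stores the fitted normalizer inside model.zip next to the network weights; at inference time both can be restored so new inputs are normalized with the original training statistics. A sketch of the restore side, assuming ModelSerializer.restoreNormalizerFromFile (available in recent DL4J versions):

// Restore the network and its normalizer from the archive written above
MultiLayerNetwork restored = ModelSerializer.restoreMultiLayerNetwork(new File("model.zip"));
NormalizerStandardize normalizer = ModelSerializer.restoreNormalizerFromFile(new File("model.zip"));
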
Example 17
Source File: IrisClassifier.java    From tutorials with MIT License
public static void main(String[] args) throws IOException, InterruptedException {

        DataSet allData;
        try (RecordReader recordReader = new CSVRecordReader(0, ',')) {
            recordReader.initialize(new FileSplit(new ClassPathResource("iris.txt").getFile()));

            DataSetIterator iterator = new RecordReaderDataSetIterator(recordReader, 150, FEATURES_COUNT, CLASSES_COUNT);
            allData = iterator.next();
        }

        allData.shuffle(42);

        DataNormalization normalizer = new NormalizerStandardize();
        normalizer.fit(allData);
        normalizer.transform(allData);

        SplitTestAndTrain testAndTrain = allData.splitTestAndTrain(0.65);
        DataSet trainingData = testAndTrain.getTrain();
        DataSet testData = testAndTrain.getTest();

        MultiLayerConfiguration configuration = new NeuralNetConfiguration.Builder()
                .iterations(1000)
                .activation(Activation.TANH)
                .weightInit(WeightInit.XAVIER)
                .regularization(true)
                .learningRate(0.1).l2(0.0001)
                .list()
                .layer(0, new DenseLayer.Builder().nIn(FEATURES_COUNT).nOut(3)
                        .build())
                .layer(1, new DenseLayer.Builder().nIn(3).nOut(3)
                        .build())
                .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                        .activation(Activation.SOFTMAX)
                        .nIn(3).nOut(CLASSES_COUNT).build())
                .backpropType(BackpropType.Standard).pretrain(false)
                .build();

        MultiLayerNetwork model = new MultiLayerNetwork(configuration);
        model.init();
        model.fit(trainingData);

        INDArray output = model.output(testData.getFeatures());

        Evaluation eval = new Evaluation(CLASSES_COUNT);
        eval.eval(testData.getLabels(), output);
        System.out.println(eval.stats());

    }
 