Java Code Examples for org.nd4j.linalg.dataset.api.preprocessor.DataNormalization

The following examples show how to use org.nd4j.linalg.dataset.api.preprocessor.DataNormalization. The examples are extracted from open source projects; where available, the original project, source file, and license are noted above each example.
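
Almost every example below follows the same pattern: fit a normalizer on the training data to collect statistics, then apply it, either by transforming a DataSet in place or by attaching the normalizer to an iterator as a pre-processor. Here is a minimal sketch of that pattern; the helper method and its name are illustrative, not taken from any of the projects below.

import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;
import org.nd4j.linalg.dataset.api.preprocessor.DataNormalization;
import org.nd4j.linalg.dataset.api.preprocessor.NormalizerStandardize;

public static DataSetIterator withNormalization(DataSetIterator trainIter) {
    DataNormalization normalizer = new NormalizerStandardize();
    normalizer.fit(trainIter);              // one pass over the data to collect mean/std
    trainIter.setPreProcessor(normalizer);  // every batch the iterator emits is now normalized
    return trainIter;
}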
Example 1
@Test
public void testMultiLayerNetwork() throws Exception {
    Pair<MultiLayerNetwork, DataNormalization> trainedNetwork = TrainUtils.getTrainedNetwork();
    MultiLayerNetwork save = trainedNetwork.getLeft();
    File dir = testDir.newFolder();
    File tmpZip = new File(dir, "dl4j_mln_model.zip");
    tmpZip.deleteOnExit();
    ModelSerializer.writeModel(save, tmpZip, true);

    ModelStep modelPipelineStep = Dl4jStep.builder()
            .inputName("default")
            .outputName("output")
            .path(tmpZip.getAbsolutePath())
            .build();

    Dl4jInferenceExecutionerFactory factory = new Dl4jInferenceExecutionerFactory();
    InitializedInferenceExecutionerConfig initializedInferenceExecutionerConfig = factory.create(modelPipelineStep);
    MultiLayerNetworkInferenceExecutioner multiLayerNetworkInferenceExecutioner = (MultiLayerNetworkInferenceExecutioner) initializedInferenceExecutionerConfig.getInferenceExecutioner();
    assertNotNull(multiLayerNetworkInferenceExecutioner);
    assertNotNull(multiLayerNetworkInferenceExecutioner.model());
    assertNotNull(multiLayerNetworkInferenceExecutioner.modelLoader());
}
 
Example 2
@Test
public void testComputationGraph() throws Exception {
    Pair<MultiLayerNetwork, DataNormalization> trainedNetwork = TrainUtils.getTrainedNetwork();
    ComputationGraph save = trainedNetwork.getLeft().toComputationGraph();
    File dir = testDir.newFolder();
    File tmpZip = new File(dir, "dl4j_cg_model.zip");
    tmpZip.deleteOnExit();
    ModelSerializer.writeModel(save, tmpZip, true);

    ModelStep modelPipelineStep = Dl4jStep.builder()
            .inputName("default")
            .outputName("output")
            .path(tmpZip.getAbsolutePath())
            .build();

    Dl4jInferenceExecutionerFactory factory = new Dl4jInferenceExecutionerFactory();
    InitializedInferenceExecutionerConfig initializedInferenceExecutionerConfig = factory.create(modelPipelineStep);
    MultiComputationGraphInferenceExecutioner multiComputationGraphInferenceExecutioner = (MultiComputationGraphInferenceExecutioner) initializedInferenceExecutionerConfig.getInferenceExecutioner();
    assertNotNull(multiComputationGraphInferenceExecutioner);
    assertNotNull(multiComputationGraphInferenceExecutioner.model());
    assertNotNull(multiComputationGraphInferenceExecutioner.modelLoader());
}
 
Example 3
Source Project: FederatedAndroidTrainer   Source File: IrisFileDataSource.java    License: MIT License
private void createDataSource() throws IOException, InterruptedException {
    //First: get the dataset using the record reader. CSVRecordReader handles loading/parsing
    int numLinesToSkip = 0;
    String delimiter = ",";
    RecordReader recordReader = new CSVRecordReader(numLinesToSkip, delimiter);
    recordReader.initialize(new InputStreamInputSplit(dataFile));

    //Second: the RecordReaderDataSetIterator handles conversion to DataSet objects, ready for use in a neural network
    int labelIndex = 4;     //5 values in each row of the iris.txt CSV: 4 input features followed by an integer label (class) index. Labels are the 5th value (index 4) in each row
    int numClasses = 3;     //3 classes (types of iris flowers) in the iris data set. Classes have integer values 0, 1 or 2

    DataSetIterator iterator = new RecordReaderDataSetIterator(recordReader, batchSize, labelIndex, numClasses);
    DataSet allData = iterator.next();
    allData.shuffle();

    SplitTestAndTrain testAndTrain = allData.splitTestAndTrain(0.80);  //Use 80% of data for training

    trainingData = testAndTrain.getTrain();
    testData = testAndTrain.getTest();

    //We need to normalize our data. We'll use NormalizerStandardize (which gives us mean 0, unit variance):
    DataNormalization normalizer = new NormalizerStandardize();
    normalizer.fit(trainingData);           //Collect the statistics (mean/stdev) from the training data. This does not modify the input data
    normalizer.transform(trainingData);     //Apply normalization to the training data
    normalizer.transform(testData);         //Apply normalization to the test data. This is using statistics calculated from the *training* set
}
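
A fitted normalizer is part of the model's input contract, so it is usually serialized together with the network and restored at inference time. Below is a minimal sketch using ModelSerializer; the method name saveWithNormalizer is illustrative. The Predict example later on this page restores both halves in one call with restoreMultiLayerNetworkAndNormalizer.

import java.io.File;
import java.io.IOException;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.util.ModelSerializer;
import org.nd4j.linalg.dataset.api.preprocessor.DataNormalization;

public static void saveWithNormalizer(MultiLayerNetwork model, DataNormalization normalizer, File zip) throws IOException {
    ModelSerializer.writeModel(model, zip, true);          // configuration, parameters, updater state
    ModelSerializer.addNormalizerToModel(zip, normalizer); // fitted statistics go into the same zip
}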
 
Example 4
Source Project: FederatedAndroidTrainer   Source File: DiabetesFileDataSource.java    License: MIT License
private void createDataSource() throws IOException, InterruptedException {
    //First: get the dataset using the record reader. CSVRecordReader handles loading/parsing
    int numLinesToSkip = 0;
    String delimiter = ",";
    RecordReader recordReader = new CSVRecordReader(numLinesToSkip, delimiter);
    recordReader.initialize(new InputStreamInputSplit(dataFile));

    //Second: the RecordReaderDataSetIterator handles conversion to DataSet objects, ready for use in a neural network
    int labelIndex = 11;

    DataSetIterator iterator = new RecordReaderDataSetIterator(recordReader, batchSize, labelIndex, labelIndex, true);
    DataSet allData = iterator.next();

    SplitTestAndTrain testAndTrain = allData.splitTestAndTrain(0.80);  //Use 80% of data for training

    trainingData = testAndTrain.getTrain();
    testData = testAndTrain.getTest();

    //We need to normalize our data. We'll use NormalizerStandardize (which gives us mean 0, unit variance):
    DataNormalization normalizer = new NormalizerStandardize();
    normalizer.fit(trainingData);           //Collect the statistics (mean/stdev) from the training data. This does not modify the input data
    normalizer.transform(trainingData);     //Apply normalization to the training data
    normalizer.transform(testData);         //Apply normalization to the test data. This is using statistics calculated from the *training* set
}
 
Example 5
/**
 * This method returns the iterator. Scales all intensity values: it divides them by 255.
 *
 * @param data the dataset to use
 * @param seed the seed for the random number generator
 * @param batchSize the batch size to use
 * @return the iterator
 */
@Override
public DataSetIterator getDataSetIterator(Instances data, int seed, int batchSize)
    throws Exception {

  batchSize = Math.min(data.numInstances(), batchSize);
  validate(data);
  ImageRecordReader reader = getImageRecordReader(data);

  // Required for supporting channels-last models (currently only EfficientNet)
  if (getChannelsLast())
    reader.setNchw_channels_first(false);

  final int labelIndex = 1; // Use explicit label index position
  final int numPossibleLabels = data.numClasses();
  DataSetIterator tmpIter =
      new RecordReaderDataSetIterator(reader, batchSize, labelIndex, numPossibleLabels);
  DataNormalization scaler = new ImagePreProcessingScaler(0, 1);
  scaler.fit(tmpIter);
  tmpIter.setPreProcessor(scaler);
  return tmpIter;
}
 
Example 6
public void trainModel(MultiLayerNetwork model, boolean invertColors, InputStream customImage, int customLabel) throws Exception {
    List<INDArray> extraFeatures = new LinkedList<>();
    List<Integer> extraLabels = new LinkedList<>();
    final INDArray[] customData = {null, null};
    if (customImage != null) {
        NativeImageLoader loader = new NativeImageLoader(width, height, channels);
        DataNormalization scaler = invertColors ? new ImagePreProcessingScaler(1, 0) : new ImagePreProcessingScaler(0, 1);
        customData[0] = loader.asMatrix(customImage);
        scaler.transform(customData[0]);
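        // Build a one-hot label row vector (1 x 10, i.e. assuming 10 classes) for the custom image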
        customData[1] = Nd4j.create(1, 10);
        customData[1].putScalar(customLabel, 1.0);
        extraFeatures.add(customData[0]);
        extraLabels.add(customLabel);
    }
    trainModel(model, extraFeatures, extraLabels);
}
 
Example 7
Source Project: nd4j   Source File: NormalizerMinMaxScalerTest.java    License: Apache License 2.0
@Test
public void testGivenMaxMinConstant() {
    double tolerancePerc = 1; // 1% of correct value
    int nSamples = 500;
    int nFeatures = 3;

    INDArray featureSet = Nd4j.rand(nSamples, nFeatures).mul(0.1).add(10);
    INDArray labelSet = Nd4j.zeros(nSamples, 1);
    DataSet sampleDataSet = new DataSet(featureSet, labelSet);

    double givenMin = -1000;
    double givenMax = 1000;
    DataNormalization myNormalizer = new NormalizerMinMaxScaler(givenMin, givenMax);
    DataSet transformed = sampleDataSet.copy();

    myNormalizer.fit(sampleDataSet);
    myNormalizer.transform(transformed);

    //feature set is basically all 10s -> should transform to the min
    INDArray expected = Nd4j.ones(nSamples, nFeatures).mul(givenMin);
    INDArray delta = Transforms.abs(transformed.getFeatures().sub(expected)).div(expected);
    double maxdeltaPerc = delta.max(0, 1).mul(100).getDouble(0, 0);
    assertTrue(maxdeltaPerc < tolerancePerc);
}
 
Example 8
Source Project: nd4j   Source File: NormalizerTests.java    License: Apache License 2.0
public float testItervsDataset(DataNormalization preProcessor) {
    DataSet dataCopy = data.copy();
    DataSetIterator dataIter = new TestDataSetIterator(dataCopy, batchSize);
    preProcessor.fit(dataCopy);
    preProcessor.transform(dataCopy);
    INDArray transformA = dataCopy.getFeatures();

    preProcessor.fit(dataIter);
    dataIter.setPreProcessor(preProcessor);
    DataSet next = dataIter.next();
    INDArray transformB = next.getFeatures();

    while (dataIter.hasNext()) {
        next = dataIter.next();
        INDArray transformb = next.getFeatures();
        transformB = Nd4j.vstack(transformB, transformb);
    }

    return Transforms.abs(transformB.div(transformA).rsub(1)).maxNumber().floatValue();
}
 
Example 9
Source Project: deeplearning4j   Source File: NormalizerMinMaxScalerTest.java    License: Apache License 2.0
@Test
public void testGivenMaxMinConstant() {
    double tolerancePerc = 1; // 1% of correct value
    int nSamples = 500;
    int nFeatures = 3;

    INDArray featureSet = Nd4j.rand(nSamples, nFeatures).mul(0.1).add(10);
    INDArray labelSet = Nd4j.zeros(nSamples, 1);
    DataSet sampleDataSet = new DataSet(featureSet, labelSet);

    double givenMin = -1000;
    double givenMax = 1000;
    DataNormalization myNormalizer = new NormalizerMinMaxScaler(givenMin, givenMax);
    DataSet transformed = sampleDataSet.copy();

    myNormalizer.fit(sampleDataSet);
    myNormalizer.transform(transformed);

    //feature set is basically all 10s -> should transform to the min
    INDArray expected = Nd4j.ones(nSamples, nFeatures).mul(givenMin);
    INDArray delta = Transforms.abs(transformed.getFeatures().sub(expected)).div(expected);
    double maxdeltaPerc = delta.max(0, 1).mul(100).getDouble(0);
    assertTrue(maxdeltaPerc < tolerancePerc);
}
 
Example 10
@Test
public void testNormalizerPrefetchReset() throws Exception {
    //Check NPE fix for: https://github.com/deeplearning4j/deeplearning4j/issues/4214
    RecordReader csv = new CSVRecordReader();
    csv.initialize(new FileSplit(Resources.asFile("iris.txt")));

    int batchSize = 3;

    DataSetIterator iter = new RecordReaderDataSetIterator(csv, batchSize, 4, 4, true);

    DataNormalization normalizer = new NormalizerMinMaxScaler(0, 1);
    normalizer.fit(iter);
    iter.setPreProcessor(normalizer);

    iter.inputColumns();    //Prefetch
    iter.totalOutcomes();
    iter.hasNext();
    iter.reset();
    iter.next();
}
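
The iterator above reads the label column as a real value, i.e. a regression setup. By default, normalizers only touch the features; label normalization for regression targets can be enabled with fitLabel before fitting. A short sketch of that variant, reusing iter from the example above:

    DataNormalization normalizer = new NormalizerMinMaxScaler(0, 1);
    normalizer.fitLabel(true);       // opt in: collect statistics for, and transform, the labels too
    normalizer.fit(iter);
    iter.setPreProcessor(normalizer);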
 
Example 11
Source Project: deeplearning4j   Source File: TestImageNet.java    License: Apache License 2.0
@Test
public void testImageNetLabels() throws IOException {
    // set up model
    ZooModel model = VGG19.builder().numClasses(0).build(); //label count doesn't matter: we load pretrained ImageNet weights
    ComputationGraph initializedModel = (ComputationGraph) model.initPretrained();

    // set up input and feedforward
    NativeImageLoader loader = new NativeImageLoader(224, 224, 3);
    ClassLoader classloader = Thread.currentThread().getContextClassLoader();
    INDArray image = loader.asMatrix(classloader.getResourceAsStream("deeplearning4j-zoo/goldenretriever.jpg"));
    DataNormalization scaler = new VGG16ImagePreProcessor();
    scaler.transform(image);
    INDArray[] output = initializedModel.output(false, image);

    // check output labels of result
    String decodedLabels = new ImageNetLabels().decodePredictions(output[0]);
    log.info(decodedLabels);
    assertTrue(decodedLabels.contains("golden_retriever"));

    // clean up for current model
    Nd4j.getWorkspaceManager().destroyAllWorkspacesForCurrentThread();
    System.gc();
}
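
Unlike the statistics-based normalizers, VGG16ImagePreProcessor subtracts the hard-coded ImageNet per-channel mean, so no fit call is needed before transform. DataNormalization implementations can also undo their transformation, which helps when inspecting what the network actually received. A sketch, reusing image from the example above:

    DataNormalization scaler = new VGG16ImagePreProcessor();
    scaler.transform(image);         // subtract the ImageNet mean RGB values in place
    scaler.revertFeatures(image);    // add them back, restoring the original pixel values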
 
Example 12
private static DataSetIteratorSplitter createDataSetSplitter() throws IOException, InterruptedException {
    final RecordReader recordReader = DataSetIteratorHelper.generateReader(new ClassPathResource("Churn_Modelling.csv").getFile());
    final DataSetIterator dataSetIterator = new RecordReaderDataSetIterator.Builder(recordReader,batchSize)
            .classification(labelIndex,numClasses)
            .build();
    final DataNormalization dataNormalization = new NormalizerStandardize();
    dataNormalization.fit(dataSetIterator);
    dataSetIterator.setPreProcessor(dataNormalization);
    final DataSetIteratorSplitter dataSetIteratorSplitter = new DataSetIteratorSplitter(dataSetIterator,1250,0.8);
    return dataSetIteratorSplitter;
}
 
Example 13
Source Project: Java-Deep-Learning-Cookbook   Source File: HyperParameterTuning.java    License: MIT License
public DataSetIteratorSplitter dataSplit(DataSetIterator iterator) throws IOException, InterruptedException {
    DataNormalization dataNormalization = new NormalizerStandardize();
    dataNormalization.fit(iterator);
    iterator.setPreProcessor(dataNormalization);
    DataSetIteratorSplitter splitter = new DataSetIteratorSplitter(iterator,1000,0.8);
    return splitter;
}
 
Example 14
Source Project: konduit-serving   Source File: TestUtils.java    License: Apache License 2.0
public static InferenceConfiguration getConfig(TemporaryFolder trainDir) throws Exception {
    Pair<MultiLayerNetwork, DataNormalization> multiLayerNetwork = TrainUtils.getTrainedNetwork();
    File modelSave = trainDir.newFile("model.zip");
    ModelSerializer.writeModel(multiLayerNetwork.getFirst(), modelSave, false);

    Schema.Builder schemaBuilder = new Schema.Builder();
    schemaBuilder.addColumnDouble("petal_length")
            .addColumnDouble("petal_width")
            .addColumnDouble("sepal_width")
            .addColumnDouble("sepal_height");
    Schema inputSchema = schemaBuilder.build();

    Schema.Builder outputSchemaBuilder = new Schema.Builder();
    outputSchemaBuilder.addColumnDouble("setosa");
    outputSchemaBuilder.addColumnDouble("versicolor");
    outputSchemaBuilder.addColumnDouble("virginica");
    Schema outputSchema = outputSchemaBuilder.build();

    ServingConfig servingConfig = ServingConfig.builder()
            .createLoggingEndpoints(true)
            .build();

    Dl4jStep modelPipelineStep = Dl4jStep.builder()
            .inputName("default")
            .inputColumnName("default", SchemaTypeUtils.columnNames(inputSchema))
            .inputSchema("default", SchemaTypeUtils.typesForSchema(inputSchema))
            .outputSchema("default", SchemaTypeUtils.typesForSchema(outputSchema))
            .path(modelSave.getAbsolutePath())
            .outputColumnName("default", SchemaTypeUtils.columnNames(outputSchema))
            .build();

    return InferenceConfiguration.builder()
            .servingConfig(servingConfig)
            .step(modelPipelineStep)
            .build();
}
 
Example 15
Source Project: konduit-serving   Source File: KonduitServingMainTest.java    License: Apache License 2.0
/**
 * Returns an inference configuration
 * @param fail If true, the network file won't be trained and saved on the file path.
 *             This is useful for deliberately failing the deployment for {@link KonduitServingMainTest#testFailure(TestContext)}
 * @return An {@link InferenceConfiguration} object.
 * @throws Exception throws if there's an exception training a test network.
 */
public InferenceConfiguration getConfig(boolean fail) throws Exception {
    File modelSave = folder.newFile("model.zip");

    if(!fail) {
        Pair<MultiLayerNetwork, DataNormalization> multiLayerNetwork = TrainUtils.getTrainedNetwork();
        ModelSerializer.writeModel(multiLayerNetwork.getFirst(), modelSave, false);
    }

    Schema.Builder schemaBuilder = new Schema.Builder();
    schemaBuilder.addColumnDouble("petal_length")
            .addColumnDouble("petal_width")
            .addColumnDouble("sepal_width")
            .addColumnDouble("sepal_height");
    Schema inputSchema = schemaBuilder.build();

    Schema.Builder outputSchemaBuilder = new Schema.Builder();
    outputSchemaBuilder.addColumnDouble("setosa");
    outputSchemaBuilder.addColumnDouble("versicolor");
    outputSchemaBuilder.addColumnDouble("virginica");
    Schema outputSchema = outputSchemaBuilder.build();

    ServingConfig servingConfig = ServingConfig.builder()
            .httpPort(PortUtils.getAvailablePort())
            .build();

    Dl4jStep modelPipelineStep = Dl4jStep.builder()
            .inputName("default")
            .inputColumnName("default", SchemaTypeUtils.columnNames(inputSchema))
            .inputSchema("default", SchemaTypeUtils.typesForSchema(inputSchema))
            .outputSchema("default", SchemaTypeUtils.typesForSchema(outputSchema))
            .path(modelSave.getAbsolutePath())
            .outputColumnName("default", SchemaTypeUtils.columnNames(outputSchema))
            .build();

    return InferenceConfiguration.builder()
            .servingConfig(servingConfig)
            .step(modelPipelineStep)
            .build();
}
 
Example 16
Source Project: konduit-serving   Source File: BaseDl4JVerticalTest.java    License: Apache License 2.0
@Override
public JsonObject getConfigObject() throws Exception {
    Pair<MultiLayerNetwork, DataNormalization> multiLayerNetwork = getTrainedNetwork();
    File modelSave = new File(temporary.getRoot(), "model.zip");
    ModelSerializer.writeModel(multiLayerNetwork.getFirst(), modelSave, true);

    Schema.Builder schemaBuilder = new Schema.Builder();
    schemaBuilder.addColumnDouble("petal_length")
            .addColumnDouble("petal_width")
            .addColumnDouble("sepal_width")
            .addColumnDouble("sepal_height");
    Schema inputSchema = schemaBuilder.build();

    Schema.Builder outputSchemaBuilder = new Schema.Builder();
    outputSchemaBuilder.addColumnDouble("setosa");
    outputSchemaBuilder.addColumnDouble("versicolor");
    outputSchemaBuilder.addColumnDouble("virginica");
    Schema outputSchema = outputSchemaBuilder.build();

    Nd4j.getRandom().setSeed(42);

    ServingConfig servingConfig = ServingConfig.builder()
            .httpPort(port)
            .build();

    ModelStep modelPipelineStep = Dl4jStep.builder()
            .path(modelSave.getAbsolutePath())
            .build()
            .setInput(inputSchema)
            .setOutput(outputSchema);

    InferenceConfiguration inferenceConfiguration = InferenceConfiguration.builder()
            .servingConfig(servingConfig)
            .step(modelPipelineStep)
            .build();

    return new JsonObject(inferenceConfiguration.toJson());
}
 
Example 17
@Override
public JsonObject getConfigObject() throws Exception {
    Pair<MultiLayerNetwork, DataNormalization> multiLayerNetwork = getTrainedNetwork();
    File modelSave = new File(temporary.getRoot(), "model.zip");
    ModelSerializer.writeModel(multiLayerNetwork.getFirst(), modelSave, true);

    inputSchema = TrainUtils.getIrisInputSchema();
    Schema outputSchema = getIrisOutputSchema();
    Nd4j.getRandom().setSeed(42);

    TransformProcess.Builder transformProcessBuilder = new TransformProcess.Builder(inputSchema);
    for (int i = 0; i < inputSchema.numColumns(); i++) {
        transformProcessBuilder.convertToDouble(inputSchema.getName(i));
    }

    TransformProcess transformProcess = transformProcessBuilder.build();

    TransformProcessStep transformStep = new TransformProcessStep(transformProcess, outputSchema);

    ServingConfig servingConfig = ServingConfig.builder()
            .httpPort(port)
            .build();

    ModelStep modelStepConfig = Dl4jStep.builder().path(modelSave.getAbsolutePath()).build()
            .setInput(inputSchema)
            .setOutput(outputSchema);

    InferenceConfiguration inferenceConfiguration = InferenceConfiguration.builder()
            .servingConfig(servingConfig)
            .step(transformStep)
            .step(modelStepConfig)
            .build();

    System.out.println(inferenceConfiguration.toJson());
    return new JsonObject(inferenceConfiguration.toJson());
}
 
Example 18
Source Project: konduit-serving   Source File: PortsTest.java    License: Apache License 2.0
private static String trainAndSaveModel() throws Exception {
    Pair<MultiLayerNetwork, DataNormalization> multiLayerNetwork = TrainUtils.getTrainedNetwork();
    File modelSave = folder.newFile("model.zip");
    ModelSerializer.writeModel(multiLayerNetwork.getFirst(), modelSave, false);

    return modelSave.getAbsolutePath();
}
 
Example 19
private INDArray imageFileToMatrix(File file) throws IOException {
    NativeImageLoader loader = new NativeImageLoader(224, 224, 3);
    INDArray image = loader.asMatrix(new FileInputStream(file));
    DataNormalization dataNormalization = new VGG16ImagePreProcessor();
    dataNormalization.transform(image);
    return image;
}
 
Example 20
Source Project: twse-captcha-solver-dl4j   Source File: SolverDL4j.java    License: MIT License
/**
 * Describe <code>loadImage</code> method here.
 *
 * @param path a <code>File</code> value
 * @return an <code>INDArray</code> value
 * @exception IOException if an error occurs
 */
private INDArray loadImage(File path) throws IOException {
  int height = 60;
  int width = 200;
  int channels = 1;

  // height, width, channels
  NativeImageLoader loader = new NativeImageLoader(height, width, channels);
  INDArray image = loader.asMatrix(path);

  DataNormalization scaler = new ImagePreProcessingScaler(0, 1);
  scaler.transform(image);

  return image;
}
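
A hedged usage sketch for loadImage; the file path is a placeholder and model stands for a trained MultiLayerNetwork:

  INDArray input = loadImage(new File("captcha.png"));   // placeholder path
  int predicted = model.predict(input)[0];               // index of the most likely class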
 
Example 21
Source Project: java-ml-projects   Source File: ImageClassifier.java    License: Apache License 2.0
public ImageClassifier(Model model, List<String> labels, int width, int height, int channels,
		DataNormalization normalization) {
	this.labels = labels;
	this.width = width;
	this.height = height;
	this.channels = channels;
	this.normalization = normalization;
	this.dl4jModel = model;
}
 
Example 22
Source Project: dl4j-tutorials   Source File: Predict.java    License: MIT License
public static void main(String[] args) throws Exception {
    String testPath = "data/test";
    File testDir = new File(testPath);
    File[] files = testDir.listFiles();

    Pair<MultiLayerNetwork, Normalizer> modelAndNormalizer = ModelSerializer
            .restoreMultiLayerNetworkAndNormalizer(new File("model/AlexNet.zip"), false);

    NativeImageLoader imageLoader = new NativeImageLoader(256, 256, 3);

    MultiLayerNetwork network = modelAndNormalizer.getFirst();
    DataNormalization normalizer = (DataNormalization) modelAndNormalizer.getSecond();

    Map<Integer, String> map = new HashMap<>();
    map.put(0, "CITY");
    map.put(1, "DESERT");
    map.put(2, "FARMLAND");
    map.put(3, "LAKE");
    map.put(4, "MOUNTAIN");
    map.put(5, "OCEAN");

    for (File file : files) {
        INDArray indArray = imageLoader.asMatrix(file);
        normalizer.transform(indArray);

        int[] values = network.predict(indArray);
        String label = map.get(values[0]);

        System.out.println(file.getName() + "," + label);
    }
}
 
Example 23
Source Project: DataVec   Source File: NormalizationTests.java    License: Apache License 2.0
@Test
public void testMeanStdZeros() {
    List<List<Writable>> data = new ArrayList<>();
    Schema.Builder builder = new Schema.Builder();
    int numColumns = 6;
    for (int i = 0; i < numColumns; i++)
        builder.addColumnDouble(String.valueOf(i));

    for (int i = 0; i < 5; i++) {
        List<Writable> record = new ArrayList<>(numColumns);
        data.add(record);
        for (int j = 0; j < numColumns; j++) {
            record.add(new DoubleWritable(1.0));
        }
    }

    INDArray arr = RecordConverter.toMatrix(data);

    Schema schema = builder.build();
    JavaRDD<List<Writable>> rdd = sc.parallelize(data);
    DataRowsFacade dataFrame = DataFrames.toDataFrame(schema, rdd);

    //assert equivalent to the ndarray pre processing
    NormalizerStandardize standardScaler = new NormalizerStandardize();
    standardScaler.fit(new DataSet(arr.dup(), arr.dup()));
    INDArray standardScalered = arr.dup();
    standardScaler.transform(new DataSet(standardScalered, standardScalered));
    DataNormalization zeroToOne = new NormalizerMinMaxScaler();
    zeroToOne.fit(new DataSet(arr.dup(), arr.dup()));
    INDArray zeroToOnes = arr.dup();
    zeroToOne.transform(new DataSet(zeroToOnes, zeroToOnes));
    List<Row> rows = Normalization.stdDevMeanColumns(dataFrame, dataFrame.get().columns());
    INDArray assertion = DataFrames.toMatrix(rows);
    //compare standard deviation
    assertTrue(standardScaler.getStd().equalsWithEps(assertion.getRow(0), 1e-1));
    //compare mean
    assertTrue(standardScaler.getMean().equalsWithEps(assertion.getRow(1), 1e-1));
}
 
Example 24
public void trainModel(MultiLayerNetwork model, boolean invertColors, List<InputStream> customImage,
        List<Integer> customLabel) throws Exception {

    List<INDArray> extraFeatures = new LinkedList<>();
    List<Integer> extraLabels = new LinkedList<>();
    for (int i = 0; i < customImage.size(); i++) {
        NativeImageLoader loader = new NativeImageLoader(width, height, channels);
        DataNormalization scaler = invertColors ? new ImagePreProcessingScaler(1, 0) : new ImagePreProcessingScaler(0, 1);
        INDArray feature = loader.asMatrix(customImage.get(i));
        scaler.transform(feature);
        extraFeatures.add(feature);
        extraLabels.add(customLabel.get(i));
    }
    trainModel(model, extraFeatures, extraLabels);
}
 
Example 25
private String predict(MultiLayerNetwork model, INDArray nd, boolean invertColors) {
    // Invert black and white if requested by swapping the scaling range
    DataNormalization scaler = new ImagePreProcessingScaler(invertColors ? 1 : 0, invertColors ? 0 : 1);
    scaler.transform(nd);
    preprocess(nd);
    int p = model.predict(nd)[0];
    System.out.println("prediction = " + p);
    return String.valueOf(p);
}