org.deeplearning4j.datasets.iterator.AsyncDataSetIterator Java Examples

The following examples show how to use org.deeplearning4j.datasets.iterator.AsyncDataSetIterator. They are taken from open source projects; the source file and project license are noted above each example.
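AsyncDataSetIterator wraps an existing DataSetIterator and prefetches batches on a background thread, so the training loop does not stall waiting for the next batch. Before the project examples, here is a minimal sketch of the wrapping pattern; the class name, source iterator, and queue size are illustrative placeholders, not taken from any example below.

import org.deeplearning4j.datasets.iterator.AsyncDataSetIterator;
import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;

public class AsyncWrapSketch {

    // Wrap any DataSetIterator so batches are prefetched on a background thread.
    // queueSize bounds how many batches may be buffered ahead of the consumer.
    public static DataSetIterator wrapAsync(DataSetIterator source, int queueSize) {
        return new AsyncDataSetIterator(source, queueSize);
    }
}

Because the wrapper owns a background thread, call shutdown() on it once iteration is finished, as Example #1 does.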
Example #1
Source File: TestInstantiation.java    From deeplearning4j with Apache License 2.0
public static void runTest(ZooModel model, String modelName, int numClasses) throws Exception {
    ignoreIfCuda();
    int gridWidth = -1;
    int gridHeight = -1;
    if (modelName.equals("TinyYOLO") || modelName.equals("YOLO2")) {
        int[] inputShapes = model.metaData().getInputShape()[0];
        gridWidth = DarknetHelper.getGridWidth(inputShapes);
        gridHeight = DarknetHelper.getGridHeight(inputShapes);
        // YOLO labels carry 4 bounding-box channels in addition to the class channels
        numClasses += 4;
    }

    // set up a synthetic benchmark data iterator (minibatch size 8) matching the model's input shape
    int[] inputShape = model.metaData().getInputShape()[0];
    DataSetIterator iter = new BenchmarkDataSetIterator(
            new int[]{8, inputShape[0], inputShape[1], inputShape[2]}, numClasses, 1,
            gridWidth, gridHeight);

    Model initializedModel = model.init();
    // wrap the iterator so batches are prefetched on a background thread
    AsyncDataSetIterator async = new AsyncDataSetIterator(iter);
    if (initializedModel instanceof MultiLayerNetwork) {
        ((MultiLayerNetwork) initializedModel).fit(async);
    } else {
        ((ComputationGraph) initializedModel).fit(async);
    }
    // stop the background prefetch thread
    async.shutdown();

    // clean up for current model: drop references and free off-heap workspace memory
    model = null;
    initializedModel = null;
    async = null;
    iter = null;
    Nd4j.getWorkspaceManager().destroyAllWorkspacesForCurrentThread();
    System.gc();
    Thread.sleep(1000);
    System.gc();
}
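The explicit nulling, workspace destruction, and repeated System.gc() calls at the end appear deliberate: ND4J holds large buffers off-heap, and a test like this typically runs several zoo models back to back, so each run tries to return memory before the next model is instantiated.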
 
Example #2
Source File: DataSetIteratorHelper.java    From Java-Deep-Learning-Cookbook with MIT License
public static DataSetIterator trainIteratorFeaturized() {
    // load pre-saved featurized training batches and wrap them for asynchronous prefetch
    DataSetIterator trainIter = new ExistingMiniBatchDataSetIterator(
            new File("{PATH-TO-SAVE-TRAIN-SAMPLES}"), "churn-" + featurizeExtractionLayer + "-train-%d.bin");
    return new AsyncDataSetIterator(trainIter);
}
 
Example #3
Source File: DataSetIteratorHelper.java    From Java-Deep-Learning-Cookbook with MIT License
public static DataSetIterator testIteratorFeaturized() {
    // load pre-saved featurized test batches and wrap them for asynchronous prefetch
    DataSetIterator testIter = new ExistingMiniBatchDataSetIterator(
            new File("{PATH-TO-SAVE-TEST-SAMPLES}"), "churn-" + featurizeExtractionLayer + "-test-%d.bin");
    return new AsyncDataSetIterator(testIter);
}
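These two helpers pair up: both read mini-batches that an earlier featurization step (up to the layer named by featurizeExtractionLayer) saved to disk, and both wrap the file-backed ExistingMiniBatchDataSetIterator in an AsyncDataSetIterator so disk reads overlap with training. The {PATH-TO-SAVE-TRAIN-SAMPLES} and {PATH-TO-SAVE-TEST-SAMPLES} placeholders must be replaced with real directories before use.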
 
Example #4
Source File: DL4JSentimentAnalysisExample.java    From Java-for-Data-Science with MIT License
public static void main(String[] args) throws Exception {

    getModelData();

    System.out.println("Total memory = " + Runtime.getRuntime().totalMemory());

    int batchSize = 50;
    int vectorSize = 300;               // dimensionality of the Google News word vectors
    int nEpochs = 5;
    int truncateReviewsToLength = 300;  // reviews longer than this are truncated

    // LSTM sentiment classifier, configured with the pre-1.0 DL4J builder API
    MultiLayerConfiguration sentimentNN = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).iterations(1)
            .updater(Updater.RMSPROP)
            .regularization(true).l2(1e-5)
            .weightInit(WeightInit.XAVIER)
            .gradientNormalization(GradientNormalization.ClipElementWiseAbsoluteValue).gradientNormalizationThreshold(1.0)
            .learningRate(0.0018)
            .list()
            .layer(0, new GravesLSTM.Builder().nIn(vectorSize).nOut(200)
                    .activation("softsign").build())
            .layer(1, new RnnOutputLayer.Builder().activation("softmax")
                    .lossFunction(LossFunctions.LossFunction.MCXENT).nIn(200).nOut(2).build())
            .pretrain(false).backprop(true).build();

    MultiLayerNetwork net = new MultiLayerNetwork(sentimentNN);
    net.init();
    net.setListeners(new ScoreIterationListener(1));

    // load the pre-trained Google News vectors and wrap both review iterators for
    // asynchronous prefetch; the second constructor argument is the prefetch queue size
    WordVectors wordVectors = WordVectorSerializer.loadGoogleModel(new File(GNEWS_VECTORS_PATH), true, false);
    DataSetIterator trainData = new AsyncDataSetIterator(new SentimentExampleIterator(EXTRACT_DATA_PATH, wordVectors, batchSize, truncateReviewsToLength, true), 1);
    DataSetIterator testData = new AsyncDataSetIterator(new SentimentExampleIterator(EXTRACT_DATA_PATH, wordVectors, 100, truncateReviewsToLength, false), 1);

    for (int i = 0; i < nEpochs; i++) {
        net.fit(trainData);
        trainData.reset();

        // evaluate on the test set after each epoch, respecting the sequence masks
        Evaluation evaluation = new Evaluation();
        while (testData.hasNext()) {
            DataSet t = testData.next();
            INDArray dataFeatures = t.getFeatureMatrix();
            INDArray dataLabels = t.getLabels();
            INDArray inMask = t.getFeaturesMaskArray();
            INDArray outMask = t.getLabelsMaskArray();
            INDArray predicted = net.output(dataFeatures, false, inMask, outMask);

            evaluation.evalTimeSeries(dataLabels, predicted, outMask);
        }
        testData.reset();

        System.out.println(evaluation.stats());
    }
}
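Both iterators here are constructed with a queue size of 1, so only a single batch is buffered ahead of the loop; a larger queue trades memory for more aggressive prefetching. Note that this example targets the pre-1.0 DL4J API (iterations(1), the Updater enum, getFeatureMatrix()); current releases replace these with updater instances such as RmsProp and with DataSet.getFeatures().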