Java Code Examples for org.deeplearning4j.nn.graph.ComputationGraph

The following examples show how to use org.deeplearning4j.nn.graph.ComputationGraph. These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
/**
 * Attempts to download weights for the given zoo model.
 * @param zooModel Model to try download weights for
 * @return new ComputationGraph initialized with the given PretrainedType, or
 *         {@code null} if the weights could not be downloaded or initialised
 */
protected ComputationGraph downloadWeights(org.deeplearning4j.zoo.ZooModel zooModel) {
    try {
        log.info(String.format("Downloading %s weights", m_pretrainedType));
        Object pretrained = zooModel.initPretrained(m_pretrainedType.getBackend());
        if (pretrained == null) {
            // Specific unchecked type instead of raw Exception; still caught below.
            throw new IllegalStateException("Error while initialising model");
        }
        if (pretrained instanceof MultiLayerNetwork) {
            // Some zoo models are sequential networks; convert them to a graph.
            return ((MultiLayerNetwork) pretrained).toComputationGraph();
        } else {
            return (ComputationGraph) pretrained;
        }
    } catch (Exception ex) {
        // Log with full stack trace via the logger instead of printStackTrace().
        log.error("Failed to download/initialise pretrained weights", ex);
        return null;
    }
}
 
Example 2
Source Project: deeplearning4j   Source File: TestVertxUI.java    License: Apache License 2.0 6 votes vote down vote up
@Test
public void testUICompGraph() {
    // Stats storage backing the UI, attached to the singleton UI server.
    StatsStorage statsStorage = new InMemoryStatsStorage();
    UIServer server = UIServer.getInstance();
    server.attach(statsStorage);

    // Tiny two-layer graph: dense(4->4, tanh) followed by a softmax output (4->3).
    ComputationGraphConfiguration config = new NeuralNetConfiguration.Builder()
            .graphBuilder()
            .addInputs("in")
            .addLayer("L0",
                    new DenseLayer.Builder().activation(Activation.TANH).nIn(4).nOut(4).build(),
                    "in")
            .addLayer("L1",
                    new OutputLayer.Builder()
                            .lossFunction(LossFunctions.LossFunction.MCXENT)
                            .activation(Activation.SOFTMAX)
                            .nIn(4).nOut(3)
                            .build(),
                    "L1".equals("L1") ? "L0" : "L0")
            .setOutputs("L1")
            .build();

    ComputationGraph network = new ComputationGraph(config);
    network.init();

    // Report stats to the UI storage and scores every iteration.
    network.setListeners(new StatsListener(statsStorage), new ScoreIterationListener(1));

    // Full Iris dataset in a single batch of 150.
    DataSetIterator irisIter = new IrisDataSetIterator(150, 150);

    for (int epoch = 0; epoch < 100; epoch++) {
        network.fit(irisIter);
    }
}
 
Example 3
Source Project: deeplearning4j   Source File: ScoreUtil.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Score based on the loss function.
 * @param model the model to score with
 * @param testData the test data to score
 * @param average whether to average the score
 *                for the whole batch or not
 * @return the score for the given test set (0.0 for an empty iterator when averaging)
 */
public static double score(ComputationGraph model, MultiDataSetIterator testData, boolean average) {
    //TODO: do this properly taking into account division by N, L1/L2 etc
    double sumScore = 0.0;
    // BUG FIX: was int; accumulating long example counts into an int both
    // narrows silently (compound assignment) and can overflow on large sets.
    long totalExamples = 0;
    while (testData.hasNext()) {
        MultiDataSet ds = testData.next();
        long numExamples = ds.getFeatures(0).size(0);
        // Weight each minibatch score by its example count.
        sumScore += numExamples * model.score(ds);
        totalExamples += numExamples;
    }

    if (!average)
        return sumScore;
    // BUG FIX: guard against an empty iterator; 0.0/0 would return NaN.
    if (totalExamples == 0)
        return 0.0;
    return sumScore / totalExamples;
}
 
Example 4
Source Project: jstarcraft-ai   Source File: GraphTestCase.java    License: Apache License 2.0 6 votes vote down vote up
private ComputationGraph getOldFunction() {
    // Network-level settings: fixed random seed plus L1/L2 regularization
    // applied to both weights and biases, Xavier-uniform init, plain SGD.
    NeuralNetConfiguration.Builder builder = new NeuralNetConfiguration.Builder();
    builder.seed(6); // set the random seed
    builder.setL1(l1Regularization);
    builder.setL1Bias(l1Regularization);
    builder.setL2(l2Regularization);
    builder.setL2Bias(l2Regularization);
    builder.weightInit(WeightInit.XAVIER_UNIFORM);
    builder.updater(new Sgd(learnRatio)).optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT);

    // Two embedding branches (5 -> 5 each, identity activation) merged into a
    // single 10-wide vector feeding an MSE regression output.
    GraphBuilder graphBuilder = builder.graphBuilder()
            .addInputs("leftInput", "rightInput")
            .addLayer("leftEmbed", new EmbeddingLayer.Builder().nIn(5).nOut(5).hasBias(true).activation(Activation.IDENTITY).build(), "leftInput")
            .addLayer("rightEmbed", new EmbeddingLayer.Builder().nIn(5).nOut(5).hasBias(true).activation(Activation.IDENTITY).build(), "rightInput")
            .addVertex("embed", new MergeVertex(), "leftEmbed", "rightEmbed")
            .addLayer("output", new OutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).nIn(10).nOut(1).build(), "embed")
            .setOutputs("output");

    ComputationGraph network = new ComputationGraph(graphBuilder.build());
    network.init();
    return network;
}
 
Example 5
/**
 * Back-propagates the style cost for every configured style layer (in parallel)
 * and sums the weighted gradients w.r.t. the generated image.
 *
 * @param vgg16FineTune           network used for back-propagation
 * @param StyleActivationsGramMap per-layer Gram matrices of the style image activations
 * @param generatedActivationsMap per-layer activations of the generated image
 * @return sum of the weighted style gradients, shape [1, CHANNELS, HEIGHT, WIDTH]
 * @throws Exception if interrupted while waiting for the workers
 */
private INDArray backPropagateStyles(ComputationGraph vgg16FineTune,
                                     HashMap<String, INDArray> StyleActivationsGramMap,
                                     Map<String, INDArray> generatedActivationsMap) throws Exception {
    INDArray styleBackProb = Nd4j.zeros(new int[]{1, CHANNELS, HEIGHT, WIDTH});
    CountDownLatch countDownLatch = new CountDownLatch(STYLE_LAYERS.length);

    for (String styleLayer : STYLE_LAYERS) {
        // Each STYLE_LAYERS entry is "layerName,weight".
        String[] split = styleLayer.split(",");
        String styleLayerName = split[0];
        INDArray styleGramValues = StyleActivationsGramMap.get(styleLayerName);
        INDArray generatedValues = generatedActivationsMap.get(styleLayerName);
        double weight = Double.parseDouble(split[1]);
        int index = findLayerIndex(styleLayerName);
        executorService.execute(() -> {
            try {
                INDArray dStyleValues = styleCostFunction.styleContentFunctionDerivative(styleGramValues, generatedValues).transpose();
                INDArray backProb = backPropagate(vgg16FineTune, dStyleValues.reshape(generatedValues.shape()), index).muli(weight);
                // BUG FIX: addi mutates the shared accumulator from multiple worker
                // threads; serialise the updates so none are lost.
                synchronized (styleBackProb) {
                    styleBackProb.addi(backProb);
                }
            } finally {
                // BUG FIX: count down even when a worker throws, so await() cannot hang.
                countDownLatch.countDown();
            }
        });
    }
    countDownLatch.await();
    return styleBackProb;
}
 
Example 6
Source Project: deeplearning4j   Source File: ReverseTimeSeriesVertex.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Creates the vertex and resolves the index of the (optional) network input
 * whose mask is to be used.
 *
 * @param graph       graph this vertex belongs to
 * @param name        name of this vertex
 * @param vertexIndex index of this vertex within the graph
 * @param inputName   name of the network input whose mask applies; may be
 *                    {@code null}, in which case no mask is used
 * @param dataType    data type for this vertex
 * @throws IllegalArgumentException if {@code inputName} is non-null but not
 *                                  among the configured network inputs
 */
public ReverseTimeSeriesVertex(ComputationGraph graph, String name, int vertexIndex, String inputName, DataType dataType) {
    super(graph, name, vertexIndex, null, null, dataType);
    this.inputName = inputName;


    if (inputName == null) {
        // Don't use masks
        this.inputIdx = -1;
    } else {
        // Find the given input; indexOf yields -1 when absent, which we reject below
        this.inputIdx = graph.getConfiguration().getNetworkInputs().indexOf(inputName);
        if (inputIdx == -1)
            throw new IllegalArgumentException("Invalid input name: \"" + inputName + "\" not found in list "
                    + "of network inputs (" + graph.getConfiguration().getNetworkInputs() + ")");
    }
}
 
Example 7
Source Project: deeplearning4j   Source File: DL4jServlet.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Runs inference on a single {@link MultiDataSet} and adapts the raw network
 * output to the servlet's result type {@code O}.
 *
 * @param mds features (and optional feature masks) to run through the model
 * @return adapted inference result, or {@code null} if the model is neither a
 *         ComputationGraph nor a MultiLayerNetwork
 */
private O process(MultiDataSet mds) {
    O result = null;
    if (parallelEnabled) {
        // Parallel inference path: forward features + masks directly.
        result = inferenceAdapter.apply(parallelInference.output(mds.getFeatures(), mds.getFeaturesMaskArrays()));
    } else {
        // Single shared model instance: serialise access to it.
        synchronized (this) {
            if (model instanceof ComputationGraph)
                result = inferenceAdapter.apply(((ComputationGraph) model).output(false, mds.getFeatures(), mds.getFeaturesMaskArrays()));
            else if (model instanceof MultiLayerNetwork) {
                // MultiLayerNetwork takes a single input array, so require at
                // least one feature (or mask) array before indexing [0].
                Preconditions.checkArgument(mds.getFeatures().length > 0 || (mds.getFeaturesMaskArrays() != null && mds.getFeaturesMaskArrays().length > 0),
                        "Input data for MultilayerNetwork is invalid!");
                result = inferenceAdapter.apply(((MultiLayerNetwork) model).output(mds.getFeatures()[0], false,
                        mds.getFeaturesMaskArrays() != null ? mds.getFeaturesMaskArrays()[0] : null, null));
            }
        }
    }
    // NOTE(review): result stays null for unsupported model types — confirm callers handle that.
    return result;
}
 
Example 8
/**
 * Builds the computation graph defined by the network configuration and the
 * list of layers, inferring the input type from the first training batch and
 * sizing the final layer to the number of classes.
 *
 * @throws Exception if the first batch cannot be retrieved or graph setup fails
 */
protected void createModel() throws Exception {
  final INDArray features = getFirstBatchFeatures(trainData);
  ComputationGraphConfiguration.GraphBuilder gb =
      netConfig.builder().seed(getSeed()).graphBuilder();

  // Set output size: the last layer must emit one unit per class.
  final Layer lastLayer = layers[layers.length - 1];
  final int nOut = trainData.numClasses();
  if (lastLayer instanceof FeedForwardLayer) {
    ((FeedForwardLayer) lastLayer).setNOut(nOut);
  }

  // Text-CNN input needs a dedicated layer wiring; everything else uses the default.
  if (getInstanceIterator() instanceof CnnTextEmbeddingInstanceIterator) {
    makeCnnTextLayerSetup(gb);
  } else {
    makeDefaultLayerSetup(gb);
  }

  // Let DL4J infer the input type (and thus layer input sizes) from real data.
  gb.setInputTypes(InputType.inferInputType(features));
  ComputationGraphConfiguration conf =
      gb.build();
  ComputationGraph model = new ComputationGraph(conf);
  model.init();
  this.model = model;
}
 
Example 9
Source Project: wekaDeeplearning4j   Source File: Dl4jVGG.java    License: GNU General Public License v3.0 6 votes vote down vote up
/**
 * Builds the zoo model for the configured VGG variation and attempts to load
 * pretrained weights for it.
 *
 * @param numLabels  number of output classes
 * @param seed       random seed forwarded to the weight-loading helper
 * @param shape      input shape for the network
 * @param filterMode forwarded to the weight-loading helper
 * @return initialised ComputationGraph
 * @throws IllegalStateException if the variation is neither VGG16 nor VGG19
 */
public ComputationGraph init(int numLabels, long seed, int[] shape, boolean filterMode) {
    ZooModel net;
    if (m_variation == VGG.VARIATION.VGG16) {
        net = org.deeplearning4j.zoo.model.VGG16.builder()
                .cacheMode(CacheMode.NONE)
                .workspaceMode(Preferences.WORKSPACE_MODE)
                .inputShape(shape)
                .numClasses(numLabels)
                .build();
    } else if (m_variation == VGG.VARIATION.VGG19) {
        net = org.deeplearning4j.zoo.model.VGG19.builder()
                .cacheMode(CacheMode.NONE)
                .workspaceMode(Preferences.WORKSPACE_MODE)
                .inputShape(shape)
                .numClasses(numLabels)
                .build();
    } else {
        // BUG FIX: previously fell through with net == null and crashed with an
        // uninformative NullPointerException at net.init() below.
        throw new IllegalStateException("Unknown VGG variation: " + m_variation);
    }

    ComputationGraph defaultNet = net.init();

    return attemptToLoadWeights(net, defaultNet, seed, numLabels, filterMode);
}
 
Example 10
Source Project: deeplearning4j   Source File: CenterLossOutputLayerTest.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Builds a small graph for center-loss testing: dense(4->5, relu) feeding a
 * center-loss softmax output with the given number of labels and lambda.
 */
private ComputationGraph getGraph(int numLabels, double lambda) {
    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration configuration = new NeuralNetConfiguration.Builder()
            .seed(12345)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .dist(new NormalDistribution(0, 1))
            .updater(new NoOp())
            .graphBuilder()
            .addInputs("input1")
            .addLayer("l1",
                    new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.RELU).build(),
                    "input1")
            .addLayer("lossLayer",
                    new CenterLossOutputLayer.Builder()
                            .lossFunction(LossFunctions.LossFunction.MCXENT)
                            .nIn(5).nOut(numLabels)
                            .lambda(lambda)
                            .activation(Activation.SOFTMAX)
                            .build(),
                    "l1")
            .setOutputs("lossLayer")
            .build();

    ComputationGraph result = new ComputationGraph(configuration);
    result.init();

    return result;
}
 
Example 11
Source Project: deeplearning4j   Source File: KerasLambdaTest.java    License: Apache License 2.0 6 votes vote down vote up
@Test
public void testModelLambdaLayerImport() throws Exception {
    // Register DL4J implementations for the model's Keras Lambda layers up front;
    // the importer needs these registered by name.
    KerasLayer.registerLambdaLayer("lambda_3", new ExponentialLambda());
    KerasLayer.registerLambdaLayer("lambda_4", new TimesThreeLambda());

    String modelPath = "modelimport/keras/examples/lambda/model_lambda.h5";

    try(InputStream is = Resources.asStream(modelPath)) {
        // Copy the classpath resource to a real file: the HDF5 importer takes a filename.
        File modelFile = testDir.newFile("tempModel" + System.currentTimeMillis() + ".h5");
        Files.copy(is, modelFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
        ComputationGraph model = new KerasModel().modelBuilder().modelHdf5Filename(modelFile.getAbsolutePath())
                .enforceTrainingConfig(false).buildModel().getComputationGraph();

        System.out.println(model.summary());
        // Smoke test: forward pass on a 10x784 input (MNIST-shaped, all zeros).
        INDArray input = Nd4j.create(new int[]{10, 784});

        model.output(input);
    } finally {
        KerasLayer.clearLambdaLayers(); // Clear all lambdas, so other tests aren't affected.
    }
}
 
Example 12
/**
 * Imports a Keras .h5 model, smoke-tests it with a forward pass, and saves it
 * as a DL4J .zip ("Keras" prefix, e.g. ResNet50.h5 -> KerasResNet50.zip) in
 * the given output folder. Failures are logged and swallowed.
 */
private static void saveH5File(File modelFile, File outputFolder) {
    try {
        // Default test input: NCHW, 1x3x224x224.
        INDArray testShape = Nd4j.zeros(1, 3, 224, 224);
        String modelName = modelFile.getName();
        Method method = null;
        try {
            // setDefaultCNN2DFormat only exists in a custom dl4j build; resolve it
            // reflectively and bail out of the whole process if it is absent.
            method = InputType.class.getMethod("setDefaultCNN2DFormat", CNN2DFormat.class);
            method.invoke(null, CNN2DFormat.NCHW);
        } catch (NoSuchMethodException ex) {
            System.err.println("setDefaultCNN2DFormat() not found on InputType class... " +
                    "Are you using the custom built deeplearning4j-nn.jar?");
            System.exit(1);
        }

        if (modelName.contains("EfficientNet")) {
            // Fixes for EfficientNet family of models: channels-last input instead.
            testShape = Nd4j.zeros(1, 224, 224, 3);
            method.invoke(null, CNN2DFormat.NHWC);
            // We don't want the resulting .zip files to have 'Fixed' in the name, so we'll strip it off here
            modelName = modelName.replace("Fixed", "");
        }
        ComputationGraph kerasModel = KerasModelImport.importKerasModelAndWeights(modelFile.getAbsolutePath());
        // Forward pass as a sanity check before saving.
        kerasModel.feedForward(testShape, false);
        // e.g. ResNet50.h5 -> KerasResNet50.zip
        modelName = "Keras" + modelName.replace(".h5", ".zip");
        String newZip = Paths.get(outputFolder.getPath(), modelName).toString();
        kerasModel.save(new File(newZip));
        System.out.println("Saved file " + newZip);
    } catch (Exception e) {
        // Best-effort batch conversion: report and continue with the next model.
        System.err.println("\n\nCouldn't save " + modelFile.getName());
        e.printStackTrace();
    }
}
 
Example 13
Source Project: deeplearning4j   Source File: TestTFKerasModelImport.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Imports the same Keras model through both the native Keras wrapper and the
 * DL4J importer, feeds both identical random inputs, and asserts that their
 * outputs match (after reshape to rank 2 and cast to double).
 *
 * @param path path to the Keras model file
 * @throws Exception on import or inference failure
 */
private void testModelImportWithKeras(String path) throws Exception{
    Model kerasModel = new Model(path);
    ComputationGraph dl4jModel = KerasModelImport.importKerasModelAndWeights(path);
    Assert.assertEquals(kerasModel.numInputs(), dl4jModel.getNumInputArrays());
    Assert.assertEquals(kerasModel.numOutputs(), dl4jModel.getNumOutputArrays());
    INDArray[] kerasInputArrays = new INDArray[kerasModel.numInputs()];
    INDArray[] dl4jInputArrays = new INDArray[kerasModel.numInputs()];

    for (int i = 0; i < kerasInputArrays.length; i ++) {
        long[] shape = kerasModel.inputShapeAt(i);
        // Replace unknown (negative) dimensions, e.g. the batch axis, with 1.
        for (int j = 0; j < shape.length; j++) {
            if (shape[j] < 0) {
                shape[j] = 1;
            }
        }

        kerasInputArrays[i] = Nd4j.rand(shape);
        // BUG FIX: dl4jInputArrays was allocated but never populated, so the
        // DL4J model was fed an array of nulls. Both models must see the same input.
        dl4jInputArrays[i] = kerasInputArrays[i];
    }

    INDArray[] kerasOut = kerasModel.predict(kerasInputArrays);
    INDArray[] dl4jOut = dl4jModel.output(dl4jInputArrays);

    Assert.assertEquals(kerasOut.length, dl4jOut.length);

    for (int i = 0; i < kerasOut.length; i++){
        INDArray kerasOutArr = kerasOut[i];
        kerasOutArr = kerasOutArr.reshape(1, -1);// bit of relaxation on shape
        kerasOutArr= kerasOutArr.castTo(DataType.DOUBLE);
        Nd4j.getAffinityManager().ensureLocation(dl4jOut[i], AffinityManager.Location.HOST);
        INDArray dl4jOutArr = dl4jOut[i].reshape(1, -1);
        System.out.println(kerasOutArr.shapeInfoToString());
        System.out.println(dl4jOutArr.shapeInfoToString());
        Assert.assertEquals(kerasOutArr, dl4jOutArr);
    }
}
 
Example 14
Source Project: konduit-serving   Source File: KerasDl4jHandler.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Converts an uploaded Keras model (functional or sequential) into DL4J's
 * serialized format and streams the result back in the HTTP response.
 * Import failures are reported as HTTP 500 with the exception message.
 */
@Override
public void handle(RoutingContext event) {
    File kerasFile = getTmpFileWithContext(event);
    ModelType type = getTypeFromContext(event);
    try {
        ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
        switch (type) {
            case FUNCTIONAL:
                // Functional Keras models map to a ComputationGraph.
                ComputationGraph computationGraph = KerasModelImport.importKerasModelAndWeights(kerasFile.getAbsolutePath());
                ModelSerializer.writeModel(computationGraph, byteArrayOutputStream, true);
                break;
            case SEQUENTIAL:
                // Sequential Keras models map to a MultiLayerNetwork.
                MultiLayerNetwork multiLayerConfiguration = KerasModelImport.importKerasSequentialModelAndWeights(kerasFile.getAbsolutePath());
                ModelSerializer.writeModel(multiLayerConfiguration, byteArrayOutputStream, true);
                break;
        }

        Buffer buffer = Buffer.buffer(byteArrayOutputStream.toByteArray());
        // NOTE(review): the temp response file is never deleted and carries a
        // ".xml" suffix although the payload is DL4J's zip format — confirm intent.
        File newFile = new File("tmpFile-" + UUID.randomUUID().toString() + ".xml");
        FileUtils.writeByteArrayToFile(newFile, buffer.getBytes());
        event.response().sendFile(newFile.getAbsolutePath(), resultHandler -> {
            if (resultHandler.failed()) {
                resultHandler.cause().printStackTrace();
                event.response().setStatusCode(HttpStatus.SC_INTERNAL_SERVER_ERROR);

            } else {
                event.response().setStatusCode(200);
            }
        });

        event.response().exceptionHandler(Throwable::printStackTrace);

    } catch (Exception e) {
        event.response().setStatusCode(HttpStatus.SC_INTERNAL_SERVER_ERROR);
        event.response().setStatusMessage("Error importing model " + e.getMessage());
    }
}
 
Example 15
Source Project: deeplearning4j   Source File: RegressionTest060.java    License: Apache License 2.0 5 votes vote down vote up
@Test
public void regressionTestCGLSTM1() throws Exception {
    // Regression check: restore a ComputationGraph serialized by DL4J 0.6.0 and
    // verify its configuration round-trips — three vertices forming
    // GravesLSTM -> GravesBidirectionalLSTM -> RnnOutputLayer.
    File f = Resources.asFile("regression_testing/060/060_ModelSerializer_Regression_CG_LSTM_1.zip");

    ComputationGraph net = ModelSerializer.restoreComputationGraph(f, true);

    ComputationGraphConfiguration conf = net.getConfiguration();
    assertEquals(3, conf.getVertices().size());

    // Vertex "0": GravesLSTM, tanh, 3->4, element-wise gradient clipping at 1.5.
    GravesLSTM l0 = (GravesLSTM) ((LayerVertex) conf.getVertices().get("0")).getLayerConf().getLayer();
    assertEquals("tanh", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l0.getGradientNormalization());
    assertEquals(1.5, l0.getGradientNormalizationThreshold(), 1e-5);

    // Vertex "1": bidirectional LSTM, softsign, 4->4, same clipping.
    GravesBidirectionalLSTM l1 =
                    (GravesBidirectionalLSTM) ((LayerVertex) conf.getVertices().get("1")).getLayerConf().getLayer();
    assertEquals("softsign", l1.getActivationFn().toString());
    assertEquals(4, l1.getNIn());
    assertEquals(4, l1.getNOut());
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l1.getGradientNormalization());
    assertEquals(1.5, l1.getGradientNormalizationThreshold(), 1e-5);

    // Vertex "2": RNN output layer, softmax over 5 classes with MCXENT loss.
    RnnOutputLayer l2 = (RnnOutputLayer) ((LayerVertex) conf.getVertices().get("2")).getLayerConf().getLayer();
    assertEquals(4, l2.getNIn());
    assertEquals(5, l2.getNOut());
    assertEquals("softmax", l2.getActivationFn().toString());
    assertTrue(l2.getLossFn() instanceof LossMCXENT);
}
 
Example 16
Source Project: deeplearning4j   Source File: KerasModelEndToEndTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * MobileNet: import the functional Keras model and smoke-test a forward pass.
 */
@Test
public void importMobileNet() throws Exception {
    ComputationGraph model = importFunctionalModelH5Test("modelimport/keras/examples/mobilenet/alternative.hdf5");
    // Batch of 10 all-ones inputs, 299x299 with 3 channels.
    INDArray ones = Nd4j.ones(10, 299, 299, 3);
    model.output(ones);
}
 
Example 17
/**
 * Serializes the given graph (including its updater state) into a Buffer.
 * Wraps any serialization failure in an unchecked exception.
 */
@Override
public Buffer saveModel(ComputationGraph model) {
    final ByteArrayOutputStream serialized = new ByteArrayOutputStream();
    try {
        ModelSerializer.writeModel(model, serialized, true);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    return Buffer.buffer(serialized.toByteArray());
}
 
Example 18
/**
 * Writes the model (with updater state) to an in-memory stream and returns
 * the bytes wrapped in a Buffer; IO failures become RuntimeExceptions.
 */
@Override
public Buffer saveModel(ComputationGraph model) {
    final ByteArrayOutputStream sink = new ByteArrayOutputStream();
    try {
        ModelSerializer.writeModel(model, sink, true);
        return Buffer.buffer(sink.toByteArray());
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
 
Example 19
Source Project: wekaDeeplearning4j   Source File: KerasVGG.java    License: GNU General Public License v3.0 5 votes vote down vote up
/**
 * Builds a VGG zoo-model descriptor for the configured variation and delegates
 * weight loading to the shared helper (no pre-initialised graph is supplied).
 */
@Override
public ComputationGraph init(int numLabels, long seed, int[] shape, boolean filterMode) {
    VGG zooModel = new VGG();
    zooModel.setVariation(variation);
    return attemptToLoadWeights(zooModel, null, seed, numLabels, filterMode);
}
 
Example 20
Source Project: deeplearning4j   Source File: KerasModelEndToEndTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * InceptionV3 Keras 2 no top
 */
@Test
public void importInceptionKeras2() throws Exception {
    int[] inputShape = new int[]{299, 299, 3};
    ComputationGraph graph = importFunctionalModelH5Test(
            "modelimport/keras/examples/inception/inception_tf_keras_2.h5", inputShape, false);
    INDArray input = Nd4j.ones(10, 299, 299, 3);        //TF = channels last = NHWC
    graph.output(input);
    System.out.println(graph.summary());
}
 
Example 21
Source Project: deeplearning4j   Source File: TestGraphNodes.java    License: Apache License 2.0 5 votes vote down vote up
@Test
public void testStackVertexEmbedding() {
    Nd4j.getRandom().setSeed(12345);
    GraphVertex unstack = new StackVertex(null, "", -1, Nd4j.dataType());

    // Two 5x1 index inputs (values 0..4) suitable for an embedding layer.
    INDArray in1 = Nd4j.zeros(5, 1);
    INDArray in2 = Nd4j.zeros(5, 1);
    for (int i = 0; i < 5; i++) {
        in1.putScalar(i, 0, i);
        in2.putScalar(i, 0, i);
    }

    INDArray l = Nd4j.rand(5, 5);
    MultiDataSet ds = new org.nd4j.linalg.dataset.MultiDataSet(new INDArray[] {in1, in2}, new INDArray[] {l, l},
                    null, null);


    // Stack both inputs, run them through a shared embedding, then unstack back
    // into two branches, each with its own L2 output layer.
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().graphBuilder().addInputs("in1", "in2")
                    .addVertex("stack", new org.deeplearning4j.nn.conf.graph.StackVertex(), "in1", "in2")
                    .addLayer("1", new EmbeddingLayer.Builder().nIn(5).nOut(5).build(), "stack")
                    .addVertex("unstack1", new org.deeplearning4j.nn.conf.graph.UnstackVertex(0, 2), "1")
                    .addVertex("unstack2", new org.deeplearning4j.nn.conf.graph.UnstackVertex(0, 2), "1")
                    .addLayer("out1", new OutputLayer.Builder().activation(Activation.TANH)
                                    .lossFunction(LossFunctions.LossFunction.L2).nIn(5).nOut(5).build(), "unstack1")
                    .addLayer("out2", new OutputLayer.Builder().activation(Activation.TANH)
                                    .lossFunction(LossFunctions.LossFunction.L2).nIn(5).nOut(5).build(), "unstack2")
                    .setOutputs("out1", "out2").build();

    ComputationGraph g = new ComputationGraph(conf);
    g.init();

    // Smoke test: forward pass and one fit step must complete without error.
    g.feedForward(new INDArray[] {in1, in2}, false);

    g.fit(ds);

}
 
Example 22
Source Project: deeplearning4j   Source File: NetworkUtils.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Applies the new learning rate (or schedule) to every layer of the graph,
 * then refreshes the updater once at the end.
 */
private static void setLearningRate(ComputationGraph net, double newLr, ISchedule lrSchedule) {
    for (org.deeplearning4j.nn.api.Layer layer : net.getLayers()) {
        // Per-layer overload; defer the updater refresh until all layers are done.
        setLearningRate(net, layer.conf().getLayer().getLayerName(), newLr, lrSchedule, false);
    }
    refreshUpdater(net);
}
 
Example 23
/**
 * Builds a ResNet zoo-model descriptor for the configured variation and
 * delegates weight loading to the shared helper (no default graph supplied).
 */
@Override
public ComputationGraph init(int numLabels, long seed, int[] shape, boolean filterMode) {
    ResNet zooModel = new ResNet();
    zooModel.setVariation(variation);
    return attemptToLoadWeights(zooModel, null, seed, numLabels, filterMode);
}
 
Example 24
Source Project: deeplearning4j   Source File: YOLO2.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Instantiates and initialises a ComputationGraph from this model's configuration.
 */
@Override
public ComputationGraph init() {
    ComputationGraph graph = new ComputationGraph(conf());
    graph.init();
    return graph;
}
 
Example 25
/**
 * Spark score function: rebuilds the network from its JSON config on the
 * executor, loads the broadcast parameters, and returns per-minibatch
 * (exampleCount, exampleCount * score) pairs for later aggregation.
 */
@Override
public Iterator<Tuple2<Long, Double>> call(Iterator<MultiDataSet> dataSetIterator) throws Exception {
    if (!dataSetIterator.hasNext()) {
        // Empty partition: contribute a neutral (0, 0.0) element.
        return Collections.singletonList(new Tuple2<>(0L, 0.0)).iterator();
    }

    MultiDataSetIterator iter = new IteratorMultiDataSetIterator(dataSetIterator, minibatchSize); //Does batching where appropriate


    ComputationGraph network = new ComputationGraph(ComputationGraphConfiguration.fromJson(json));
    network.init();
    INDArray val = params.value().unsafeDuplication(); //.value() is shared by all executors on single machine -> OK, as params are not changed in score function
    if (val.length() != network.numParams(false))
        throw new IllegalStateException(
                        "Network did not have same number of parameters as the broadcast set parameters");
    network.setParams(val);

    List<Tuple2<Long, Double>> out = new ArrayList<>();
    while (iter.hasNext()) {
        MultiDataSet ds = iter.next();
        // score(ds, false): no regularization terms added here.
        double score = network.score(ds, false);

        // Weight the score by the number of examples in this minibatch.
        long numExamples = ds.getFeatures(0).size(0);
        out.add(new Tuple2<>(numExamples, score * numExamples));
    }

    // Flush any pending ND4J operations before handing results back to Spark.
    Nd4j.getExecutioner().commit();

    return out.iterator();
}
 
Example 26
Source Project: deeplearning4j   Source File: BaseStatsListener.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Accumulates example and minibatch counters for the given model's stats.
 * The minibatch size is read in a model-type-specific way; unknown model
 * types contribute zero examples but still count as a minibatch.
 */
private void updateExamplesMinibatchesCounts(Model model) {
    ModelInfo info = getModelInfo(model);

    int batchExamples;
    if (model instanceof MultiLayerNetwork) {
        batchExamples = ((MultiLayerNetwork) model).batchSize();
    } else if (model instanceof ComputationGraph) {
        batchExamples = ((ComputationGraph) model).batchSize();
    } else if (model instanceof Layer) {
        batchExamples = ((Layer) model).getInputMiniBatchSize();
    } else {
        batchExamples = 0;
    }

    info.examplesSinceLastReport += batchExamples;
    info.totalExamples += batchExamples;
    info.minibatchesSinceLastReport++;
    info.totalMinibatches++;
}
 
Example 27
Source Project: deeplearning4j   Source File: ElementWiseVertexTest.java    License: Apache License 2.0 5 votes vote down vote up
@Test
public void testElementWiseVertexForwardProduct() {
    int batchsz = 24;
    int featuresz = 17;
    // Graph with an element-wise Product vertex over three inputs; the output
    // is passed through an identity activation layer so it can be read back.
    ComputationGraphConfiguration cgc = new NeuralNetConfiguration.Builder().graphBuilder()
                    .addInputs("input1", "input2", "input3")
                    .addLayer("denselayer",
                                    new DenseLayer.Builder().nIn(featuresz).nOut(1).activation(Activation.IDENTITY)
                                                    .build(),
                                    "input1")
                    /* denselayer is not actually used, but it seems that you _need_ to have trainable parameters, otherwise, you get
                     * Invalid shape: Requested INDArray shape [1, 0] contains dimension size values < 1 (all dimensions must be 1 or more)
                     * at org.nd4j.linalg.factory.Nd4j.checkShapeValues(Nd4j.java:4877)
                     * at org.nd4j.linalg.factory.Nd4j.create(Nd4j.java:4867)
                     * at org.nd4j.linalg.factory.Nd4j.create(Nd4j.java:4820)
                     * at org.nd4j.linalg.factory.Nd4j.create(Nd4j.java:3948)
                     * at org.deeplearning4j.nn.graph.ComputationGraph.init(ComputationGraph.java:409)
                     * at org.deeplearning4j.nn.graph.ComputationGraph.init(ComputationGraph.java:341)
                     */
                    .addVertex("elementwiseProduct", new ElementWiseVertex(ElementWiseVertex.Op.Product), "input1",
                                    "input2", "input3")
                    .addLayer("Product", new ActivationLayer.Builder().activation(Activation.IDENTITY).build(),
                                    "elementwiseProduct")
                    .setOutputs("Product", "denselayer").build();

    ComputationGraph cg = new ComputationGraph(cgc);
    cg.init();


    INDArray input1 = Nd4j.rand(batchsz, featuresz);
    INDArray input2 = Nd4j.rand(batchsz, featuresz);
    INDArray input3 = Nd4j.rand(batchsz, featuresz);

    // Expected result: element-wise product of the three inputs.
    INDArray target = input1.dup().muli(input2).muli(input3);

    // Compare network output against the expected product via a sum-of-squares residual.
    INDArray output = cg.output(input1, input2, input3)[0];
    INDArray squared = output.sub(target.castTo(output.dataType()));
    double rms = squared.mul(squared).sumNumber().doubleValue();
    Assert.assertEquals(0.0, rms, this.epsilon);
}
 
Example 28
/**
 * Parses a Keras model JSON (config only, no weights) and verifies that the
 * resulting graph configuration can be instantiated and initialised.
 */
private void runModelConfigTest(String path) throws Exception {
    try (InputStream jsonStream = Resources.asStream(path)) {
        ComputationGraphConfiguration graphConfig = new KerasModel()
                .modelBuilder()
                .modelJsonInputStream(jsonStream)
                .enforceTrainingConfig(true)
                .buildModel()
                .getComputationGraphConfiguration();
        ComputationGraph graph = new ComputationGraph(graphConfig);
        graph.init();
    }
}
 
Example 29
/**
 * Loads an image, applies the VGG16 pre-processor in place, runs a forward
 * pass (inference mode) and returns the activations of the "embeddings" layer.
 */
private static INDArray getEmbeddings(ComputationGraph vgg16, File image) throws IOException {
    INDArray input = LOADER.asMatrix(image);
    IMAGE_PRE_PROCESSOR.preProcess(input);
    Map<String, INDArray> activations = vgg16.feedForward(input, false);
    return activations.get("embeddings");
}
 
Example 30
/**
 * Evaluates the model on the test set every TEST_INTERVAL epochs, logging the
 * evaluation stats and resetting the iterator for reuse.
 *
 * @param cifar10      trained model to evaluate
 * @param testIterator test data; reset after each evaluation
 * @param iEpoch       current epoch number (evaluation gate)
 * @param modelName    kept for interface compatibility (currently unused)
 * @throws IOException declared for interface compatibility
 */
private void testResults(ComputationGraph cifar10, DataSetIterator testIterator, int iEpoch, String modelName) throws IOException {
    // Cleaned up: removed dead commented-out TestModels code and fixed indentation.
    if (iEpoch % TEST_INTERVAL == 0) {
        Evaluation eval = cifar10.evaluate(testIterator);
        log.info(eval.stats());
        testIterator.reset();
    }
}