Java Code Examples for org.deeplearning4j.nn.conf.ComputationGraphConfiguration

The following examples show how to use org.deeplearning4j.nn.conf.ComputationGraphConfiguration. These examples are extracted from open source projects; where available, the originating project, source file, and license are noted above each example.
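Before the project examples, here is a minimal, self-contained sketch of the typical workflow: configure a graph with NeuralNetConfiguration.Builder and its graphBuilder(), round-trip the resulting ComputationGraphConfiguration through JSON, and initialize a ComputationGraph from it. This sketch is illustrative only; the class name MinimalGraphExample and the layer sizes are arbitrary choices, not taken from any project below.

import org.deeplearning4j.nn.conf.ComputationGraphConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.graph.ComputationGraph;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class MinimalGraphExample {
    public static void main(String[] args) {
        // Build a graph configuration: one input, one hidden layer, one output
        ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
                .seed(42)
                .graphBuilder()
                .addInputs("in")
                .addLayer("hidden", new DenseLayer.Builder().nIn(4).nOut(8)
                        .activation(Activation.TANH).build(), "in")
                .addLayer("out", new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                        .activation(Activation.SOFTMAX).nIn(8).nOut(3).build(), "hidden")
                .setOutputs("out")
                .build();

        // Configurations round-trip through JSON; several examples below rely on this
        String json = conf.toJson();
        ComputationGraphConfiguration restored = ComputationGraphConfiguration.fromJson(json);

        ComputationGraph graph = new ComputationGraph(restored);
        graph.init();
    }
}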
Example 1
Source Project: jstarcraft-ai   Source File: GraphTestCase.java    License: Apache License 2.0
private ComputationGraph getOldFunction() {
    NeuralNetConfiguration.Builder netBuilder = new NeuralNetConfiguration.Builder();
    // Set the random seed
    netBuilder.seed(6);
    netBuilder.setL1(l1Regularization);
    netBuilder.setL1Bias(l1Regularization);
    netBuilder.setL2(l2Regularization);
    netBuilder.setL2Bias(l2Regularization);
    netBuilder.weightInit(WeightInit.XAVIER_UNIFORM);
    netBuilder.updater(new Sgd(learnRatio)).optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT);

    GraphBuilder graphBuilder = netBuilder.graphBuilder();
    graphBuilder.addInputs("leftInput", "rightInput");
    graphBuilder.addLayer("leftEmbed", new EmbeddingLayer.Builder().nIn(5).nOut(5).hasBias(true).activation(Activation.IDENTITY).build(), "leftInput");
    graphBuilder.addLayer("rightEmbed", new EmbeddingLayer.Builder().nIn(5).nOut(5).hasBias(true).activation(Activation.IDENTITY).build(), "rightInput");
    graphBuilder.addVertex("embed", new MergeVertex(), "leftEmbed", "rightEmbed");
    graphBuilder.addLayer("output", new OutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).nIn(10).nOut(1).build(), "embed");
    graphBuilder.setOutputs("output");

    ComputationGraphConfiguration configuration = graphBuilder.build();
    ComputationGraph graph = new ComputationGraph(configuration);
    graph.init();
    return graph;
}
 
Example 2
Source Project: deeplearning4j   Source File: TrainModule.java    License: Apache License 2.0
private TrainModuleUtils.GraphInfo getGraphInfo(Triple<MultiLayerConfiguration,
        ComputationGraphConfiguration, NeuralNetConfiguration> conf) {
    if (conf == null) {
        return null;
    }

    if (conf.getFirst() != null) {
        return TrainModuleUtils.buildGraphInfo(conf.getFirst());
    } else if (conf.getSecond() != null) {
        return TrainModuleUtils.buildGraphInfo(conf.getSecond());
    } else if (conf.getThird() != null) {
        return TrainModuleUtils.buildGraphInfo(conf.getThird());
    } else {
        return null;
    }
}
 
Example 3
@Override
public VariationalAutoencoder getVaeLayer() {
    ComputationGraph network =
                    new ComputationGraph(ComputationGraphConfiguration.fromJson((String) jsonConfig.getValue()));
    network.init();
    INDArray val = ((INDArray) params.value()).unsafeDuplication();
    if (val.length() != network.numParams(false))
        throw new IllegalStateException(
                        "Network did not have same number of parameters as the broadcasted set parameters");
    network.setParams(val);

    Layer l = network.getLayer(0);
    if (!(l instanceof VariationalAutoencoder)) {
        throw new RuntimeException(
                        "Cannot use CGVaeReconstructionErrorWithKeyFunction on network that doesn't have a VAE "
                                        + "layer as layer 0. Layer type: " + l.getClass());
    }
    return (VariationalAutoencoder) l;
}
 
Example 4
/**
 * Build the network defined by the network configuration and the list of layers.
 */
protected void createModel() throws Exception {
  final INDArray features = getFirstBatchFeatures(trainData);
  ComputationGraphConfiguration.GraphBuilder gb =
      netConfig.builder().seed(getSeed()).graphBuilder();

  // Set output size
  final Layer lastLayer = layers[layers.length - 1];
  final int nOut = trainData.numClasses();
  if (lastLayer instanceof FeedForwardLayer) {
    ((FeedForwardLayer) lastLayer).setNOut(nOut);
  }

  if (getInstanceIterator() instanceof CnnTextEmbeddingInstanceIterator) {
    makeCnnTextLayerSetup(gb);
  } else {
    makeDefaultLayerSetup(gb);
  }

  gb.setInputTypes(InputType.inferInputType(features));
  ComputationGraphConfiguration conf = gb.build();
  ComputationGraph model = new ComputationGraph(conf);
  model.init();
  this.model = model;
}
 
Example 5
Source Project: deeplearning4j   Source File: ActorCriticCompGraph.java    License: Apache License 2.0
public void applyGradient(Gradient[] gradient, int batchSize) {
    if (recurrent) {
        // assume a batch size of 1 for recurrent networks,
        // since we are learning each episode as a time series
        batchSize = 1;
    }
    ComputationGraphConfiguration cgConf = cg.getConfiguration();
    int iterationCount = cgConf.getIterationCount();
    int epochCount = cgConf.getEpochCount();
    cg.getUpdater().update(gradient[0], iterationCount, epochCount, batchSize, LayerWorkspaceMgr.noWorkspaces());
    cg.params().subi(gradient[0].gradient());
    Collection<TrainingListener> iterationListeners = cg.getListeners();
    if (iterationListeners != null && iterationListeners.size() > 0) {
        for (TrainingListener listener : iterationListeners) {
            listener.iterationDone(cg, iterationCount, epochCount);
        }
    }
    cgConf.setIterationCount(iterationCount + 1);
}
 
Example 6
Source Project: deeplearning4j   Source File: ModelSerializerTest.java    License: Apache License 2.0
@Test
public void testWriteCGModelInputStream() throws Exception {
    ComputationGraphConfiguration config = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).updater(new Sgd(0.1))
                    .graphBuilder().addInputs("in")
                    .addLayer("dense", new DenseLayer.Builder().nIn(4).nOut(2).build(), "in").addLayer("out",
                                    new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(2).nOut(3)
                                            .activation(Activation.SOFTMAX).build(),
                                    "dense")
                    .setOutputs("out").build();

    ComputationGraph cg = new ComputationGraph(config);
    cg.init();

    File tempFile = tempDir.newFile();

    ModelSerializer.writeModel(cg, tempFile, true);
    FileInputStream fis = new FileInputStream(tempFile);

    ComputationGraph network = ModelSerializer.restoreComputationGraph(fis);

    assertEquals(network.getConfiguration().toJson(), cg.getConfiguration().toJson());
    assertEquals(cg.params(), network.params());
    assertEquals(cg.getUpdater().getStateViewArray(), network.getUpdater().getStateViewArray());
}
 
Example 7
Source Project: deeplearning4j   Source File: CenterLossOutputLayerTest.java    License: Apache License 2.0
private ComputationGraph getGraph(int numLabels, double lambda) {
    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .dist(new NormalDistribution(0, 1)).updater(new NoOp())
                    .graphBuilder().addInputs("input1")
                    .addLayer("l1", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.RELU).build(),
                                    "input1")
                    .addLayer("lossLayer", new CenterLossOutputLayer.Builder()
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).nIn(5).nOut(numLabels)
                                    .lambda(lambda).activation(Activation.SOFTMAX).build(), "l1")
                    .setOutputs("lossLayer").build();

    ComputationGraph graph = new ComputationGraph(conf);
    graph.init();

    return graph;
}
 
Example 8
Source Project: deeplearning4j   Source File: TestVertxUI.java    License: Apache License 2.0
@Test
public void testUICompGraph() {

    StatsStorage ss = new InMemoryStatsStorage();

    UIServer uiServer = UIServer.getInstance();
    uiServer.attach(ss);

    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().graphBuilder().addInputs("in")
                    .addLayer("L0", new DenseLayer.Builder().activation(Activation.TANH).nIn(4).nOut(4).build(),
                                    "in")
                    .addLayer("L1", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
                                    .activation(Activation.SOFTMAX).nIn(4).nOut(3).build(), "L0")
                    .setOutputs("L1").build();

    ComputationGraph net = new ComputationGraph(conf);
    net.init();

    net.setListeners(new StatsListener(ss), new ScoreIterationListener(1));

    DataSetIterator iter = new IrisDataSetIterator(150, 150);

    for (int i = 0; i < 100; i++) {
        net.fit(iter);
    }
}
 
Example 9
Source Project: deeplearning4j   Source File: MiscRegressionTests.java    License: Apache License 2.0
@Test
public void testFrozen() throws Exception {
    File f = new ClassPathResource("regression_testing/misc/legacy_frozen/configuration.json").getFile();
    String json = FileUtils.readFileToString(f, StandardCharsets.UTF_8.name());
    ComputationGraphConfiguration conf = ComputationGraphConfiguration.fromJson(json);

    int countFrozen = 0;
    for(Map.Entry<String,GraphVertex> e : conf.getVertices().entrySet()){
        GraphVertex gv = e.getValue();
        assertNotNull(gv);
        if(gv instanceof LayerVertex){
            LayerVertex lv = (LayerVertex)gv;
            Layer layer = lv.getLayerConf().getLayer();
            if(layer instanceof FrozenLayer)
                countFrozen++;
        }
    }

    assertTrue(countFrozen > 0);
}
 
Example 10
Source Project: deeplearning4j   Source File: TestUtils.java    License: Apache License 2.0
public static ComputationGraph testModelSerialization(ComputationGraph net){

    ComputationGraph restored;
    try {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        ModelSerializer.writeModel(net, baos, true);
        byte[] bytes = baos.toByteArray();

        ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
        restored = ModelSerializer.restoreComputationGraph(bais, true);

        assertEquals(net.getConfiguration(), restored.getConfiguration());
        assertEquals(net.params(), restored.params());
    } catch (IOException e){
        //Should never happen
        throw new RuntimeException(e);
    }

    //Also check the ComputationGraphConfiguration is serializable (required by Spark etc)
    ComputationGraphConfiguration conf = net.getConfiguration();
    serializeDeserializeJava(conf);

    return restored;
}
 
Example 11
Source Project: deeplearning4j   Source File: ModelSerializerTest.java    License: Apache License 2.0
@Test
public void testJavaSerde_1() throws Exception {
    int nIn = 5;
    int nOut = 6;

    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345).l1(0.01)
            .graphBuilder()
            .addInputs("in")
            .layer("0", new OutputLayer.Builder().nIn(nIn).nOut(nOut).build(), "in")
            .setOutputs("0")
            .validateOutputLayerConfig(false)
            .build();

    ComputationGraph net = new ComputationGraph(conf);
    net.init();

    DataSet dataSet = trivialDataSet();
    NormalizerStandardize norm = new NormalizerStandardize();
    norm.fit(dataSet);

    val b = SerializationUtils.serialize(net);

    ComputationGraph restored = SerializationUtils.deserialize(b);

    assertEquals(net, restored);
}
 
Example 12
Source Project: deeplearning4j   Source File: TestCompGraphCNN.java    License: Apache License 2.0
protected static ComputationGraphConfiguration getMultiInputGraphConfig() {
    ComputationGraphConfiguration conf =
                    new NeuralNetConfiguration.Builder()
                                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                                    .graphBuilder().addInputs("input")
                                    .setInputTypes(InputType.convolutional(32, 32, 3))
                                    .addLayer("cnn1",
                                                    new ConvolutionLayer.Builder(4, 4).stride(2, 2).nIn(3).nOut(3)
                                                                    .build(),
                                                    "input")
                                    .addLayer("cnn2",
                                                    new ConvolutionLayer.Builder(4, 4).stride(2, 2).nIn(3).nOut(3)
                                                                    .build(),
                                                    "input")
                                    .addLayer("max1",
                                                    new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX)
                                                                    .stride(1, 1).kernelSize(2, 2).build(),
                                                    "cnn1", "cnn2")
                                    .addLayer("dnn1", new DenseLayer.Builder().nOut(7).build(), "max1")
                                    .addLayer("output", new OutputLayer.Builder().nIn(7).nOut(10).activation(Activation.SOFTMAX).build(), "dnn1")
                                    .setOutputs("output").build();

    return conf;
}
 
Example 13
Source Project: deeplearning4j   Source File: ModelSerializerTest.java    License: Apache License 2.0
@Test
public void testInvalidLoading1() throws Exception {
    ComputationGraphConfiguration config = new NeuralNetConfiguration.Builder()
            .graphBuilder().addInputs("in")
            .addLayer("dense", new DenseLayer.Builder().nIn(4).nOut(2).build(), "in")
            .addLayer("out",new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                            .activation(Activation.SOFTMAX).nIn(2).nOut(3).build(),
                    "dense")
            .setOutputs("out").build();

    ComputationGraph cg = new ComputationGraph(config);
    cg.init();

    File tempFile = tempDir.newFile();

    ModelSerializer.writeModel(cg, tempFile, true);

    try {
        ModelSerializer.restoreMultiLayerNetwork(tempFile);
        fail();
    } catch (Exception e){
        String msg = e.getMessage();
        assertTrue(msg, msg.contains("JSON") && msg.contains("restoreComputationGraph"));
    }
}
 
Example 14
Source Project: deeplearning4j   Source File: ComputationGraphTestRNN.java    License: Apache License 2.0
@Test
public void testTbpttMasking() {
    //Simple "does it throw an exception" type test...
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                    .graphBuilder().addInputs("in")
                    .addLayer("out", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE)
                                    .activation(Activation.IDENTITY).nIn(1).nOut(1).build(), "in")
                    .setOutputs("out").backpropType(BackpropType.TruncatedBPTT).tBPTTForwardLength(8)
                    .tBPTTBackwardLength(8).build();

    ComputationGraph net = new ComputationGraph(conf);
    net.init();

    MultiDataSet data = new MultiDataSet(new INDArray[] {Nd4j.linspace(1, 10, 10, Nd4j.dataType()).reshape(1, 1, 10)},
                    new INDArray[] {Nd4j.linspace(2, 20, 10, Nd4j.dataType()).reshape(1, 1, 10)}, null,
                    new INDArray[] {Nd4j.ones(1, 10)});

    net.fit(data);
}
 
Example 15
Source Project: deeplearning4j   Source File: TestListeners.java    License: Apache License 2.0
@Test
public void testListenersViaModelGraph() {
    TestListener.clearCounts();

    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().graphBuilder()
                    .addInputs("in").addLayer("0",
                                    new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(10).nOut(10)
                                                    .activation(Activation.TANH).build(),
                                    "in")
                    .setOutputs("0").build();

    ComputationGraph model = new ComputationGraph(conf);
    model.init();

    StatsStorage ss = new InMemoryStatsStorage();
    model.setListeners(new TestListener(), new StatsListener(ss));

    testListenersForModel(model, null);

    assertEquals(1, ss.listSessionIDs().size());
    assertEquals(2, ss.listWorkerIDsForSession(ss.listSessionIDs().get(0)).size());
}
 
Example 16
@Override
public VariationalAutoencoder getVaeLayer() {
    ComputationGraph network =
                    new ComputationGraph(ComputationGraphConfiguration.fromJson((String) jsonConfig.getValue()));
    network.init();
    INDArray val = ((INDArray) params.value()).unsafeDuplication();
    if (val.length() != network.numParams(false))
        throw new IllegalStateException(
                        "Network did not have same number of parameters as the broadcasted set parameters");
    network.setParams(val);

    Layer l = network.getLayer(0);
    if (!(l instanceof VariationalAutoencoder)) {
        throw new RuntimeException(
                        "Cannot use CGVaeReconstructionProbWithKeyFunction on network that doesn't have a VAE "
                                        + "layer as layer 0. Layer type: " + l.getClass());
    }
    return (VariationalAutoencoder) l;
}
 
Example 17
@Test
public void testBadTuning() {
    //Test poor tuning (high LR): should terminate on MaxScoreIterationTerminationCondition

    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .updater(new Sgd(2.0)) //Intentionally huge LR
                    .weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
                    .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.IDENTITY)
                                    .lossFunction(LossFunctions.LossFunction.MSE).build(), "in")
                    .setOutputs("0").build();
    ComputationGraph net = new ComputationGraph(conf);
    net.setListeners(new ScoreIterationListener(5));

    JavaRDD<DataSet> irisData = getIris();
    EarlyStoppingModelSaver<ComputationGraph> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<ComputationGraph> esConf = new EarlyStoppingConfiguration.Builder<ComputationGraph>()
                    .epochTerminationConditions(new MaxEpochsTerminationCondition(5000))
                    .iterationTerminationConditions(new MaxTimeIterationTerminationCondition(2, TimeUnit.MINUTES),
                                    new MaxScoreIterationTerminationCondition(7.5)) //Initial score is ~2.5
                    .scoreCalculator(new SparkLossCalculatorComputationGraph(
                                    irisData.map(new DataSetToMultiDataSetFn()), true, sc.sc()))
                    .modelSaver(saver).build();

    TrainingMaster tm = new ParameterAveragingTrainingMaster(true, numExecutors(), 1, 10, 1, 0);

    IEarlyStoppingTrainer<ComputationGraph> trainer = new SparkEarlyStoppingGraphTrainer(getContext().sc(), tm,
                    esConf, net, irisData.map(new DataSetToMultiDataSetFn()));
    EarlyStoppingResult result = trainer.fit();

    assertTrue(result.getTotalEpochs() < 5);
    assertEquals(EarlyStoppingResult.TerminationReason.IterationTerminationCondition,
                    result.getTerminationReason());
    String expDetails = new MaxScoreIterationTerminationCondition(7.5).toString();
    assertEquals(expDetails, result.getTerminationDetails());
}
 
Example 18
Source Project: deeplearning4j   Source File: ParallelInference.java    License: Apache License 2.0
/**
 * This method duplicates model for future use during inference
 */
protected void initializeReplicaModel() {
    if (protoModel instanceof ComputationGraph) {
        if (!rootDevice) {
            this.replicatedModel = new ComputationGraph(ComputationGraphConfiguration
                    .fromJson(((ComputationGraph) protoModel).getConfiguration().toJson()));
            this.replicatedModel.init();

            synchronized (locker) {
                this.replicatedModel.setParams(protoModel.params().unsafeDuplication(true));

                Nd4j.getExecutioner().commit();
            }
        } else {
            this.replicatedModel = protoModel;
        }
    } else if (protoModel instanceof MultiLayerNetwork) {
        if (!rootDevice) {
            this.replicatedModel = new MultiLayerNetwork(MultiLayerConfiguration.fromJson(
                    ((MultiLayerNetwork) protoModel).getLayerWiseConfigurations().toJson()));
            this.replicatedModel.init();

            synchronized (locker) {
                this.replicatedModel.setParams(protoModel.params().unsafeDuplication(true));

                Nd4j.getExecutioner().commit();
            }
        } else {
            this.replicatedModel = protoModel;
        }
    }
}
 
Example 19
Source Project: deeplearning4j   Source File: ElementWiseVertexTest.java    License: Apache License 2.0
@Test
public void testElementWiseVertexForwardAdd() {
    int batchsz = 24;
    int featuresz = 17;
    ComputationGraphConfiguration cgc = new NeuralNetConfiguration.Builder().graphBuilder()
                    .addInputs("input1", "input2", "input3")
                    .addLayer("denselayer",
                                    new DenseLayer.Builder().nIn(featuresz).nOut(1).activation(Activation.IDENTITY)
                                                    .build(),
                                    "input1")
                    /* denselayer is not actually used, but it seems that you _need_ to have trainable parameters, otherwise, you get
                     * Invalid shape: Requested INDArray shape [1, 0] contains dimension size values < 1 (all dimensions must be 1 or more)
                     * at org.nd4j.linalg.factory.Nd4j.checkShapeValues(Nd4j.java:4877)
                     * at org.nd4j.linalg.factory.Nd4j.create(Nd4j.java:4867)
                     * at org.nd4j.linalg.factory.Nd4j.create(Nd4j.java:4820)
                     * at org.nd4j.linalg.factory.Nd4j.create(Nd4j.java:3948)
                     * at org.deeplearning4j.nn.graph.ComputationGraph.init(ComputationGraph.java:409)
                     * at org.deeplearning4j.nn.graph.ComputationGraph.init(ComputationGraph.java:341)
                     */
                    .addVertex("elementwiseAdd", new ElementWiseVertex(ElementWiseVertex.Op.Add), "input1",
                                    "input2", "input3")
                    .addLayer("Add", new ActivationLayer.Builder().activation(Activation.IDENTITY).build(),
                                    "elementwiseAdd")
                    .setOutputs("Add", "denselayer").build();

    ComputationGraph cg = new ComputationGraph(cgc);
    cg.init();


    INDArray input1 = Nd4j.rand(batchsz, featuresz);
    INDArray input2 = Nd4j.rand(batchsz, featuresz);
    INDArray input3 = Nd4j.rand(batchsz, featuresz);

    INDArray target = input1.dup().addi(input2).addi(input3);

    INDArray output = cg.output(input1, input2, input3)[0];
    INDArray squared = output.sub(target.castTo(output.dataType()));
    double rms = squared.mul(squared).sumNumber().doubleValue();
    Assert.assertEquals(0.0, rms, this.epsilon);
}
 
Example 20
private void buildBlock4a(ComputationGraphConfiguration.GraphBuilder graph) {
    convolution2dAndBN(graph, "inception_4a_3x3",
            96, 640, new int[]{1, 1}, new int[]{1, 1},
            192, 96, new int[]{3, 3}, new int[]{1, 1}
            , new int[]{1, 1, 1, 1}, "inception_3c");
    String rel1 = lastReluId();

    convolution2dAndBN(graph, "inception_4a_5x5",
            32, 640, new int[]{1, 1}, new int[]{1, 1},
            64, 32, new int[]{5, 5}, new int[]{1, 1}
            , new int[]{2, 2, 2, 2}, "inception_3c");
    String rel2 = lastReluId();

    graph.addLayer("avg7",
            new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.AVG, new int[]{3, 3},
                    new int[]{3, 3})
                    .convolutionMode(ConvolutionMode.Truncate)
                    .build(),
            "inception_3c");
    convolution2dAndBN(graph, "inception_4a_pool",
            128, 640, new int[]{1, 1}, new int[]{1, 1},
            null, null, null, null
            , new int[]{2, 2, 2, 2}, "avg7");
    String pad1 = lastPaddingId();

    convolution2dAndBN(graph, "inception_4a_1x1",
            256, 640, new int[]{1, 1}, new int[]{1, 1},
            null, null, null, null
            , null, "inception_3c");
    String rel4 = lastReluId();
    graph.addVertex("inception_4a", new MergeVertex(), rel1, rel2, rel4, pad1);

}
 
Example 21
@Override
public Iterator<Tuple2<Long, Double>> call(Iterator<MultiDataSet> dataSetIterator) throws Exception {
    if (!dataSetIterator.hasNext()) {
        return Collections.singletonList(new Tuple2<>(0L, 0.0)).iterator();
    }

    MultiDataSetIterator iter = new IteratorMultiDataSetIterator(dataSetIterator, minibatchSize); //Does batching where appropriate


    ComputationGraph network = new ComputationGraph(ComputationGraphConfiguration.fromJson(json));
    network.init();
    INDArray val = params.value().unsafeDuplication(); //.value() is shared by all executors on single machine -> OK, as params are not changed in score function
    if (val.length() != network.numParams(false))
        throw new IllegalStateException(
                        "Network did not have same number of parameters as the broadcast set parameters");
    network.setParams(val);

    List<Tuple2<Long, Double>> out = new ArrayList<>();
    while (iter.hasNext()) {
        MultiDataSet ds = iter.next();
        double score = network.score(ds, false);

        long numExamples = ds.getFeatures(0).size(0);
        out.add(new Tuple2<>(numExamples, score * numExamples));
    }

    Nd4j.getExecutioner().commit();

    return out.iterator();
}
 
Example 22
private void buildBlock5a(ComputationGraphConfiguration.GraphBuilder graph) {
    convolution2dAndBN(graph, "inception_5a_3x3",
            96, 1024, new int[]{1, 1}, new int[]{1, 1},
            384, 96, new int[]{3, 3}, new int[]{1, 1},
            new int[]{1, 1, 1, 1}, "inception_4e");
    String relu1 = lastReluId();

    graph.addLayer("avg9",
            new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.AVG, new int[]{3, 3},
                    new int[]{3, 3})
                    .convolutionMode(ConvolutionMode.Truncate)
                    .build(),
            "inception_4e");
    convolution2dAndBN(graph, "inception_5a_pool",
            96, 1024, new int[]{1, 1}, new int[]{1, 1},
            null, null, null, null,
            new int[]{1, 1, 1, 1}, "avg9");
    String pad1 = lastPaddingId();

    convolution2dAndBN(graph, "inception_5a_1x1",
            256, 1024, new int[]{1, 1}, new int[]{1, 1},
            null, null, null, null,
            null, "inception_4e");
    String rel3 = lastReluId();

    graph.addVertex("inception_5a", new MergeVertex(), relu1, pad1, rel3);
}
 
Example 23
@Override
protected void createModel() throws Exception {
  final INDArray features = getFirstBatchFeatures(trainData);
  log.info("Feature shape: {}", features.shape());
  ComputationGraphConfiguration.GraphBuilder gb =
      netConfig
          .builder()
          .seed(getSeed())
          .graphBuilder()
          .backpropType(BackpropType.TruncatedBPTT)
          .tBPTTBackwardLength(tBPTTbackwardLength)
          .tBPTTForwardLength(tBPTTforwardLength);

  // Set output size
  final Layer lastLayer = layers[layers.length - 1];
  final int nOut = trainData.numClasses();
  if (lastLayer.getBackend() instanceof RnnOutputLayer) {
    ((weka.dl4j.layers.RnnOutputLayer) lastLayer).setNOut(nOut);
  }

  String currentInput = "input";
  gb.addInputs(currentInput);
  // Collect layers
  for (Layer layer : layers) {
    String lName = layer.getLayerName();
    gb.addLayer(lName, layer.getBackend().clone(), currentInput);
    currentInput = lName;
  }
  gb.setOutputs(currentInput);
  gb.setInputTypes(InputType.inferInputType(features));

  ComputationGraphConfiguration conf = gb.build();
  ComputationGraph model = new ComputationGraph(conf);
  model.init();
  this.model = model;
}
 
Example 24
Source Project: deeplearning4j   Source File: TestDropout.java    License: Apache License 2.0
@Test
public void testSerialization(){

    IDropout[] dropouts = new IDropout[]{
            new Dropout(0.5),
            new AlphaDropout(0.5),
            new GaussianDropout(0.1),
            new GaussianNoise(0.1)};

    for(IDropout id : dropouts) {

        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .dropOut(id)
                .list()
                .layer(new DenseLayer.Builder().nIn(4).nOut(3).build())
                .layer(new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(3).nOut(3).build())
                .build();
        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        TestUtils.testModelSerialization(net);

        ComputationGraphConfiguration conf2 = new NeuralNetConfiguration.Builder()
                .dropOut(id)
                .graphBuilder()
                .addInputs("in")
                .addLayer("0", new DenseLayer.Builder().nIn(4).nOut(3).build(), "in")
                .addLayer("1", new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(3).nOut(3).build(), "0")
                .setOutputs("1")
                .build();

        ComputationGraph net2 = new ComputationGraph(conf2);
        net2.init();

        TestUtils.testModelSerialization(net2);
    }
}
 
Example 25
private void runModelConfigTest(String path) throws Exception {
    try(InputStream is = Resources.asStream(path)) {
        ComputationGraphConfiguration config =
                new KerasModel().modelBuilder().modelJsonInputStream(is)
                        .enforceTrainingConfig(true).buildModel().getComputationGraphConfiguration();
        ComputationGraph model = new ComputationGraph(config);
        model.init();
    }
}
 
Example 26
private void runModelConfigTest(String path) throws Exception {
    try(InputStream is = Resources.asStream(path)) {
        ComputationGraphConfiguration config =
                new KerasModel().modelBuilder().modelJsonInputStream(is)
                        .enforceTrainingConfig(false).buildModel().getComputationGraphConfiguration();
        ComputationGraph model = new ComputationGraph(config);
        model.init();
    }
}
 
Example 27
Source Project: deeplearning4j   Source File: RegressionTest080.java    License: Apache License 2.0
@Test
public void regressionTestCGLSTM1() throws Exception {

    File f = Resources.asFile("regression_testing/080/080_ModelSerializer_Regression_CG_LSTM_1.zip");

    ComputationGraph net = ModelSerializer.restoreComputationGraph(f, true);

    ComputationGraphConfiguration conf = net.getConfiguration();
    assertEquals(3, conf.getVertices().size());

    GravesLSTM l0 = (GravesLSTM) ((LayerVertex) conf.getVertices().get("0")).getLayerConf().getLayer();
    assertTrue(l0.getActivationFn() instanceof ActivationTanH);
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l0.getGradientNormalization());
    assertEquals(1.5, l0.getGradientNormalizationThreshold(), 1e-5);

    GravesBidirectionalLSTM l1 =
                    (GravesBidirectionalLSTM) ((LayerVertex) conf.getVertices().get("1")).getLayerConf().getLayer();
    assertTrue(l1.getActivationFn() instanceof ActivationSoftSign);
    assertEquals(4, l1.getNIn());
    assertEquals(4, l1.getNOut());
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l1.getGradientNormalization());
    assertEquals(1.5, l1.getGradientNormalizationThreshold(), 1e-5);

    RnnOutputLayer l2 = (RnnOutputLayer) ((LayerVertex) conf.getVertices().get("2")).getLayerConf().getLayer();
    assertEquals(4, l2.getNIn());
    assertEquals(5, l2.getNOut());
    assertTrue(l2.getActivationFn() instanceof ActivationSoftmax);
    assertTrue(l2.getLossFn() instanceof LossMCXENT);
}
 
Example 28
Source Project: deeplearning4j   Source File: RegressionTest060.java    License: Apache License 2.0
@Test
public void regressionTestCGLSTM1() throws Exception {

    File f = Resources.asFile("regression_testing/060/060_ModelSerializer_Regression_CG_LSTM_1.zip");

    ComputationGraph net = ModelSerializer.restoreComputationGraph(f, true);

    ComputationGraphConfiguration conf = net.getConfiguration();
    assertEquals(3, conf.getVertices().size());

    GravesLSTM l0 = (GravesLSTM) ((LayerVertex) conf.getVertices().get("0")).getLayerConf().getLayer();
    assertEquals("tanh", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l0.getGradientNormalization());
    assertEquals(1.5, l0.getGradientNormalizationThreshold(), 1e-5);

    GravesBidirectionalLSTM l1 =
                    (GravesBidirectionalLSTM) ((LayerVertex) conf.getVertices().get("1")).getLayerConf().getLayer();
    assertEquals("softsign", l1.getActivationFn().toString());
    assertEquals(4, l1.getNIn());
    assertEquals(4, l1.getNOut());
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l1.getGradientNormalization());
    assertEquals(1.5, l1.getGradientNormalizationThreshold(), 1e-5);

    RnnOutputLayer l2 = (RnnOutputLayer) ((LayerVertex) conf.getVertices().get("2")).getLayerConf().getLayer();
    assertEquals(4, l2.getNIn());
    assertEquals(5, l2.getNOut());
    assertEquals("softmax", l2.getActivationFn().toString());
    assertTrue(l2.getLossFn() instanceof LossMCXENT);
}
 
Example 29
Source Project: deeplearning4j   Source File: NetBroadcastTuple.java    License: Apache License 2.0
public NetBroadcastTuple(MultiLayerConfiguration configuration, ComputationGraphConfiguration graphConfiguration,
                INDArray parameters, INDArray updaterState, AtomicInteger counter) {
    this.configuration = configuration;
    this.graphConfiguration = graphConfiguration;
    this.parameters = parameters;
    this.updaterState = updaterState;
    this.counter = counter;
}