Java Code Examples for org.deeplearning4j.nn.graph.ComputationGraph#output()

The following examples show how to use org.deeplearning4j.nn.graph.ComputationGraph#output(). They are drawn from open source projects; the source file and originating project are noted above each example.
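Before the project examples, here is a minimal, self-contained sketch of the basic call pattern (the graph layout, sizes, and class name are illustrative, not taken from any of the projects below): build and initialize a ComputationGraph, then pass one INDArray per declared input to output(), which returns one INDArray per declared output.

import java.util.Arrays;

import org.deeplearning4j.nn.conf.ComputationGraphConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.graph.ComputationGraph;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class OutputBasics {
    public static void main(String[] args) {
        // One input ("in") feeding one output layer ("out"): 10 features in, 3 classes out
        ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
                .graphBuilder()
                .addInputs("in")
                .addLayer("out", new OutputLayer.Builder()
                        .nIn(10).nOut(3).activation(Activation.SOFTMAX)
                        .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
                .setOutputs("out")
                .build();

        ComputationGraph graph = new ComputationGraph(conf);
        graph.init();

        // output(...) takes one array per graph input and returns one array per graph output
        INDArray features = Nd4j.rand(5, 10);   // minibatch of 5 examples, 10 features each
        INDArray[] predictions = graph.output(features);
        System.out.println(Arrays.toString(predictions[0].shape()));   // [5, 3]
    }
}

For graphs with several inputs, pass the arrays in the order the inputs were declared with addInputs(), as Examples 5 and 6 below do.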
Example 1
Source File: WorkspaceTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testSimpleOutputWorkspace() {
    final MemoryWorkspace workspace = Nd4j.getWorkspaceManager().getWorkspaceForCurrentThread("ExternalTestWorkspace");

    final INDArray input = Nd4j.rand(1, 30);

    final ComputationGraphConfiguration computationGraphConfiguration = new NeuralNetConfiguration.Builder()
            .graphBuilder()
            .addInputs("state")
            .addLayer("value_output", new OutputLayer.Builder().nIn(30).nOut(1).activation(Activation.IDENTITY)
                    .lossFunction(LossFunctions.LossFunction.MSE).build(), "state")
            .setOutputs("value_output")
            .build();

    final ComputationGraph computationGraph = new ComputationGraph(computationGraphConfiguration);
    computationGraph.init();

    try (final MemoryWorkspace ws = workspace.notifyScopeEntered()) {
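        // This overload is output(boolean train, MemoryWorkspace outputWorkspace, INDArray... input):
        // the returned activations are placed in, and are only valid inside, the supplied workspace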
        computationGraph.output(false, ws, input);
    }
}
 
Example 2
Source File: TestInstantiation.java    From deeplearning4j with Apache License 2.0
@Test
public void testInitPretrained() throws IOException {
    ignoreIfCuda();
    ZooModel model = ResNet50.builder().numClasses(0).build(); //num labels doesn't matter since we're getting pretrained imagenet
    assertTrue(model.pretrainedAvailable(PretrainedType.IMAGENET));

    ComputationGraph initializedModel = (ComputationGraph) model.initPretrained();
    INDArray f = Nd4j.rand(new int[]{1, 3, 224, 224});
    INDArray[] result = initializedModel.output(f);
    assertArrayEquals(new long[]{1, 1000}, result[0].shape());

    //Test fitting. Note we need to use transfer learning, as ResNet50 has a dense layer, not an OutputLayer
    initializedModel = new TransferLearning.GraphBuilder(initializedModel)
            .removeVertexAndConnections("fc1000")
            .addLayer("fc1000", new OutputLayer.Builder()
                    .lossFunction(LossFunctions.LossFunction.MCXENT)
                    .nIn(2048).nOut(1000).activation(Activation.SOFTMAX).build(), "flatten_1")
            .setOutputs("fc1000")
            .build();
    initializedModel.fit(new org.nd4j.linalg.dataset.DataSet(f, TestUtils.randomOneHot(1, 1000, 12345)));

}
 
Example 3
Source File: TestInstantiation.java    From deeplearning4j with Apache License 2.0
public void testInitRandomModel(ZooModel model, long[] inShape, long[] outShape){
    ignoreIfCuda();
    //Test initialization of NON-PRETRAINED models

    log.info("Testing {}", model.getClass().getSimpleName());
    ComputationGraph initializedModel = model.init();
    INDArray f = Nd4j.rand(DataType.FLOAT, inShape);
    INDArray[] result = initializedModel.output(f);
    assertArrayEquals(outShape, result[0].shape());
    INDArray l = outShape.length == 2 ? TestUtils.randomOneHot(1, (int)outShape[1], 12345) : Nd4j.rand(DataType.FLOAT, outShape);
    initializedModel.fit(new org.nd4j.linalg.dataset.DataSet(f, l));

    // clean up for current model
    Nd4j.getWorkspaceManager().destroyAllWorkspacesForCurrentThread();
    f.close();
    l.close();
    initializedModel.params().close();
    initializedModel.getFlattenedGradients().close();
    System.gc();
}
 
Example 4
Source File: TestInstantiation.java    From deeplearning4j with Apache License 2.0
public void testInitPretrained(ZooModel model, long[] inShape, long[] outShape) throws Exception {
    ignoreIfCuda();
    assertTrue(model.pretrainedAvailable(PretrainedType.IMAGENET));

    ComputationGraph initializedModel = (ComputationGraph) model.initPretrained();
    INDArray[] result = initializedModel.output(Nd4j.rand(inShape));
    assertArrayEquals(outShape, result[0].shape());

    // clean up for current model
    Nd4j.getWorkspaceManager().destroyAllWorkspacesForCurrentThread();
    initializedModel.params().close();
    for(INDArray arr : result){
        arr.close();
    }
    System.gc();
}
 
Example 5
Source File: ElementWiseVertexTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testElementWiseVertexForwardSubtract() {
    int batchsz = 24;
    int featuresz = 17;
    ComputationGraphConfiguration cgc = new NeuralNetConfiguration.Builder().graphBuilder()
                    .addInputs("input1", "input2")
                    .addLayer("denselayer",
                                    new DenseLayer.Builder().nIn(featuresz).nOut(1).activation(Activation.IDENTITY)
                                                    .build(),
                                    "input1")
                    /* denselayer is not actually used, but it seems that you _need_ to have trainable parameters, otherwise, you get
                     * Invalid shape: Requested INDArray shape [1, 0] contains dimension size values < 1 (all dimensions must be 1 or more)
                     * at org.nd4j.linalg.factory.Nd4j.checkShapeValues(Nd4j.java:4877)
                     * at org.nd4j.linalg.factory.Nd4j.create(Nd4j.java:4867)
                     * at org.nd4j.linalg.factory.Nd4j.create(Nd4j.java:4820)
                     * at org.nd4j.linalg.factory.Nd4j.create(Nd4j.java:3948)
                     * at org.deeplearning4j.nn.graph.ComputationGraph.init(ComputationGraph.java:409)
                     * at org.deeplearning4j.nn.graph.ComputationGraph.init(ComputationGraph.java:341)
                     */
                    .addVertex("elementwiseSubtract", new ElementWiseVertex(ElementWiseVertex.Op.Subtract),
                                    "input1", "input2")
                    .addLayer("Subtract", new ActivationLayer.Builder().activation(Activation.IDENTITY).build(),
                                    "elementwiseSubtract")
                    .setOutputs("Subtract", "denselayer").build();

    ComputationGraph cg = new ComputationGraph(cgc);
    cg.init();


    INDArray input1 = Nd4j.rand(batchsz, featuresz);
    INDArray input2 = Nd4j.rand(batchsz, featuresz);

    INDArray target = input1.dup().subi(input2);

    INDArray output = cg.output(input1, input2)[0];
    INDArray squared = output.sub(target);
    double rms = Math.sqrt(squared.mul(squared).sumNumber().doubleValue());
    Assert.assertEquals(0.0, rms, this.epsilon);
}
 
Example 6
Source File: ElementWiseVertexTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testElementWiseVertexForwardProduct() {
    int batchsz = 24;
    int featuresz = 17;
    ComputationGraphConfiguration cgc = new NeuralNetConfiguration.Builder().graphBuilder()
                    .addInputs("input1", "input2", "input3")
                    .addLayer("denselayer",
                                    new DenseLayer.Builder().nIn(featuresz).nOut(1).activation(Activation.IDENTITY)
                                                    .build(),
                                    "input1")
                    /* denselayer is not actually used, but it seems that you _need_ to have trainable parameters, otherwise, you get
                     * Invalid shape: Requested INDArray shape [1, 0] contains dimension size values < 1 (all dimensions must be 1 or more)
                     * at org.nd4j.linalg.factory.Nd4j.checkShapeValues(Nd4j.java:4877)
                     * at org.nd4j.linalg.factory.Nd4j.create(Nd4j.java:4867)
                     * at org.nd4j.linalg.factory.Nd4j.create(Nd4j.java:4820)
                     * at org.nd4j.linalg.factory.Nd4j.create(Nd4j.java:3948)
                     * at org.deeplearning4j.nn.graph.ComputationGraph.init(ComputationGraph.java:409)
                     * at org.deeplearning4j.nn.graph.ComputationGraph.init(ComputationGraph.java:341)
                     */
                    .addVertex("elementwiseProduct", new ElementWiseVertex(ElementWiseVertex.Op.Product), "input1",
                                    "input2", "input3")
                    .addLayer("Product", new ActivationLayer.Builder().activation(Activation.IDENTITY).build(),
                                    "elementwiseProduct")
                    .setOutputs("Product", "denselayer").build();

    ComputationGraph cg = new ComputationGraph(cgc);
    cg.init();


    INDArray input1 = Nd4j.rand(batchsz, featuresz);
    INDArray input2 = Nd4j.rand(batchsz, featuresz);
    INDArray input3 = Nd4j.rand(batchsz, featuresz);

    INDArray target = input1.dup().muli(input2).muli(input3);

    INDArray output = cg.output(input1, input2, input3)[0];
    INDArray squared = output.sub(target.castTo(output.dataType()));
    double rms = Math.sqrt(squared.mul(squared).sumNumber().doubleValue());
    Assert.assertEquals(0.0, rms, this.epsilon);
}
 
Example 7
Source File: KerasModelEndToEndTest.java    From deeplearning4j with Apache License 2.0
/**
 * InceptionV3
 */
@Test
@Ignore
// Takes unreasonably long, but works
public void importInception() throws Exception {
    ComputationGraph graph = importFunctionalModelH5Test(
            "modelimport/keras/examples/inception/inception_v3_complete.h5");
    INDArray input = Nd4j.ones(10, 299, 299, 3);        //TF = channels last = NHWC
    graph.output(input);
    System.out.println(graph.summary());
}
 
Example 8
Source File: KerasModelEndToEndTest.java    From deeplearning4j with Apache License 2.0
/**
 * InceptionV3 Keras 2 no top
 */
@Test
public void importInceptionKeras2() throws Exception {
    int[] inputShape = new int[]{299, 299, 3};
    ComputationGraph graph = importFunctionalModelH5Test(
            "modelimport/keras/examples/inception/inception_tf_keras_2.h5", inputShape, false);
    INDArray input = Nd4j.ones(10, 299, 299, 3);        //TF = channels last = NHWC
    graph.output(input);
    System.out.println(graph.summary());
}
 
Example 9
Source File: KerasModelEndToEndTest.java    From deeplearning4j with Apache License 2.0
/**
 * MobileNet
 */
@Test
public void importMobileNet() throws Exception {
    ComputationGraph graph = importFunctionalModelH5Test("modelimport/keras/examples/mobilenet/alternative.hdf5");
    INDArray input = Nd4j.ones(10, 299, 299, 3);
    graph.output(input);
}
 
Example 10
Source File: TransferLearningCompGraphTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testChangeNOutNIn() {
    final String inputName = "input";
    final String changeNoutName = "changeNout";
    final String poolName = "pool";
    final String afterPoolName = "afterPool";
    final String outputName = "output";
    final INDArray input = Nd4j.create(new long[] {1, 2, 4, 4});
    final ComputationGraph graph = new ComputationGraph(new NeuralNetConfiguration.Builder()
            .graphBuilder()
            .addInputs(inputName)
            .setOutputs(outputName)
            .setInputTypes(InputType.inferInputTypes(input))
            .addLayer(changeNoutName, new Convolution2D.Builder(1, 1)
                    .nOut(10)
                    .build(), inputName)
            .addLayer(poolName, new SubsamplingLayer.Builder(1,1).build(), changeNoutName)
            .addLayer(afterPoolName, new Convolution2D.Builder(1, 1)
                    .nOut(7)
                    .build(), poolName)
            .addLayer(outputName, new OutputLayer.Builder()
                    .activation(Activation.SOFTMAX)
                    .nOut(2)
                    .build(), afterPoolName)
            .build());
    graph.init();

    final ComputationGraph newGraph = new TransferLearning.GraphBuilder(graph)
            .nOutReplace(changeNoutName, 5, WeightInit.XAVIER)
            .nInReplace(afterPoolName, 5, WeightInit.XAVIER)
            .build();

    newGraph.init();

    assertEquals("Incorrect number of outputs!", 5 , newGraph.layerSize(changeNoutName));
    assertEquals("Incorrect number of inputs!", 5, newGraph.layerInputSize(afterPoolName));
    newGraph.output(input);
}
 
Example 11
Source File: TransferLearningCompGraphTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testTransferLearningSameDiffLayersGraphVertex(){

    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()

            .graphBuilder()
            .addInputs("in")
            .layer("l0", new LSTM.Builder().nIn(5).nOut(5).build(), "in")
            .addVertex("l1", new AttentionVertex.Builder().nHeads(1).headSize(5).nInKeys(5).nInQueries(5).nInValues(5).nOut(5).build(), "l0", "l0", "l0")
            .layer("out", new RnnOutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build(), "l1")
            .setOutputs("out")
            .build();

    ComputationGraph cg = new ComputationGraph(conf);
    cg.init();

    INDArray arr = Nd4j.rand(DataType.FLOAT, 2, 5, 10);
    INDArray out = cg.output(arr)[0];


    ComputationGraph cg2 = new TransferLearning.GraphBuilder(cg)
            .fineTuneConfiguration(FineTuneConfiguration.builder().updater(new Adam(0.01)).build())
            .removeVertexAndConnections("out")
            .addLayer("newOut", new RnnOutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build(), "l1")
            .setOutputs("newOut")
            .build();

    cg2.output(arr);

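    // Copy the original graph's parameters across, renaming the output layer's entries to match "newOut"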
    Map<String,INDArray> m = new HashMap<>(cg.paramTable());
    m.put("newOut_W", m.remove("out_W"));
    m.put("newOut_b", m.remove("out_b"));
    cg2.setParamTable(m);

    Map<String,INDArray> p1 = cg.paramTable();
    Map<String,INDArray> p2 = cg2.paramTable();
    for(String s : p1.keySet()){
        INDArray i1 = p1.get(s);
        INDArray i2 = p2.get(s.replaceAll("out", "newOut"));
        assertEquals(s, i1, i2);
    }

    INDArray out2 = cg2.outputSingle(arr);
    assertEquals(out, out2);
}
 
Example 12
Source File: TestSameDiffLambda.java    From deeplearning4j with Apache License 2.0
@Test
public void testSameDiffLamdaLayerBasic(){
    for(WorkspaceMode wsm : new WorkspaceMode[]{WorkspaceMode.ENABLED, WorkspaceMode.NONE}) {
        log.info("--- Workspace Mode: {} ---", wsm);


        Nd4j.getRandom().setSeed(12345);
        ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
                .trainingWorkspaceMode(wsm)
                .inferenceWorkspaceMode(wsm)
                .seed(12345)
                .updater(new Adam(0.01))
                .graphBuilder()
                .addInputs("in")
                .addLayer("0", new DenseLayer.Builder().nIn(5).nOut(5).activation(Activation.TANH).build(), "in")
                .addLayer("1", new SameDiffSimpleLambdaLayer(), "0")
                .addLayer("2", new OutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX)
                        .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "1")
                .setOutputs("2")
                .build();

        //Equivalent, not using SameDiff Lambda:
        ComputationGraphConfiguration confStd = new NeuralNetConfiguration.Builder()
                .trainingWorkspaceMode(wsm)
                .inferenceWorkspaceMode(wsm)
                .seed(12345)
                .updater(new Adam(0.01))
                .graphBuilder()
                .addInputs("in")
                .addLayer("0", new DenseLayer.Builder().nIn(5).nOut(5).activation(Activation.TANH).build(), "in")
                .addVertex("1", new ShiftVertex(1.0), "0")
                .addVertex("2", new ScaleVertex(2.0), "1")
                .addLayer("3", new OutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX)
                        .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "2")
                .setOutputs("3")
                .build();

        ComputationGraph lambda = new ComputationGraph(conf);
        lambda.init();

        ComputationGraph std = new ComputationGraph(confStd);
        std.init();

        lambda.setParams(std.params());

        INDArray in = Nd4j.rand(3, 5);
        INDArray labels = TestUtils.randomOneHot(3, 5);
        DataSet ds = new DataSet(in, labels);

        INDArray outLambda = lambda.outputSingle(in);
        INDArray outStd = std.outputSingle(in);

        assertEquals(outLambda, outStd);

        double scoreLambda = lambda.score(ds);
        double scoreStd = std.score(ds);

        assertEquals(scoreStd, scoreLambda, 1e-6);

        for (int i = 0; i < 3; i++) {
            lambda.fit(ds);
            std.fit(ds);

            String s = String.valueOf(i);
            assertEquals(s, std.params(), lambda.params());
            assertEquals(s, std.getFlattenedGradients(), lambda.getFlattenedGradients());
        }

        ComputationGraph loaded = TestUtils.testModelSerialization(lambda);
        outLambda = loaded.outputSingle(in);
        outStd = std.outputSingle(in);

        assertEquals(outStd, outLambda);

        //Sanity check on different minibatch sizes:
        INDArray newIn = Nd4j.vstack(in, in);
        INDArray outMbsd = lambda.output(newIn)[0];
        INDArray outMb = std.output(newIn)[0];
        assertEquals(outMb, outMbsd);
    }
}
 
Example 13
Source File: TestGraphNodes.java    From deeplearning4j with Apache License 2.0
@Test
public void testLastTimeStepWithTransfer(){
    int lstmLayerSize = 16;
    int numLabelClasses = 10;
    int numInputs = 5;

    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
            .trainingWorkspaceMode(WorkspaceMode.NONE)
            .inferenceWorkspaceMode(WorkspaceMode.NONE)
            .seed(123)    //Random number generator seed for improved repeatability. Optional.
            .updater(new AdaDelta())
            .weightInit(WeightInit.XAVIER)
            .graphBuilder()
            .addInputs("rr")
            .setInputTypes(InputType.recurrent(30))
            .addLayer("1", new GravesLSTM.Builder().activation(Activation.TANH).nIn(numInputs).nOut(lstmLayerSize).dropOut(0.9).build(), "rr")
            .addLayer("2", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                    .activation(Activation.SOFTMAX).nOut(numLabelClasses).build(), "1")

            .setOutputs("2")
            .build();


    ComputationGraph net = new ComputationGraph(conf);
    net.init();

    ComputationGraph updatedModel = new TransferLearning.GraphBuilder(net)
            .addVertex("laststepoutput", new LastTimeStepVertex("rr"), "2")
            .setOutputs("laststepoutput")
            .build();


    INDArray input = Nd4j.rand(new int[]{10, numInputs, 16});

    INDArray[] out = updatedModel.output(input);

    assertNotNull(out);
    assertEquals(1, out.length);
    assertNotNull(out[0]);

    assertArrayEquals(new long[]{10, numLabelClasses}, out[0].shape());

    Map<String,INDArray> acts = updatedModel.feedForward(input, false);

    assertEquals(4, acts.size());   //2 layers + input + vertex output
    assertNotNull(acts.get("laststepoutput"));
    assertArrayEquals(new long[]{10, numLabelClasses}, acts.get("laststepoutput").shape());

    String toString = out[0].toString();
}
 
Example 14
Source File: KerasModelEndToEndTest.java    From deeplearning4j with Apache License 2.0
@Test @Ignore //AB 20200427 Bad keras model - Keras JSON has input shape [null, 10, 19, 19] (i.e., NCHW) but all layers are set to channels_last
public void importSepResPolicy() throws Exception {
    ComputationGraph model = importFunctionalModelH5Test("modelimport/keras/examples/agz/sep_res_policy.h5");
    INDArray input = Nd4j.create(32, 19, 19, 10);
    model.output(input);
}
 
Example 15
Source File: KerasModelEndToEndTest.java    From deeplearning4j with Apache License 2.0
@Test @Ignore //AB 20200427 Bad keras model - Keras JSON has input shape [null, 10, 19, 19] (i.e., NCHW) but all layers are set to channels_last
public void importSepConvValue() throws Exception {
    ComputationGraph model = importFunctionalModelH5Test("modelimport/keras/examples/agz/sep_conv_value.h5");
    INDArray input = Nd4j.create(32, 19, 19, 10);
    model.output(input);
}
 
Example 16
Source File: KerasModelEndToEndTest.java    From deeplearning4j with Apache License 2.0
@Test @Ignore //AB 20200427 Bad keras model - Keras JSON has input shape [null, 10, 19, 19] (i.e., NCHW) but all layers are set to channels_last
public void importDualRes() throws Exception {
    ComputationGraph model = importFunctionalModelH5Test("modelimport/keras/examples/agz/dual_res.h5");
    INDArray input = Nd4j.create(32, 19, 19, 10);
    model.output(input);
}
 
Example 17
Source File: TestDL4JStep.java    From konduit-serving with Apache License 2.0
public INDArray[] predictFromFileCG(File f, INDArray in) throws Exception {
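    // Second argument of ComputationGraph.load is loadUpdater = false: updater state isn't needed for inference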
    ComputationGraph net = ComputationGraph.load(f, false);
    return net.output(in);
}
 
Example 18
Source File: ParallelInferenceTest.java    From deeplearning4j with Apache License 2.0
@Test(timeout = 120000L)
public void testMultiOutputNet() throws Exception {

    int nIn = 5;

    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
            .graphBuilder()
            .addInputs("in")
            .layer("out0", new OutputLayer.Builder().nIn(nIn).nOut(4).activation(Activation.SOFTMAX).build(), "in")
            .layer("out1", new OutputLayer.Builder().nIn(nIn).nOut(6).activation(Activation.SOFTMAX).build(), "in")
            .setOutputs("out0", "out1")
            .build();

    ComputationGraph net = new ComputationGraph(conf);
    net.init();

    Random r = new Random();
    for( InferenceMode m : InferenceMode.values()) {
        for( int w : new int[]{1,2}) {

            final ParallelInference inf =
                    new ParallelInference.Builder(net)
                            .inferenceMode(m)
                            .batchLimit(5)
                            .queueLimit(64)
                            .workers(w).build();

            List<INDArray[]> in = new ArrayList<>();
            List<INDArray[]> exp = new ArrayList<>();
            int runs = isIntegrationTests() ? 100 : 20;
            for (int i = 0; i < runs; i++) {
                int currNumEx = 1 + r.nextInt(3);
                INDArray inArr = Nd4j.rand(new int[]{currNumEx, nIn});
                in.add(new INDArray[]{inArr});

                INDArray[] out = net.output(inArr);
                exp.add(out);
            }

            testParallelInferenceMulti(inf, in, null, exp);
            inf.shutdown();
        }
    }

}
 
Example 19
Source File: RegressionTest100b4.java    From deeplearning4j with Apache License 2.0
@Test
public void testSyntheticBidirectionalRNNGraph() throws Exception {

    File f = Resources.asFile("regression_testing/100b4/SyntheticBidirectionalRNNGraph_100b4.bin");
    ComputationGraph net = ComputationGraph.load(f, true);

    Bidirectional l0 = (Bidirectional) net.getLayer("rnn1").conf().getLayer();

    LSTM l1 = (LSTM) l0.getFwd();
    assertEquals(16, l1.getNOut());
    assertEquals(new ActivationReLU(), l1.getActivationFn());
    assertEquals(new L2Regularization(0.0001), TestUtils.getL2Reg(l1));

    LSTM l2 = (LSTM) l0.getBwd();
    assertEquals(16, l2.getNOut());
    assertEquals(new ActivationReLU(), l2.getActivationFn());
    assertEquals(new L2Regularization(0.0001), TestUtils.getL2Reg(l2));

    Bidirectional l3 = (Bidirectional) net.getLayer("rnn2").conf().getLayer();

    SimpleRnn l4 = (SimpleRnn) l3.getFwd();
    assertEquals(16, l4.getNOut());
    assertEquals(new ActivationReLU(), l4.getActivationFn());
    assertEquals(new L2Regularization(0.0001), TestUtils.getL2Reg(l4));

    SimpleRnn l5 = (SimpleRnn) l3.getBwd();
    assertEquals(16, l5.getNOut());
    assertEquals(new ActivationReLU(), l5.getActivationFn());
    assertEquals(new L2Regularization(0.0001), TestUtils.getL2Reg(l5));

    MergeVertex mv = (MergeVertex) net.getVertex("concat");

    GlobalPoolingLayer gpl = (GlobalPoolingLayer) net.getLayer("pooling").conf().getLayer();
    assertEquals(PoolingType.MAX, gpl.getPoolingType());
    assertArrayEquals(new int[]{2}, gpl.getPoolingDimensions());
    assertTrue(gpl.isCollapseDimensions());

    OutputLayer outl = (OutputLayer) net.getLayer("out").conf().getLayer();
    assertEquals(3, outl.getNOut());
    assertEquals(new LossMCXENT(), outl.getLossFn());

    INDArray outExp;
    File f2 = Resources.asFile("regression_testing/100b4/SyntheticBidirectionalRNNGraph_Output_100b4.bin");
    try (DataInputStream dis = new DataInputStream(new FileInputStream(f2))) {
        outExp = Nd4j.read(dis);
    }

    INDArray in;
    File f3 = Resources.asFile("regression_testing/100b4/SyntheticBidirectionalRNNGraph_Input_100b4.bin");
    try (DataInputStream dis = new DataInputStream(new FileInputStream(f3))) {
        in = Nd4j.read(dis);
    }

    INDArray outAct = net.output(in)[0];

    assertEquals(outExp, outAct);
}
 
Example 20
Source File: KerasModelEndToEndTest.java    From deeplearning4j with Apache License 2.0
/**
 * Import all AlphaGo Zero model variants, i.e.
 * - Dual residual architecture
 * - Dual convolutional architecture
 * - Separate (policy and value) residual architecture
 * - Separate (policy and value) convolutional architecture
 */
@Test @Ignore //AB 20200427 Bad keras model - Keras JSON has input shape [null, 10, 19, 19] (i.e., NCHW) but all layers are set to channels_last
public void importSepConvPolicy() throws Exception {
    ComputationGraph model = importFunctionalModelH5Test("modelimport/keras/examples/agz/sep_conv_policy.h5");
    INDArray input = Nd4j.create(32, 19, 19, 10);
    model.output(input);
}