Java Code Examples for org.deeplearning4j.nn.graph.ComputationGraph#outputSingle()

The following examples show how to use org.deeplearning4j.nn.graph.ComputationGraph#outputSingle(). Each example is taken from an open-source project; the source file and license are noted above the code.
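outputSingle(INDArray...) runs a forward pass and returns the single output array of a graph that declares exactly one output; it is a convenience over output(INDArray...), which returns an INDArray[]. As a quick orientation, here is a minimal sketch; the file name "model.bin" and the 1x10 input shape are illustrative assumptions, not taken from the examples below.

ComputationGraph net = ComputationGraph.load(new File("model.bin"), false); //false = don't also load updater state
INDArray features = Nd4j.rand(1, 10);              //a single example with 10 input features (assumed shape)
INDArray prediction = net.outputSingle(features);  //forward pass; returns the graph's one output array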
Example 1
Source File: TransformRotatingImages.java    From Java-Machine-Learning-for-Computer-Vision with MIT License
private static void cropImageWithYOLOBoundingBox(ComputationGraph yolo,
                                                 Speed selectedSpeed, File file) throws Exception {
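    //LOADER, PRE_PROCESSOR and createCroppedImage(...) are members of the enclosing class
    // (an image loader, an input normalizer, and a cropping helper, respectively)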
    if (file.isDirectory()) {
        return;
    }
    BufferedImage bufferedImage = ImageIO.read(file);
    INDArray features = LOADER.asMatrix(bufferedImage);
    opencv_core.Mat mat = LOADER.asMat(features);
    PRE_PROCESSOR.transform(features);
    INDArray results = yolo.outputSingle(features);
    Yolo2OutputLayer outputLayer = (Yolo2OutputLayer) yolo.getOutputLayer(0);
    List<DetectedObject> predictedObjects = outputLayer.getPredictedObjects(results, 0.5);
    YoloUtils.nms(predictedObjects, 0.5);
    Optional<DetectedObject> max = predictedObjects.stream()
            .max(Comparator.comparingDouble(DetectedObject::getConfidence));
    if (max.isPresent()) {  //guard: max.get() would throw if nothing passed the threshold
        createCroppedImage(mat, selectedSpeed, max.get(), file);
    }
}
 
Example 2
Source File: Yolo.java    From Java-Machine-Learning-for-Computer-Vision with MIT License
private void warmUp(ComputationGraph model) throws IOException {
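    //loader, prepareImage(...) and YOLO_DETECTION_THRESHOLD are members of the enclosing class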
    Yolo2OutputLayer outputLayer = (Yolo2OutputLayer) model.getOutputLayer(0);
    BufferedImage read = ImageIO.read(new File("CarTracking/src/main/resources/sample.jpg"));
    INDArray indArray = prepareImage(loader.asMatrix(read));
    INDArray results = model.outputSingle(indArray);
    outputLayer.getPredictedObjects(results, YOLO_DETECTION_THRESHOLD);
}
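A warm-up pass like this is common because the first forward pass of a freshly loaded model tends to be slower (one-off workspace and library initialization); running one inference up front keeps later, latency-sensitive calls representative.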
 
Example 3
Source File: Keras2ModelConfigurationTest.java    From deeplearning4j with Apache License 2.0
@Test @Ignore("AB 2019/11/23 - known issue - see https://github.com/eclipse/deeplearning4j/issues/8373 and https://github.com/eclipse/deeplearning4j/issues/8441")
public void reshapeEmbeddingConcatTest() throws Exception {
    try (InputStream is = Resources.asStream("/modelimport/keras/configs/keras2/reshape_embedding_concat.json")) {
        ComputationGraphConfiguration config =
                new KerasModel().modelBuilder().modelJsonInputStream(is)
                        .enforceTrainingConfig(false).buildModel().getComputationGraphConfiguration();
        ComputationGraph model = new ComputationGraph(config);
        model.init();
        model.outputSingle(Nd4j.zeros(1, 1), Nd4j.zeros(1, 1), Nd4j.zeros(1, 1));
    }
}
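Note that outputSingle is a varargs method: a multi-input graph with a single output takes one INDArray per declared input, as with the three Nd4j.zeros(1, 1) arrays here.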
 
Example 4
Source File: RegressionTest100b4.java    From deeplearning4j with Apache License 2.0
@Test
public void testYoloHouseNumber() throws Exception {

    File f = Resources.asFile("regression_testing/100b4/HouseNumberDetection_100b4.bin");
    ComputationGraph net = ComputationGraph.load(f, true);

    int nBoxes = 5;
    int nClasses = 10;

    ConvolutionLayer cl = (ConvolutionLayer) ((LayerVertex) net.getConfiguration().getVertices()
            .get("convolution2d_9")).getLayerConf().getLayer();
    assertEquals(nBoxes * (5 + nClasses), cl.getNOut());
    assertEquals(new ActivationIdentity(), cl.getActivationFn());
    assertEquals(ConvolutionMode.Same, cl.getConvolutionMode());
    assertEquals(new WeightInitXavier(), cl.getWeightInitFn());
    assertArrayEquals(new int[]{1, 1}, cl.getKernelSize());

    INDArray outExp;
    File f2 = Resources.asFile("regression_testing/100b4/HouseNumberDetection_Output_100b4.bin");
    try (DataInputStream dis = new DataInputStream(new FileInputStream(f2))) {
        outExp = Nd4j.read(dis);
    }

    INDArray in;
    File f3 = Resources.asFile("regression_testing/100b4/HouseNumberDetection_Input_100b4.bin");
    try (DataInputStream dis = new DataInputStream(new FileInputStream(f3))) {
        in = Nd4j.read(dis);
    }

    INDArray outAct = net.outputSingle(in);

    boolean eq = outExp.equalsWithEps(outAct.castTo(outExp.dataType()), 1e-3);
    assertTrue(eq);
}
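This regression test loads a network saved by an older release (100b4, i.e. 1.0.0-beta4), checks that key layer configuration fields survived deserialization, and then compares outputSingle against stored expected activations with an epsilon of 1e-3. Examples 5 and 6 repeat the same check for the beta3 and beta6 formats.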
 
Example 5
Source File: RegressionTest100b3.java    From deeplearning4j with Apache License 2.0
@Test
@Ignore("AB 2019/05/23 - Failing on linux-x86_64-cuda-9.2 - see issue #7657")
public void testYoloHouseNumber() throws Exception {

    File f = Resources.asFile("regression_testing/100b3/HouseNumberDetection_100b3.bin");
    ComputationGraph net = ComputationGraph.load(f, true);

    int nBoxes = 5;
    int nClasses = 10;

    ConvolutionLayer cl = (ConvolutionLayer) ((LayerVertex) net.getConfiguration().getVertices()
            .get("convolution2d_9")).getLayerConf().getLayer();
    assertEquals(nBoxes * (5 + nClasses), cl.getNOut());
    assertEquals(new ActivationIdentity(), cl.getActivationFn());
    assertEquals(ConvolutionMode.Same, cl.getConvolutionMode());
    assertEquals(new WeightInitXavier(), cl.getWeightInitFn());
    assertArrayEquals(new int[]{1, 1}, cl.getKernelSize());

    INDArray outExp;
    File f2 = Resources.asFile("regression_testing/100b3/HouseNumberDetection_Output_100b3.bin");
    try(DataInputStream dis = new DataInputStream(new FileInputStream(f2))){
        outExp = Nd4j.read(dis);
    }

    INDArray in;
    File f3 = Resources.asFile("regression_testing/100b3/HouseNumberDetection_Input_100b3.bin");
    try(DataInputStream dis = new DataInputStream(new FileInputStream(f3))){
        in = Nd4j.read(dis);
    }

    INDArray outAct = net.outputSingle(in);

    boolean eq = outExp.equalsWithEps(outAct.castTo(outExp.dataType()), 1e-3);
    assertTrue(eq);
}
 
Example 6
Source File: RegressionTest100b6.java    From deeplearning4j with Apache License 2.0
@Test
public void testYoloHouseNumber() throws Exception {

    File f = Resources.asFile("regression_testing/100b6/HouseNumberDetection_100b6.bin");
    ComputationGraph net = ComputationGraph.load(f, true);

    int nBoxes = 5;
    int nClasses = 10;

    ConvolutionLayer cl = (ConvolutionLayer) ((LayerVertex) net.getConfiguration().getVertices()
            .get("convolution2d_9")).getLayerConf().getLayer();
    assertEquals(nBoxes * (5 + nClasses), cl.getNOut());
    assertEquals(new ActivationIdentity(), cl.getActivationFn());
    assertEquals(ConvolutionMode.Same, cl.getConvolutionMode());
    assertEquals(new WeightInitXavier(), cl.getWeightInitFn());
    assertArrayEquals(new int[]{1, 1}, cl.getKernelSize());

    INDArray outExp;
    File f2 = Resources.asFile("regression_testing/100b6/HouseNumberDetection_Output_100b6.bin");
    try (DataInputStream dis = new DataInputStream(new FileInputStream(f2))) {
        outExp = Nd4j.read(dis);
    }

    INDArray in;
    File f3 = Resources.asFile("regression_testing/100b6/HouseNumberDetection_Input_100b6.bin");
    try (DataInputStream dis = new DataInputStream(new FileInputStream(f3))) {
        in = Nd4j.read(dis);
    }

    INDArray outAct = net.outputSingle(in);

    boolean eq = outExp.equalsWithEps(outAct.castTo(outExp.dataType()), 1e-3);
    assertTrue(eq);
}
 
Example 7
Source File: OutputLayerTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testCnnLossLayerCompGraph(){

    for(WorkspaceMode ws : WorkspaceMode.values()) {
        log.info("*** Testing workspace: " + ws);

        for (Activation a : new Activation[]{Activation.TANH, Activation.SELU}) {
            //Check that (A+identity) is equal to (identity+A), for activation A
            //i.e., should get same output and weight gradients for both

            ComputationGraphConfiguration conf1 =
                    new NeuralNetConfiguration.Builder().seed(12345L)
                            .updater(new NoOp())
                            .convolutionMode(ConvolutionMode.Same)
                            .inferenceWorkspaceMode(ws)
                            .trainingWorkspaceMode(ws)
                            .graphBuilder()
                            .addInputs("in")
                            .addLayer("0", new ConvolutionLayer.Builder().nIn(3).nOut(4).activation(Activation.IDENTITY)
                                    .kernelSize(2, 2).stride(1, 1)
                                    .dist(new NormalDistribution(0, 1.0))
                                    .updater(new NoOp()).build(), "in")
                            .addLayer("1", new CnnLossLayer.Builder(LossFunction.MSE)
                                    .activation(a)
                                    .build(), "0")
                            .setOutputs("1")
                            .build();

            ComputationGraphConfiguration conf2 =
                    new NeuralNetConfiguration.Builder().seed(12345L)
                            .updater(new NoOp())
                            .convolutionMode(ConvolutionMode.Same)
                            .inferenceWorkspaceMode(ws)
                            .trainingWorkspaceMode(ws)
                            .graphBuilder()
                            .addInputs("in")
                            .addLayer("0", new ConvolutionLayer.Builder().nIn(3).nOut(4).activation(a)
                                    .kernelSize(2, 2).stride(1, 1)
                                    .dist(new NormalDistribution(0, 1.0))
                                    .updater(new NoOp()).build(), "in")
                            .addLayer("1", new CnnLossLayer.Builder(LossFunction.MSE)
                                    .activation(Activation.IDENTITY)
                                    .build(), "0")
                            .setOutputs("1")
                            .build();

            ComputationGraph graph = new ComputationGraph(conf1);
            graph.init();

            ComputationGraph graph2 = new ComputationGraph(conf2);
            graph2.init();


            graph2.setParams(graph.params());


            INDArray in = Nd4j.rand(new int[]{3, 3, 5, 5});

            INDArray out1 = graph.outputSingle(in);
            INDArray out2 = graph2.outputSingle(in);

            assertEquals(out1, out2);

            INDArray labels = Nd4j.rand(out1.shape());

            graph.setInput(0,in);
            graph.setLabels(labels);

            graph2.setInput(0,in);
            graph2.setLabels(labels);

            graph.computeGradientAndScore();
            graph2.computeGradientAndScore();

            assertEquals(graph.score(), graph2.score(), 1e-6);
            assertEquals(graph.gradient().gradient(), graph2.gradient().gradient());

            //Also check computeScoreForExamples
            INDArray in2a = Nd4j.rand(new int[]{1, 3, 5, 5});
            INDArray labels2a = Nd4j.rand(new int[]{1, 4, 5, 5});

            INDArray in2 = Nd4j.concat(0, in2a, in2a);
            INDArray labels2 = Nd4j.concat(0, labels2a, labels2a);

            INDArray s = graph.scoreExamples(new DataSet(in2, labels2), false);
            assertArrayEquals(new long[]{2, 1}, s.shape());
            assertEquals(s.getDouble(0), s.getDouble(1), 1e-6);

            TestUtils.testModelSerialization(graph);
        }
    }
}
 
Example 8
Source File: TestSameDiffLambda.java    From deeplearning4j with Apache License 2.0
@Test
public void testSameDiffLambdaLayerBasic(){
    for(WorkspaceMode wsm : new WorkspaceMode[]{WorkspaceMode.ENABLED, WorkspaceMode.NONE}) {
        log.info("--- Workspace Mode: {} ---", wsm);


        Nd4j.getRandom().setSeed(12345);
        ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
                .trainingWorkspaceMode(wsm)
                .inferenceWorkspaceMode(wsm)
                .seed(12345)
                .updater(new Adam(0.01))
                .graphBuilder()
                .addInputs("in")
                .addLayer("0", new DenseLayer.Builder().nIn(5).nOut(5).activation(Activation.TANH).build(), "in")
                .addLayer("1", new SameDiffSimpleLambdaLayer(), "0")
                .addLayer("2", new OutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX)
                        .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "1")
                .setOutputs("2")
                .build();

        //Equivalent, not using SameDiff Lambda:
        ComputationGraphConfiguration confStd = new NeuralNetConfiguration.Builder()
                .trainingWorkspaceMode(wsm)
                .inferenceWorkspaceMode(wsm)
                .seed(12345)
                .updater(new Adam(0.01))
                .graphBuilder()
                .addInputs("in")
                .addLayer("0", new DenseLayer.Builder().nIn(5).nOut(5).activation(Activation.TANH).build(), "in")
                .addVertex("1", new ShiftVertex(1.0), "0")
                .addVertex("2", new ScaleVertex(2.0), "1")
                .addLayer("3", new OutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX)
                        .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "2")
                .setOutputs("3")
                .build();

        ComputationGraph lambda = new ComputationGraph(conf);
        lambda.init();

        ComputationGraph std = new ComputationGraph(confStd);
        std.init();

        lambda.setParams(std.params());

        INDArray in = Nd4j.rand(3, 5);
        INDArray labels = TestUtils.randomOneHot(3, 5);
        DataSet ds = new DataSet(in, labels);

        INDArray outLambda = lambda.outputSingle(in);
        INDArray outStd = std.outputSingle(in);

        assertEquals(outLambda, outStd);

        double scoreLambda = lambda.score(ds);
        double scoreStd = std.score(ds);

        assertEquals(scoreStd, scoreLambda, 1e-6);

        for (int i = 0; i < 3; i++) {
            lambda.fit(ds);
            std.fit(ds);

            String s = String.valueOf(i);
            assertEquals(s, std.params(), lambda.params());
            assertEquals(s, std.getFlattenedGradients(), lambda.getFlattenedGradients());
        }

        ComputationGraph loaded = TestUtils.testModelSerialization(lambda);
        outLambda = loaded.outputSingle(in);
        outStd = std.outputSingle(in);

        assertEquals(outStd, outLambda);

        //Sanity check on different minibatch sizes:
        INDArray newIn = Nd4j.vstack(in, in);
        INDArray outMbsd = lambda.output(newIn)[0];
        INDArray outMb = std.output(newIn)[0];
        assertEquals(outMb, outMbsd);
    }
}
 
Example 9
Source File: FrozenLayerWithBackpropTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testFrozenLayerInstantiationCompGraph() {

    //We need to be able to instantiate frozen layers from JSON etc., and have them be the same as if
    // they were initialized via the builder
    ComputationGraphConfiguration conf1 = new NeuralNetConfiguration.Builder().seed(12345).graphBuilder()
            .addInputs("in")
            .addLayer("0", new DenseLayer.Builder().nIn(10).nOut(10).activation(Activation.TANH)
                    .weightInit(WeightInit.XAVIER).build(), "in")
            .addLayer("1", new DenseLayer.Builder().nIn(10).nOut(10).activation(Activation.TANH)
                    .weightInit(WeightInit.XAVIER).build(), "0")
            .addLayer("2", new OutputLayer.Builder(
                            LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(10)
                            .nOut(10).build(),
                    "1")
            .setOutputs("2").build();

    ComputationGraphConfiguration conf2 = new NeuralNetConfiguration.Builder().seed(12345).graphBuilder()
            .addInputs("in")
            .addLayer("0", new org.deeplearning4j.nn.conf.layers.misc.FrozenLayerWithBackprop(
                    new DenseLayer.Builder().nIn(10).nOut(10).activation(Activation.TANH)
                            .weightInit(WeightInit.XAVIER).build()), "in")
            .addLayer("1", new org.deeplearning4j.nn.conf.layers.misc.FrozenLayerWithBackprop(
                    new DenseLayer.Builder().nIn(10).nOut(10).activation(Activation.TANH)
                            .weightInit(WeightInit.XAVIER).build()), "0")
            .addLayer("2", new OutputLayer.Builder(
                            LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(10)
                            .nOut(10).build(),
                    "1")
            .setOutputs("2").build();

    ComputationGraph net1 = new ComputationGraph(conf1);
    net1.init();
    ComputationGraph net2 = new ComputationGraph(conf2);
    net2.init();

    assertEquals(net1.params(), net2.params());


    String json = conf2.toJson();
    ComputationGraphConfiguration fromJson = ComputationGraphConfiguration.fromJson(json);

    assertEquals(conf2, fromJson);

    ComputationGraph net3 = new ComputationGraph(fromJson);
    net3.init();

    INDArray input = Nd4j.rand(10, 10);

    INDArray out2 = net2.outputSingle(input);
    INDArray out3 = net3.outputSingle(input);

    assertEquals(out2, out3);
}
 
Example 10
Source File: DTypeTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testLocallyConnected() {
    for (DataType globalDtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
        Nd4j.setDefaultDataTypes(globalDtype, globalDtype);
        for (DataType networkDtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
            assertEquals(globalDtype, Nd4j.dataType());
            assertEquals(globalDtype, Nd4j.defaultFloatingPointType());

            INDArray[] in = null;
            for (int test = 0; test < 2; test++) {
                String msg = "Global dtype: " + globalDtype + ", network dtype: " + networkDtype + ", test=" + test;

                ComputationGraphConfiguration.GraphBuilder b = new NeuralNetConfiguration.Builder()
                        .dataType(networkDtype)
                        .seed(123)
                        .updater(new NoOp())
                        .weightInit(WeightInit.XAVIER)
                        .convolutionMode(ConvolutionMode.Same)
                        .graphBuilder();

                INDArray label;
                switch (test) {
                    case 0:
                        b.addInputs("in")
                                .addLayer("1", new LSTM.Builder().nOut(5).build(), "in")
                                .addLayer("2", new LocallyConnected1D.Builder().kernelSize(2).nOut(4).build(), "1")
                                .addLayer("out", new RnnOutputLayer.Builder().nOut(10).build(), "2")
                                .setOutputs("out")
                                .setInputTypes(InputType.recurrent(5, 2));
                        in = new INDArray[]{Nd4j.rand(networkDtype, 2, 5, 2)};
                        label = TestUtils.randomOneHotTimeSeries(2, 10, 2);
                        break;
                    case 1:
                        b.addInputs("in")
                                .addLayer("1", new ConvolutionLayer.Builder().kernelSize(2, 2).nOut(5).convolutionMode(ConvolutionMode.Same).build(), "in")
                                .addLayer("2", new LocallyConnected2D.Builder().kernelSize(2, 2).nOut(5).build(), "1")
                                .addLayer("out", new OutputLayer.Builder().nOut(10).build(), "2")
                                .setOutputs("out")
                                .setInputTypes(InputType.convolutional(8, 8, 1));
                        in = new INDArray[]{Nd4j.rand(networkDtype, 2, 1, 8, 8)};
                        label = TestUtils.randomOneHot(2, 10).castTo(networkDtype);
                        break;
                    default:
                        throw new RuntimeException();
                }

                ComputationGraph net = new ComputationGraph(b.build());
                net.init();

                INDArray out = net.outputSingle(in);
                assertEquals(msg, networkDtype, out.dataType());
                Map<String, INDArray> ff = net.feedForward(in, false);
                for (Map.Entry<String, INDArray> e : ff.entrySet()) {
                    if (e.getKey().equals("in"))
                        continue;
                    String s = msg + " - layer: " + e.getKey();
                    assertEquals(s, networkDtype, e.getValue().dataType());
                }

                net.setInputs(in);
                net.setLabels(label);
                net.computeGradientAndScore();

                net.fit(new MultiDataSet(in, new INDArray[]{label}));

                logUsedClasses(net);

                //Now, test mismatched dtypes for input/labels:
                for (DataType inputLabelDtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
                    INDArray[] in2 = new INDArray[in.length];
                    for (int i = 0; i < in.length; i++) {
                        in2[i] = in[i].castTo(inputLabelDtype);
                    }
                    INDArray label2 = label.castTo(inputLabelDtype);
                    net.output(in2);
                    net.setInputs(in2);
                    net.setLabels(label2);
                    net.computeGradientAndScore();

                    net.fit(new MultiDataSet(in2, new INDArray[]{label2}));
                }
            }
        }
    }
}
 
Example 11
Source File: BidirectionalTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testSerializationCompGraph() throws Exception {

    for(WorkspaceMode wsm : WorkspaceMode.values()) {
        log.info("*** Starting workspace mode: " + wsm);

        Nd4j.getRandom().setSeed(12345);

        ComputationGraphConfiguration conf1 = new NeuralNetConfiguration.Builder()
                .activation(Activation.TANH)
                .weightInit(WeightInit.XAVIER)
                .trainingWorkspaceMode(wsm)
                .inferenceWorkspaceMode(wsm)
                .updater(new Adam())
                .graphBuilder()
                .addInputs("in")
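                //rnnDataFormat (NCW or NWC) is a parameterized field of the enclosing test class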
                .layer("0", new Bidirectional(Bidirectional.Mode.ADD, new GravesLSTM.Builder().nIn(10).nOut(10).dataFormat(rnnDataFormat).build()), "in")
                .layer("1", new Bidirectional(Bidirectional.Mode.ADD, new GravesLSTM.Builder().nIn(10).nOut(10).dataFormat(rnnDataFormat).build()), "0")
                .layer("2", new RnnOutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).dataFormat(rnnDataFormat)
                        .nIn(10).nOut(10).build(), "1")
                .setOutputs("2")
                .build();

        ComputationGraph net1 = new ComputationGraph(conf1);
        net1.init();
        long[] inshape = (rnnDataFormat == NCW)? new long[]{3, 10, 5}: new long[]{3, 5, 10};
        INDArray in = Nd4j.rand(inshape);
        INDArray labels = Nd4j.rand(inshape);

        net1.fit(new DataSet(in, labels));

        byte[] bytes;
        try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
            ModelSerializer.writeModel(net1, baos, true);
            bytes = baos.toByteArray();
        }


        ComputationGraph net2 = ModelSerializer.restoreComputationGraph(new ByteArrayInputStream(bytes), true);


        in = Nd4j.rand(inshape);
        labels = Nd4j.rand(inshape);

        INDArray out1 = net1.outputSingle(in);
        INDArray out2 = net2.outputSingle(in);

        assertEquals(out1, out2);

        net1.setInput(0, in);
        net2.setInput(0, in);
        net1.setLabels(labels);
        net2.setLabels(labels);

        net1.computeGradientAndScore();
        net2.computeGradientAndScore();

        assertEquals(net1.score(), net2.score(), 1e-6);
        assertEquals(net1.gradient().gradient(), net2.gradient().gradient());
    }
}
 
Example 12
Source File: BidirectionalTest.java    From deeplearning4j with Apache License 2.0
@Test
    public void compareImplementationsCompGraph(){
        for(WorkspaceMode wsm : new WorkspaceMode[]{WorkspaceMode.NONE, WorkspaceMode.ENABLED}) {
            log.info("*** Starting workspace mode: " + wsm);

            //Bidirectional(GravesLSTM) and GravesBidirectionalLSTM should be equivalent, given equivalent params
            //Note that GravesBidirectionalLSTM implements ADD mode only

            ComputationGraphConfiguration conf1 = new NeuralNetConfiguration.Builder()
                    .activation(Activation.TANH)
                    .weightInit(WeightInit.XAVIER)
                    .updater(new Adam())
                    .trainingWorkspaceMode(wsm)
                    .inferenceWorkspaceMode(wsm)
                    .graphBuilder()
                    .addInputs("in")
                    .layer("0", new Bidirectional(Bidirectional.Mode.ADD, new GravesLSTM.Builder().nIn(10).nOut(10).build()), "in")
                    .layer("1", new Bidirectional(Bidirectional.Mode.ADD, new GravesLSTM.Builder().nIn(10).nOut(10).build()), "0")
                    .layer("2", new RnnOutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE)
                            .nIn(10).nOut(10).build(), "1")
                    .setOutputs("2")
                    .build();

            ComputationGraphConfiguration conf2 = new NeuralNetConfiguration.Builder()
                    .activation(Activation.TANH)
                    .weightInit(WeightInit.XAVIER)
                    .updater(new Adam())
                    .trainingWorkspaceMode(wsm)
                    .inferenceWorkspaceMode(wsm)
                    .graphBuilder()
                    .addInputs("in")
                    .layer("0", new GravesBidirectionalLSTM.Builder().nIn(10).nOut(10).build(), "in")
                    .layer("1", new GravesBidirectionalLSTM.Builder().nIn(10).nOut(10).build(), "0")
                    .layer("2", new RnnOutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE)
                            .nIn(10).nOut(10).build(), "1")
                    .setOutputs("2")
                    .build();

            ComputationGraph net1 = new ComputationGraph(conf1);
            net1.init();

            ComputationGraph net2 = new ComputationGraph(conf2);
            net2.init();

            assertEquals(net1.numParams(), net2.numParams());
            for (int i = 0; i < 3; i++) {
                int n1 = (int)net1.getLayer(i).numParams();
                int n2 = (int)net2.getLayer(i).numParams();
                assertEquals(n1, n2);
            }

            net2.setParams(net1.params());  //Assuming exact same layout here...

            INDArray in = Nd4j.rand(new int[]{3, 10, 5});

            INDArray out1 = net1.outputSingle(in);
            INDArray out2 = net2.outputSingle(in);

            assertEquals(out1, out2);

            INDArray labels = Nd4j.rand(new int[]{3, 10, 5});

            net1.setInput(0,in);
            net1.setLabels(labels);

            net2.setInput(0,in);
            net2.setLabels(labels);

            net1.computeGradientAndScore();
            net2.computeGradientAndScore();

            //Ensure scores are equal:
            assertEquals(net1.score(), net2.score(), 1e-6);

            //Ensure gradients are equal:
            Gradient g1 = net1.gradient();
            Gradient g2 = net2.gradient();
            assertEquals(g1.gradient(), g2.gradient());

            //Ensure updates are equal:
            ComputationGraphUpdater u1 = (ComputationGraphUpdater) net1.getUpdater();
            ComputationGraphUpdater u2 = (ComputationGraphUpdater) net2.getUpdater();
            assertEquals(u1.getUpdaterStateViewArray(), u2.getUpdaterStateViewArray());
            u1.update(g1, 0, 0, 3, LayerWorkspaceMgr.noWorkspaces());
            u2.update(g2, 0, 0, 3, LayerWorkspaceMgr.noWorkspaces());
            assertEquals(g1.gradient(), g2.gradient());
            assertEquals(u1.getUpdaterStateViewArray(), u2.getUpdaterStateViewArray());

            //Ensure params are equal, after fitting
            net1.fit(new DataSet(in, labels));
            net2.fit(new DataSet(in, labels));

            INDArray p1 = net1.params();
            INDArray p2 = net2.params();
            assertEquals(p1, p2);
        }
    }
 
Example 13
Source File: LocallyConnectedLayerTest.java    From deeplearning4j with Apache License 2.0
@Test
    public void testLocallyConnected(){
        for (DataType globalDtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
            Nd4j.setDefaultDataTypes(globalDtype, globalDtype);
            for (DataType networkDtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
                assertEquals(globalDtype, Nd4j.dataType());
                assertEquals(globalDtype, Nd4j.defaultFloatingPointType());

                for (int test = 0; test < 2; test++) {
                    String msg = "Global dtype: " + globalDtype + ", network dtype: " + networkDtype + ", test=" + test;

                    ComputationGraphConfiguration.GraphBuilder b = new NeuralNetConfiguration.Builder()
                            .dataType(networkDtype)
                            .seed(123)
                            .updater(new NoOp())
                            .weightInit(WeightInit.XAVIER)
                            .convolutionMode(ConvolutionMode.Same)
                            .graphBuilder();

                    INDArray[] in;
                    INDArray label;
                    switch (test){
                        case 0:
                            b.addInputs("in")
                                    .addLayer("1", new LSTM.Builder().nOut(5).build(), "in")
                                    .addLayer("2", new LocallyConnected1D.Builder().kernelSize(2).nOut(4).build(), "1")
                                    .addLayer("out", new RnnOutputLayer.Builder().nOut(10).build(), "2")
                                    .setOutputs("out")
                                    .setInputTypes(InputType.recurrent(5, 4));
                            in = new INDArray[]{Nd4j.rand(networkDtype, 2, 5, 4)};
                            label = TestUtils.randomOneHotTimeSeries(2, 10, 4).castTo(networkDtype);
                            break;
                        case 1:
                            b.addInputs("in")
                                    .addLayer("1", new ConvolutionLayer.Builder().kernelSize(2,2).nOut(5).convolutionMode(ConvolutionMode.Same).build(), "in")
                                    .addLayer("2", new LocallyConnected2D.Builder().kernelSize(2,2).nOut(5).build(), "1")
                                    .addLayer("out", new OutputLayer.Builder().nOut(10).build(), "2")
                                    .setOutputs("out")
                                    .setInputTypes(InputType.convolutional(8, 8, 1));
                            in = new INDArray[]{Nd4j.rand(networkDtype, 2, 1, 8, 8)};
                            label = TestUtils.randomOneHot(2, 10).castTo(networkDtype);
                            break;
                        default:
                            throw new RuntimeException();
                    }

                    ComputationGraph net = new ComputationGraph(b.build());
                    net.init();

                    INDArray out = net.outputSingle(in);
                    assertEquals(msg, networkDtype, out.dataType());
                    Map<String, INDArray> ff = net.feedForward(in, false);
                    for (Map.Entry<String, INDArray> e : ff.entrySet()) {
                        if (e.getKey().equals("in"))
                            continue;
                        String s = msg + " - layer: " + e.getKey();
                        assertEquals(s, networkDtype, e.getValue().dataType());
                    }

                    net.setInputs(in);
                    net.setLabels(label);
                    net.computeGradientAndScore();

                    net.fit(new MultiDataSet(in, new INDArray[]{label}));
                }
            }
        }
    }
 
Example 14
Source File: TransferLearningCompGraphTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testTransferLearningSameDiffLayersGraphVertex(){

    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()

            .graphBuilder()
            .addInputs("in")
            .layer("l0", new LSTM.Builder().nIn(5).nOut(5).build(), "in")
            .addVertex("l1", new AttentionVertex.Builder().nHeads(1).headSize(5).nInKeys(5).nInQueries(5).nInValues(5).nOut(5).build(), "l0", "l0", "l0")
            .layer("out", new RnnOutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build(), "l1")
            .setOutputs("out")
            .build();

    ComputationGraph cg = new ComputationGraph(conf);
    cg.init();

    INDArray arr = Nd4j.rand(DataType.FLOAT, 2, 5, 10);
    INDArray out = cg.output(arr)[0];


    ComputationGraph cg2 = new TransferLearning.GraphBuilder(cg)
            .fineTuneConfiguration(FineTuneConfiguration.builder().updater(new Adam(0.01)).build())
            .removeVertexAndConnections("out")
            .addLayer("newOut", new RnnOutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build(), "l1")
            .setOutputs("newOut")
            .build();

    cg2.output(arr);

    Map<String,INDArray> m = new HashMap<>(cg.paramTable());
    m.put("newOut_W", m.remove("out_W"));
    m.put("newOut_b", m.remove("out_b"));
    cg2.setParamTable(m);

    Map<String,INDArray> p1 = cg.paramTable();
    Map<String,INDArray> p2 = cg2.paramTable();
    for(String s : p1.keySet()){
        INDArray i1 = p1.get(s);
        INDArray i2 = p2.get(s.replaceAll("out", "newOut"));
        assertEquals(s, i1, i2);
    }

    INDArray out2 = cg2.outputSingle(arr);
    assertEquals(out, out2);
}
 
Example 15
Source File: TransferLearningCompGraphTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testTransferLearningSameDiffLayersGraph(){

    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()

            .graphBuilder()
            .addInputs("in")
            .layer("l0", new LSTM.Builder().nIn(5).nOut(5).build(), "in")
            .layer("l1", new RecurrentAttentionLayer.Builder().nHeads(1).headSize(5).nIn(5).nOut(5).build(), "l0")
            .layer("out", new RnnOutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build(), "l1")
            .setOutputs("out")
            .build();

    ComputationGraph cg = new ComputationGraph(conf);
    cg.init();

    INDArray arr = Nd4j.rand(DataType.FLOAT, 2, 5, 10);
    INDArray out = cg.output(arr)[0];


    ComputationGraph cg2 = new TransferLearning.GraphBuilder(cg)
            .fineTuneConfiguration(FineTuneConfiguration.builder().updater(new Adam(0.01)).build())
            .removeVertexAndConnections("out")
            .addLayer("newOut", new RnnOutputLayer.Builder().nIn(5).nOut(5).activation(Activation.SOFTMAX).build(), "l1")
            .setOutputs("newOut")
            .build();

    cg2.output(arr);

    Map<String,INDArray> m = new HashMap<>(cg.paramTable());
    m.put("newOut_W", m.remove("out_W"));
    m.put("newOut_b", m.remove("out_b"));
    cg2.setParamTable(m);

    Map<String,INDArray> p1 = cg.paramTable();
    Map<String,INDArray> p2 = cg2.paramTable();
    for(String s : p1.keySet()){
        INDArray i1 = p1.get(s);
        INDArray i2 = p2.get(s.replaceAll("out", "newOut"));
        assertEquals(s, i1, i2);
    }

    INDArray out2 = cg2.outputSingle(arr);
    assertEquals(out, out2);
}
 
Example 16
Source File: DTypeTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testComputationGraphTypeConversion() {

    for (DataType dt : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
        Nd4j.setDefaultDataTypes(dt, dt);

        ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
                .seed(12345)
                .weightInit(WeightInit.XAVIER)
                .updater(new Adam(0.01))
                .dataType(DataType.DOUBLE)
                .graphBuilder()
                .addInputs("in")
                .layer("l0", new DenseLayer.Builder().activation(Activation.TANH).nIn(10).nOut(10).build(), "in")
                .layer("l1", new DenseLayer.Builder().activation(Activation.TANH).nIn(10).nOut(10).build(), "l0")
                .layer("out", new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(), "l1")
                .setOutputs("out")
                .build();

        ComputationGraph net = new ComputationGraph(conf);
        net.init();

        INDArray inD = Nd4j.rand(DataType.DOUBLE, 1, 10);
        INDArray lD = Nd4j.create(DataType.DOUBLE, 1, 10);
        net.fit(new DataSet(inD, lD));

        INDArray outDouble = net.outputSingle(inD);
        net.setInput(0, inD);
        net.setLabels(lD);
        net.computeGradientAndScore();
        double scoreDouble = net.score();
        INDArray grads = net.getFlattenedGradients();
        INDArray u = net.getUpdater().getStateViewArray();
        assertEquals(DataType.DOUBLE, net.params().dataType());
        assertEquals(DataType.DOUBLE, grads.dataType());
        assertEquals(DataType.DOUBLE, u.dataType());


        ComputationGraph netFloat = net.convertDataType(DataType.FLOAT);
        netFloat.initGradientsView();
        assertEquals(DataType.FLOAT, netFloat.params().dataType());
        assertEquals(DataType.FLOAT, netFloat.getFlattenedGradients().dataType());
        assertEquals(DataType.FLOAT, netFloat.getUpdater(true).getStateViewArray().dataType());
        INDArray inF = inD.castTo(DataType.FLOAT);
        INDArray lF = lD.castTo(DataType.FLOAT);
        INDArray outFloat = netFloat.outputSingle(inF);
        netFloat.setInput(0, inF);
        netFloat.setLabels(lF);
        netFloat.computeGradientAndScore();
        double scoreFloat = netFloat.score();
        INDArray gradsFloat = netFloat.getFlattenedGradients();
        INDArray uFloat = netFloat.getUpdater().getStateViewArray();

        assertEquals(scoreDouble, scoreFloat, 1e-6);
        assertEquals(outDouble.castTo(DataType.FLOAT), outFloat);
        assertEquals(grads.castTo(DataType.FLOAT), gradsFloat);
        INDArray uCast = u.castTo(DataType.FLOAT);
        assertTrue(uCast.equalsWithEps(uFloat, 1e-4));

        ComputationGraph netFP16 = net.convertDataType(DataType.HALF);
        netFP16.initGradientsView();
        assertEquals(DataType.HALF, netFP16.params().dataType());
        assertEquals(DataType.HALF, netFP16.getFlattenedGradients().dataType());
        assertEquals(DataType.HALF, netFP16.getUpdater(true).getStateViewArray().dataType());

        INDArray inH = inD.castTo(DataType.HALF);
        INDArray lH = lD.castTo(DataType.HALF);
        INDArray outHalf = netFP16.outputSingle(inH);
        netFP16.setInput(0, inH);
        netFP16.setLabels(lH);
        netFP16.computeGradientAndScore();
        double scoreHalf = netFP16.score();
        INDArray gradsHalf = netFP16.getFlattenedGradients();
        INDArray uHalf = netFP16.getUpdater().getStateViewArray();

        assertEquals(scoreDouble, scoreHalf, 1e-4);
        boolean outHalfEq = outDouble.castTo(DataType.HALF).equalsWithEps(outHalf, 1e-3);
        assertTrue(outHalfEq);
        boolean gradsHalfEq = grads.castTo(DataType.HALF).equalsWithEps(gradsHalf, 1e-3);
        assertTrue(gradsHalfEq);
        INDArray uHalfCast = u.castTo(DataType.HALF);
        assertTrue(uHalfCast.equalsWithEps(uHalf, 1e-4));
    }
}
 
Example 17
Source File: TestTransferLearningModelSerializer.java    From deeplearning4j with Apache License 2.0
@Test
public void testModelSerializerFrozenLayersCompGraph() throws Exception {
    FineTuneConfiguration finetune = new FineTuneConfiguration.Builder().updater(new Sgd(0.1)).build();

    int nIn = 6;
    int nOut = 3;

    ComputationGraphConfiguration origConf = new NeuralNetConfiguration.Builder().activation(Activation.TANH).graphBuilder().addInputs("in")
                    .addLayer("0", new DenseLayer.Builder().nIn(nIn).nOut(5).build(), "in")
                    .addLayer("1", new DenseLayer.Builder().nIn(5).nOut(4).build(), "0")
                    .addLayer("2", new DenseLayer.Builder().nIn(4).nOut(3).build(), "1")
                    .addLayer("3", new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                    LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(3)
                                                    .nOut(nOut).build(),
                                    "2")
                    .setOutputs("3").build();
    ComputationGraph origModel = new ComputationGraph(origConf);
    origModel.init();

    ComputationGraph withFrozen = new TransferLearning.GraphBuilder(origModel).fineTuneConfiguration(finetune)
                    .setFeatureExtractor("1").build();

    assertTrue(withFrozen.getLayer(0) instanceof FrozenLayer);
    assertTrue(withFrozen.getLayer(1) instanceof FrozenLayer);

    Map<String, GraphVertex> m = withFrozen.getConfiguration().getVertices();
    Layer l0 = ((LayerVertex) m.get("0")).getLayerConf().getLayer();
    Layer l1 = ((LayerVertex) m.get("1")).getLayerConf().getLayer();
    assertTrue(l0 instanceof org.deeplearning4j.nn.conf.layers.misc.FrozenLayer);
    assertTrue(l1 instanceof org.deeplearning4j.nn.conf.layers.misc.FrozenLayer);

    ComputationGraph restored = TestUtils.testModelSerialization(withFrozen);

    assertTrue(restored.getLayer(0) instanceof FrozenLayer);
    assertTrue(restored.getLayer(1) instanceof FrozenLayer);
    assertFalse(restored.getLayer(2) instanceof FrozenLayer);
    assertFalse(restored.getLayer(3) instanceof FrozenLayer);

    INDArray in = Nd4j.rand(3, nIn);
    INDArray out = withFrozen.outputSingle(in);
    INDArray out2 = restored.outputSingle(in);

    assertEquals(out, out2);

    //Sanity check on train mode (verifies only that a training-mode forward pass doesn't throw):
    out = withFrozen.outputSingle(true, in);
    out2 = restored.outputSingle(true, in);
}
 
Example 18
Source File: GradientCheckTestsComputationGraph.java    From deeplearning4j with Apache License 2.0
@Test
public void testElementWiseVertexBroadcast(){

    ElementWiseVertex.Op[] ops =
            new ElementWiseVertex.Op[] {ElementWiseVertex.Op.Add, ElementWiseVertex.Op.Average,
                    ElementWiseVertex.Op.Subtract, ElementWiseVertex.Op.Max, ElementWiseVertex.Op.Product};

    for(boolean firstSmaller : new boolean[]{false, true}) {
        for (ElementWiseVertex.Op op : ops) {
            ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
                    .updater(new NoOp())
                    .dataType(DataType.DOUBLE)
                    .activation(Activation.TANH)
                    .seed(12345)
                    .graphBuilder()
                    .addInputs("in")
                    .setOutputs("out")
                    .layer("l1", new DenseLayer.Builder().nIn(3).nOut(firstSmaller ? 1 : 3).build(), "in")   //[mb,3]
                    .layer("l2", new DenseLayer.Builder().nIn(3).nOut(firstSmaller ? 3 : 1).build(), "in")   //[mb,1]
                    .addVertex("ew", new ElementWiseVertex(op), "l1", "l2")
                    .layer("out", new OutputLayer.Builder().nIn(3).nOut(2).lossFunction(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).build(), "ew")
                    .build();

            ComputationGraph graph = new ComputationGraph(conf);
            graph.init();

            for (int mb : new int[]{1, 5}) {
                String msg = (firstSmaller ? "first smaller, " : "second smaller, ") + "mb=" + mb + ", op=" + op;

                log.info("Test: {}", msg);

                INDArray in = Nd4j.rand(DataType.FLOAT, mb, 3);

                INDArray out = graph.outputSingle(in);
                assertArrayEquals(new long[]{mb, 2}, out.shape());

                INDArray labels = TestUtils.randomOneHot(mb, 2);

                graph.fit(new DataSet(in, labels));

                boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{in})
                        .labels(new INDArray[]{labels}));
                assertTrue(msg, gradOK);
                TestUtils.testModelSerialization(graph);
            }
        }
    }
}
 
Example 19
Source File: TestTFKerasModelImport.java    From deeplearning4j with Apache License 2.0
private void testModelImportWithData(String path) throws Exception{
    System.out.println(path);
    // TODO multi input/output
    INDArray inputArray;
    INDArray expectedOutputArray;
    File f = Resources.asFile(path);        //May be inside a JAR, which HDF5 can't read from directly
    File modelFile = new File(testDir.getRoot(), f.getName());     //testDir is a JUnit TemporaryFolder rule
    FileUtils.copyFile(f, modelFile);

    synchronized (Hdf5Archive.LOCK_OBJECT){
        Hdf5Archive hdf5Archive = new Hdf5Archive(modelFile.getAbsolutePath());
        List<String> rootGroups = hdf5Archive.getGroups();
        if (rootGroups.contains("data")){
            String inputName = hdf5Archive.readAttributeAsString("input_names", "data");
            String outputName = hdf5Archive.readAttributeAsString("output_names", "data");
            inputArray = hdf5Archive.readDataSet(inputName, "data");
            expectedOutputArray = hdf5Archive.readDataSet(outputName, "data");
        }
        else{
            hdf5Archive.close();
            return;
        }
        hdf5Archive.close();
    }
    INDArray outputArray;

    ComputationGraph dl4jModel = KerasModelImport.importKerasModelAndWeights(path);
    outputArray = dl4jModel.outputSingle(inputArray);

    expectedOutputArray = expectedOutputArray.castTo(DataType.FLOAT);
    outputArray = outputArray.castTo(DataType.FLOAT);
    if (path.contains("misc_")){
        //shape relaxation
        expectedOutputArray = expectedOutputArray.reshape( -1);
        outputArray = outputArray.reshape(-1);
    }

    System.out.println(outputArray.toString());
    System.out.println(expectedOutputArray.toString());
    Assert.assertArrayEquals(expectedOutputArray.shape(), outputArray.shape());
    Assert.assertTrue(expectedOutputArray.equalsWithEps(outputArray, 1e-3));
}
 
Example 20
Source File: KerasYolo9000PredictTest.java    From deeplearning4j with Apache License 2.0
@Ignore
@Test
public void testYoloPredictionImport() throws Exception {
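    //IMAGE_PREPROCESSING_SCALER and DL4J_MODEL_FILE_NAME are constants of the enclosing test class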


    int HEIGHT = 416;
    int WIDTH = 416;
    //DL4J convolutional graphs expect NCHW input: [minibatch, channels, height, width]
    INDArray indArray = Nd4j.create(1, 3, HEIGHT, WIDTH);
    IMAGE_PREPROCESSING_SCALER.transform(indArray);

    KerasLayer.registerCustomLayer("Lambda", KerasSpaceToDepth.class);

    String h5_FILENAME = "modelimport/keras/examples/yolo/yolo-voc.h5";
    ComputationGraph graph = KerasModelImport.importKerasModelAndWeights(h5_FILENAME, false);

    double[][] priorBoxes = {{1.3221, 1.73145}, {3.19275, 4.00944}, {5.05587, 8.09892}, {9.47112, 4.84053}, {11.2364, 10.0071}};
    INDArray priors = Nd4j.create(priorBoxes);

    ComputationGraph model = new TransferLearning.GraphBuilder(graph)
            .addLayer("outputs",
                    new org.deeplearning4j.nn.conf.layers.objdetect.Yolo2OutputLayer.Builder()
                            .boundingBoxPriors(priors)
                            .build(),
                    "conv2d_23")
            .setOutputs("outputs")
            .build();

    ModelSerializer.writeModel(model, DL4J_MODEL_FILE_NAME, false);

    ComputationGraph computationGraph = ModelSerializer.restoreComputationGraph(new File(DL4J_MODEL_FILE_NAME));

    System.out.println(computationGraph.summary(InputType.convolutional(416, 416, 3)));

    INDArray results = computationGraph.outputSingle(indArray);
}