Java Code Examples for org.deeplearning4j.nn.conf.layers.ConvolutionLayer

The following examples show how to use org.deeplearning4j.nn.conf.layers.ConvolutionLayer. These examples are extracted from open source projects; the source project, source file, and license are noted above each example where available.
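Before turning to the project sources, here is a minimal, self-contained sketch of the builder pattern that all of the examples below share. It is illustrative only and is not drawn from any of the listed projects; the seed, layer sizes, and other hyperparameters are arbitrary assumptions.

import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.inputs.InputType;
import org.deeplearning4j.nn.conf.layers.ConvolutionLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public static MultiLayerNetwork minimalConvNet() {
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .seed(42) // arbitrary seed, for reproducibility
            .list()
            .layer(new ConvolutionLayer.Builder(3, 3) // 3x3 kernel
                    .stride(1, 1)
                    .nOut(8) // 8 output feature maps (arbitrary)
                    .activation(Activation.RELU)
                    .build())
            .layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                    .activation(Activation.SOFTMAX)
                    .nOut(10)
                    .build())
            // setInputType infers nIn for each layer and adds the CNN-to-dense preprocessor
            .setInputType(InputType.convolutionalFlat(28, 28, 1))
            .build();

    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
    return net;
}

The key convenience is setInputType: given the input shape, DL4J infers each layer's nIn and inserts the required preprocessors, which is why many of the examples below omit nIn entirely.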
Example 1
Source Project: deeplearning4j   Source File: TestFrozenLayers.java    License: Apache License 2.0
public static MultiLayerNetwork getOriginalNet(int seed){
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .seed(seed)
            .weightInit(WeightInit.XAVIER)
            .activation(Activation.TANH)
            .convolutionMode(ConvolutionMode.Same)
            .updater(new Sgd(0.3))
            .list()
            .layer(new ConvolutionLayer.Builder().nOut(3).kernelSize(2,2).stride(1,1).build())
            .layer(new SubsamplingLayer.Builder().kernelSize(2,2).stride(1,1).build())
            .layer(new ConvolutionLayer.Builder().nIn(3).nOut(3).kernelSize(2,2).stride(1,1).build())
            .layer(new DenseLayer.Builder().nOut(64).build())
            .layer(new DenseLayer.Builder().nIn(64).nOut(64).build())
            .layer(new OutputLayer.Builder().nIn(64).nOut(10).lossFunction(LossFunctions.LossFunction.MSE).build())
            .setInputType(InputType.convolutionalFlat(28,28,1))
            .build();


    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
    return net;
}
 
Example 2
Source Project: deeplearning4j   Source File: TestFrozenLayers.java    License: Apache License 2.0
public static ComputationGraph getOriginalGraph(int seed){
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
            .seed(seed)
            .weightInit(WeightInit.XAVIER)
            .activation(Activation.TANH)
            .convolutionMode(ConvolutionMode.Same)
            .updater(new Sgd(0.3))
            .graphBuilder()
            .addInputs("in")
            .layer("0", new ConvolutionLayer.Builder().nOut(3).kernelSize(2,2).stride(1,1).build(), "in")
            .layer("1", new SubsamplingLayer.Builder().kernelSize(2,2).stride(1,1).build(), "0")
            .layer("2", new ConvolutionLayer.Builder().nIn(3).nOut(3).kernelSize(2,2).stride(1,1).build(), "1")
            .layer("3", new DenseLayer.Builder().nOut(64).build(), "2")
            .layer("4", new DenseLayer.Builder().nIn(64).nOut(64).build(), "3")
            .layer("5", new OutputLayer.Builder().nIn(64).nOut(10).lossFunction(LossFunctions.LossFunction.MSE).build(), "4")
            .setOutputs("5")
            .setInputTypes(InputType.convolutionalFlat(28,28,1))
            .build();


    ComputationGraph net = new ComputationGraph(conf);
    net.init();
    return net;
}
 
Example 3
Source Project: deeplearning4j   Source File: LocalResponseTest.java    License: Apache License 2.0
@Test
public void testMultiCNNLayer() throws Exception {
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT).seed(123).list()
                    .layer(0, new ConvolutionLayer.Builder().nIn(1).nOut(6).weightInit(WeightInit.XAVIER)
                                    .activation(Activation.RELU).build())
                    .layer(1, new LocalResponseNormalization.Builder().build())
                    .layer(2, new DenseLayer.Builder().nOut(2).build())
                    .layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                    .weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX).nIn(2).nOut(10)
                                    .build())
                    .setInputType(InputType.convolutionalFlat(28, 28, 1)).build();

    MultiLayerNetwork network = new MultiLayerNetwork(conf);
    network.init();
    DataSetIterator iter = new MnistDataSetIterator(2, 2);
    DataSet next = iter.next();

    network.fit(next);
}
 
Example 4
Source Project: deeplearning4j   Source File: ConvolutionLayerTest.java    License: Apache License 2.0
private static MultiLayerNetwork getCNNMLNConfig(boolean backprop, boolean pretrain) {
    int outputNum = 10;
    int seed = 123;

    MultiLayerConfiguration.Builder conf =
                    new NeuralNetConfiguration.Builder().seed(seed)
                                    .optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT).list()
                                    .layer(0, new ConvolutionLayer.Builder(new int[] {10, 10}).nOut(6).build())
                                    .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX,
                                                    new int[] {2, 2}).stride(1, 1).build())
                                    .layer(2, new OutputLayer.Builder(
                                                    LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                                                                    .nOut(outputNum).weightInit(WeightInit.XAVIER)
                                                                    .activation(Activation.SOFTMAX).build())
                                    .setInputType(InputType.convolutionalFlat(28, 28, 1));

    MultiLayerNetwork model = new MultiLayerNetwork(conf.build());
    model.init();

    return model;
}
 
Example 5
Source Project: deeplearning4j   Source File: ConvolutionLayerSetupTest.java    License: Apache License 2.0
@Test
public void testMultiChannel() throws Exception {
    INDArray in = Nd4j.rand(new int[] {10, 3, 28, 28});
    INDArray labels = Nd4j.rand(10, 2);
    DataSet next = new DataSet(in, labels);

    NeuralNetConfiguration.ListBuilder builder = (NeuralNetConfiguration.ListBuilder) incompleteLFW();
    builder.setInputType(InputType.convolutional(28, 28, 3));
    MultiLayerConfiguration conf = builder.build();
    ConvolutionLayer layer2 = (ConvolutionLayer) conf.getConf(2).getLayer();
    assertEquals(6, layer2.getNIn());

    MultiLayerNetwork network = new MultiLayerNetwork(conf);
    network.init();
    network.fit(next);
}
 
Example 6
Source Project: deeplearning4j   Source File: ConvolutionLayerSetupTest.java    License: Apache License 2.0
@Test
public void testLRN() throws Exception {
    List<String> labels = new ArrayList<>(Arrays.asList("Zico", "Ziwang_Xu"));
    File dir = testDir.newFolder();
    new ClassPathResource("lfwtest/").copyDirectory(dir);
    String rootDir = dir.getAbsolutePath();

    RecordReader reader = new ImageRecordReader(28, 28, 3);
    reader.initialize(new FileSplit(new File(rootDir)));
    DataSetIterator recordReader = new RecordReaderDataSetIterator(reader, 10, 1, labels.size());
    labels.remove("lfwtest");
    NeuralNetConfiguration.ListBuilder builder = (NeuralNetConfiguration.ListBuilder) incompleteLRN();
    builder.setInputType(InputType.convolutional(28, 28, 3));

    MultiLayerConfiguration conf = builder.build();

    ConvolutionLayer layer2 = (ConvolutionLayer) conf.getConf(3).getLayer();
    assertEquals(6, layer2.getNIn());

}
 
Example 7
Source Project: deeplearning4j   Source File: ConvolutionLayerSetupTest.java    License: Apache License 2.0
public MultiLayerConfiguration.Builder incompleteLRN() {
    MultiLayerConfiguration.Builder builder =
                    new NeuralNetConfiguration.Builder().seed(3)
                                    .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT).list()
                                    .layer(0, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
                                                    new int[] {5, 5}).nOut(6).build())
                                    .layer(1, new org.deeplearning4j.nn.conf.layers.SubsamplingLayer.Builder(
                                                    new int[] {2, 2}).build())
                                    .layer(2, new LocalResponseNormalization.Builder().build())
                                    .layer(3, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
                                                    new int[] {5, 5}).nOut(6).build())
                                    .layer(4, new org.deeplearning4j.nn.conf.layers.SubsamplingLayer.Builder(
                                                    new int[] {2, 2}).build())
                                    .layer(5, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                            LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nOut(2)
                                            .activation(Activation.SOFTMAX).build());
    return builder;
}
 
Example 8
Source Project: deeplearning4j   Source File: ConvolutionLayerSetupTest.java    License: Apache License 2.0
public MultiLayerConfiguration.Builder incompleteLFW() {
    MultiLayerConfiguration.Builder builder =
                    new NeuralNetConfiguration.Builder().seed(3)
                                    .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT).list()
                                    .layer(0, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
                                                    new int[] {5, 5}).nOut(6).build())
                                    .layer(1, new org.deeplearning4j.nn.conf.layers.SubsamplingLayer.Builder(
                                                    new int[] {2, 2}).build())
                                    .layer(2, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
                                                    new int[] {5, 5}).nOut(6).build())
                                    .layer(3, new org.deeplearning4j.nn.conf.layers.SubsamplingLayer.Builder(
                                                    new int[] {2, 2}).build())
                                    .layer(4, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                                    LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).activation(Activation.SOFTMAX)
                                            .nOut(2).build());
    return builder;
}
 
Example 9
Source Project: deeplearning4j   Source File: ConvolutionLayerSetupTest.java    License: Apache License 2.0
public MultiLayerConfiguration.Builder incompleteMnistLenet() {
    MultiLayerConfiguration.Builder builder =
                    new NeuralNetConfiguration.Builder().seed(3)
                                    .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT).list()
                                    .layer(0, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
                                                    new int[] {5, 5}).nIn(1).nOut(20).build())
                                    .layer(1, new org.deeplearning4j.nn.conf.layers.SubsamplingLayer.Builder(
                                                    new int[] {2, 2}, new int[] {2, 2}).build())
                                    .layer(2, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
                                                    new int[] {5, 5}).nIn(20).nOut(50).build())
                                    .layer(3, new org.deeplearning4j.nn.conf.layers.SubsamplingLayer.Builder(
                                                    new int[] {2, 2}, new int[] {2, 2}).build())
                                    .layer(4, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nOut(500)
                                                    .build())
                                    .layer(5, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                                    LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                                                                    .activation(Activation.SOFTMAX).nOut(10)
                                                                    .build());
    return builder;
}
 
Example 10
Source Project: deeplearning4j   Source File: ConvolutionLayerSetupTest.java    License: Apache License 2.0
public MultiLayerConfiguration mnistLenet() {
    MultiLayerConfiguration conf =
                    new NeuralNetConfiguration.Builder().seed(3)
                                    .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT).list()
                                    .layer(0, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
                                                    new int[] {5, 5}).nIn(1).nOut(6).build())
                                    .layer(1, new org.deeplearning4j.nn.conf.layers.SubsamplingLayer.Builder(
                                                    new int[] {5, 5}, new int[] {2, 2}).build())
                                    .layer(2, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
                                                    new int[] {5, 5}).nIn(1).nOut(6).build())
                                    .layer(3, new org.deeplearning4j.nn.conf.layers.SubsamplingLayer.Builder(
                                                    new int[] {5, 5}, new int[] {2, 2}).build())
                                    .layer(4, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                                    LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nIn(150)
                                                                    .nOut(10).build())
                                    .build();
    return conf;
}
 
Example 11
Source Project: deeplearning4j   Source File: ConvolutionLayerSetupTest.java    License: Apache License 2.0
public MultiLayerConfiguration.Builder inComplete() {
    int nChannels = 1;
    int outputNum = 10;
    int seed = 123;

    MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().seed(seed)
                    .optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT).list()
                    .layer(0, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(new int[] {10, 10},
                                    new int[] {2, 2}).nIn(nChannels).nOut(6).build())
                    .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2})
                                    .build())
                    .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                                    .nOut(outputNum).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX)
                                    .build());

    return builder;
}
 
Example 12
Source Project: deeplearning4j   Source File: ConvolutionLayerSetupTest.java    License: Apache License 2.0
public MultiLayerConfiguration.Builder complete() {
    final int numRows = 28;
    final int numColumns = 28;
    int nChannels = 1;
    int outputNum = 10;
    int seed = 123;

    MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().seed(seed)
                    .optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT).list()
                    .layer(0, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(new int[] {10, 10},
                                    new int[] {2, 2}).nIn(nChannels).nOut(6).build())
                    .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2})
                                    .build())
                    .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                    .nIn(5 * 5 * 1 * 6) //150
                                    .nOut(outputNum).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX)
                                    .build())
                    .inputPreProcessor(0, new FeedForwardToCnnPreProcessor(numRows, numColumns, nChannels))
                    .inputPreProcessor(2, new CnnToFeedForwardPreProcessor(5, 5, 6));

    return builder;
}
 
Example 13
Source Project: deeplearning4j   Source File: ConvolutionLayerSetupTest.java    License: Apache License 2.0
@Test
public void testSubSamplingWithPadding() {

    MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().list()
                    .layer(0, new ConvolutionLayer.Builder(2, 2).padding(0, 0).stride(2, 2).nIn(1).nOut(3).build()) //(28-2+0)/2+1 = 14
                    .layer(1, new SubsamplingLayer.Builder().kernelSize(2, 2).padding(1, 1).stride(2, 2).build()) //(14-2+2)/2+1 = 8 -> 8x8x3
                    .layer(2, new OutputLayer.Builder().nOut(3).activation(Activation.SOFTMAX).build())
                    .setInputType(InputType.convolutional(28, 28, 1));

    MultiLayerConfiguration conf = builder.build();

    assertNotNull(conf.getInputPreProcess(2));
    assertTrue(conf.getInputPreProcess(2) instanceof CnnToFeedForwardPreProcessor);
    CnnToFeedForwardPreProcessor proc = (CnnToFeedForwardPreProcessor) conf.getInputPreProcess(2);
    assertEquals(8, proc.getInputHeight());
    assertEquals(8, proc.getInputWidth());
    assertEquals(3, proc.getNumChannels());

    assertEquals(8 * 8 * 3, ((FeedForwardLayer) conf.getConf(2).getLayer()).getNIn());
}
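The inline size comments in this and the next few examples follow the standard output-size arithmetic for 2D convolution and pooling. A small sketch makes it explicit; the helper name convOutSize is ours, not a DL4J API:

public static int convOutSize(int in, int kernel, int padding, int stride) {
    // out = (in - kernel + 2 * padding) / stride + 1, per spatial dimension
    return (in - kernel + 2 * padding) / stride + 1;
}

// Reproduces the comments above:
// convOutSize(28, 2, 0, 2) == 14   (the convolution layer)
// convOutSize(14, 2, 1, 2) == 8    (the padded subsampling layer)

Note this is the arithmetic for the default ConvolutionMode.Truncate; with ConvolutionMode.Same, as in Examples 1 and 2, the output size is simply ceil(in / stride).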
 
Example 14
Source Project: deeplearning4j   Source File: ConvolutionLayerSetupTest.java    License: Apache License 2.0
@Test
public void testUpsampling() {

    MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().list()
            .layer(new ConvolutionLayer.Builder(2, 2).padding(0, 0).stride(2, 2).nIn(1).nOut(3).build()) //(28-2+0)/2+1 = 14
            .layer(new Upsampling2D.Builder().size(3).build()) // 14 * 3 = 42!
            .layer(new OutputLayer.Builder().nOut(3).activation(Activation.SOFTMAX).build())
            .setInputType(InputType.convolutional(28, 28, 1));

    MultiLayerConfiguration conf = builder.build();

    assertNotNull(conf.getInputPreProcess(2));
    assertTrue(conf.getInputPreProcess(2) instanceof CnnToFeedForwardPreProcessor);
    CnnToFeedForwardPreProcessor proc = (CnnToFeedForwardPreProcessor) conf.getInputPreProcess(2);
    assertEquals(42, proc.getInputHeight());
    assertEquals(42, proc.getInputWidth());
    assertEquals(3, proc.getNumChannels());

    assertEquals(42 * 42 * 3, ((FeedForwardLayer) conf.getConf(2).getLayer()).getNIn());
}
 
Example 15
Source Project: deeplearning4j   Source File: ConvolutionLayerSetupTest.java    License: Apache License 2.0
@Test
public void testSpaceToBatch() {

    int[] blocks = new int[] {2, 2};

    MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().list()
            .layer(new ConvolutionLayer.Builder(2, 2).padding(0, 0).stride(2, 2).nIn(1).nOut(3).build()) //(28-2+0)/2+1 = 14
            .layer(new SpaceToBatchLayer.Builder(blocks).build()) // Divide space dimensions by blocks, i.e. 14/2 = 7
            .layer(new OutputLayer.Builder().nOut(3).activation(Activation.SOFTMAX).build())
            .setInputType(InputType.convolutional(28, 28, 1));

    MultiLayerConfiguration conf = builder.build();

    assertNotNull(conf.getInputPreProcess(2));
    assertTrue(conf.getInputPreProcess(2) instanceof CnnToFeedForwardPreProcessor);
    CnnToFeedForwardPreProcessor proc = (CnnToFeedForwardPreProcessor) conf.getInputPreProcess(2);
    assertEquals(7, proc.getInputHeight());
    assertEquals(7, proc.getInputWidth());
    assertEquals(3, proc.getNumChannels());
}
 
Example 16
Source Project: deeplearning4j   Source File: ConvolutionLayerSetupTest.java    License: Apache License 2.0
@Test
public void testSpaceToDepth() {

    int blocks = 2;

    MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().list()
            //(28-2+0)/2+1 = 14 -> 14x14x3 out
            .layer(new ConvolutionLayer.Builder(2, 2).padding(0, 0).stride(2, 2).nIn(1).nOut(3).build())
            // Divide space dimensions by blocks, i.e. 14/2 = 7 -> 7x7x12 out (3x2x2 depth)
            .layer(new SpaceToDepthLayer.Builder(blocks, SpaceToDepthLayer.DataFormat.NCHW).build())
            .layer(new OutputLayer.Builder().nIn(3 * 2 * 2).nOut(3).activation(Activation.SOFTMAX).build()) // nIn of the next layer gets multiplied by 2*2.
            .setInputType(InputType.convolutional(28, 28, 1));

    MultiLayerConfiguration conf = builder.build();

    assertNotNull(conf.getInputPreProcess(2));
    assertTrue(conf.getInputPreProcess(2) instanceof CnnToFeedForwardPreProcessor);
    CnnToFeedForwardPreProcessor proc = (CnnToFeedForwardPreProcessor) conf.getInputPreProcess(2);
    assertEquals(7, proc.getInputHeight());
    assertEquals(7, proc.getInputWidth());
    assertEquals(12, proc.getNumChannels());

}
 
Example 17
Source Project: deeplearning4j   Source File: ConvDataFormatTests.java    License: Apache License 2.0
private MultiLayerNetwork getConv2dNet(CNN2DFormat format, boolean setOnLayerAlso, ConvolutionMode cm) {
    if (setOnLayerAlso) {
        return getNetWithLayer(new ConvolutionLayer.Builder()
                .kernelSize(3, 3)
                .stride(2, 2)
                .activation(Activation.TANH)
                .dataFormat(format)
                .nOut(3)
                .helperAllowFallback(false)
                .build(), format, cm, null);
    } else {
        return getNetWithLayer(new ConvolutionLayer.Builder()
                .kernelSize(3, 3)
                .stride(2, 2)
                .activation(Activation.TANH)
                .nOut(3)
                .helperAllowFallback(false)
                .build(), format, cm, null);
    }
}
 
Example 18
Source Project: deeplearning4j   Source File: ConvDataFormatTests.java    License: Apache License 2.0
private MultiLayerNetwork getCnnLossNet(CNN2DFormat format, boolean setOnLayerAlso, ConvolutionMode cm){
    NeuralNetConfiguration.ListBuilder builder = new NeuralNetConfiguration.Builder()
            .seed(12345)
            .convolutionMode(cm)
            .list()
            .layer(new ConvolutionLayer.Builder()
                    .kernelSize(3, 3)
                    .stride(2, 2)
                    .activation(Activation.TANH)
                    .dataFormat(format)
                    .nOut(3)
                    .helperAllowFallback(false)
                    .build());
    if(setOnLayerAlso){
        builder.layer(new CnnLossLayer.Builder().format(format).activation(Activation.SOFTMAX).build());
    } else {
        builder.layer(new CnnLossLayer.Builder().activation(Activation.SOFTMAX).build());
    }

    builder.setInputType(InputType.convolutional(12, 12, 3, format));

    MultiLayerNetwork net = new MultiLayerNetwork(builder.build());
    net.init();
    return net;
}
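These format tests hinge on the layout difference between the two activation formats: NCHW activations have shape [minibatch, channels, height, width], while NHWC activations have shape [minibatch, height, width, channels]. A minimal sketch of the relationship (variable names are ours):

// NCHW: [minibatch, channels, height, width]
INDArray nchw = Nd4j.rand(DataType.FLOAT, 2, 3, 12, 12);
// NHWC: [minibatch, height, width, channels] -- same data, axes permuted
INDArray nhwc = nchw.permute(0, 2, 3, 1);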
 
Example 19
Source Project: deeplearning4j   Source File: DQNFactoryStdConv.java    License: Apache License 2.0
public DQN buildDQN(int[] shapeInputs, int numOutputs) {

    if (shapeInputs.length == 1)
        throw new AssertionError("Cannot apply a convolutional layer to a rank-1 input shape");

    NeuralNetConfiguration.ListBuilder confB = new NeuralNetConfiguration.Builder().seed(Constants.NEURAL_NET_SEED)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .l2(conf.getL2())
                    .updater(conf.getUpdater() != null ? conf.getUpdater() : new Adam())
                    .weightInit(WeightInit.XAVIER).list()
                    .layer(0, new ConvolutionLayer.Builder(8, 8).nIn(shapeInputs[0]).nOut(16).stride(4, 4)
                                    .activation(Activation.RELU).build());

    confB.layer(1, new ConvolutionLayer.Builder(4, 4).nOut(32).stride(2, 2).activation(Activation.RELU).build());

    confB.layer(2, new DenseLayer.Builder().nOut(256).activation(Activation.RELU).build());

    confB.layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).nOut(numOutputs)
                    .build());

    confB.setInputType(InputType.convolutional(shapeInputs[1], shapeInputs[2], shapeInputs[0]));
    MultiLayerConfiguration mlnconf = confB.build();
    MultiLayerNetwork model = new MultiLayerNetwork(mlnconf);
    model.init();
    if (conf.getListeners() != null) {
        model.setListeners(conf.getListeners());
    } else {
        model.setListeners(new ScoreIterationListener(Constants.NEURAL_NET_ITERATION_LISTENER));
    }

    return new DQN(model);
}
 
Example 20
Source Project: deeplearning4j   Source File: KerasAtrousConvolution2D.java    License: Apache License 2.0
/**
 * Constructor from parsed Keras layer configuration dictionary.
 *
 * @param layerConfig           dictionary containing Keras layer configuration
 * @param enforceTrainingConfig whether to enforce training-related configuration options
 * @throws InvalidKerasConfigurationException     Invalid Keras config
 * @throws UnsupportedKerasConfigurationException Unsupported Keras config
 */
public KerasAtrousConvolution2D(Map<String, Object> layerConfig, boolean enforceTrainingConfig)
        throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException {
    super(layerConfig, enforceTrainingConfig);

    hasBias = getHasBiasFromConfig(layerConfig, conf);
    numTrainableParams = hasBias ? 2 : 1;

    LayerConstraint biasConstraint = KerasConstraintUtils.getConstraintsFromConfig(
            layerConfig, conf.getLAYER_FIELD_B_CONSTRAINT(), conf, kerasMajorVersion);
    LayerConstraint weightConstraint = KerasConstraintUtils.getConstraintsFromConfig(
            layerConfig, conf.getLAYER_FIELD_W_CONSTRAINT(), conf, kerasMajorVersion);

    IWeightInit init = getWeightInitFromConfig(layerConfig, conf.getLAYER_FIELD_INIT(),
            enforceTrainingConfig, conf, kerasMajorVersion);

    ConvolutionLayer.Builder builder = new ConvolutionLayer.Builder().name(this.layerName)
            .nOut(getNOutFromConfig(layerConfig, conf)).dropOut(this.dropout)
            .activation(getIActivationFromConfig(layerConfig, conf))
            .weightInit(init)
            .dilation(getDilationRate(layerConfig, 2, conf, true))
            .l1(this.weightL1Regularization).l2(this.weightL2Regularization)
            .convolutionMode(getConvolutionModeFromConfig(layerConfig, conf))
            .kernelSize(getKernelSizeFromConfig(layerConfig, 2, conf, kerasMajorVersion))
            .hasBias(hasBias)
            .stride(getStrideFromConfig(layerConfig, 2, conf));
    int[] padding = getPaddingFromBorderModeConfig(layerConfig, 2, conf, kerasMajorVersion);

    if (hasBias)
        builder.biasInit(0.0);
    if (padding != null)
        builder.padding(padding);
    if (biasConstraint != null)
        builder.constrainBias(biasConstraint);
    if (weightConstraint != null)
        builder.constrainWeights(weightConstraint);
    this.layer = builder.build();
}
 
Example 21
Source Project: deeplearning4j   Source File: KerasConvolution2D.java    License: Apache License 2.0
/**
 * Constructor from parsed Keras layer configuration dictionary.
 *
 * @param layerConfig           dictionary containing Keras layer configuration
 * @param enforceTrainingConfig whether to enforce training-related configuration options
 * @throws InvalidKerasConfigurationException     Invalid Keras config
 * @throws UnsupportedKerasConfigurationException Unsupported Keras config
 */
public KerasConvolution2D(Map<String, Object> layerConfig, boolean enforceTrainingConfig)
        throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException {
    super(layerConfig, enforceTrainingConfig);

    hasBias = getHasBiasFromConfig(layerConfig, conf);
    numTrainableParams = hasBias ? 2 : 1;
    int[] dilationRate = getDilationRate(layerConfig, 2, conf, false);

    IWeightInit init = getWeightInitFromConfig(layerConfig, conf.getLAYER_FIELD_INIT(),
            enforceTrainingConfig, conf, kerasMajorVersion);

    LayerConstraint biasConstraint = KerasConstraintUtils.getConstraintsFromConfig(
            layerConfig, conf.getLAYER_FIELD_B_CONSTRAINT(), conf, kerasMajorVersion);
    LayerConstraint weightConstraint = KerasConstraintUtils.getConstraintsFromConfig(
            layerConfig, conf.getLAYER_FIELD_W_CONSTRAINT(), conf, kerasMajorVersion);

    ConvolutionLayer.Builder builder = new ConvolutionLayer.Builder().name(this.layerName)
            .nOut(getNOutFromConfig(layerConfig, conf)).dropOut(this.dropout)
            .activation(getIActivationFromConfig(layerConfig, conf))
            .weightInit(init)
            .l1(this.weightL1Regularization).l2(this.weightL2Regularization)
            .convolutionMode(getConvolutionModeFromConfig(layerConfig, conf))
            .kernelSize(getKernelSizeFromConfig(layerConfig, 2, conf, kerasMajorVersion))
            .hasBias(hasBias)
            .stride(getStrideFromConfig(layerConfig, 2, conf))
            .dataFormat(dimOrder == DimOrder.TENSORFLOW ? CNN2DFormat.NHWC : CNN2DFormat.NCHW);
    int[] padding = getPaddingFromBorderModeConfig(layerConfig, 2, conf, kerasMajorVersion);
    if (hasBias)
        builder.biasInit(0.0);
    if (padding != null)
        builder.padding(padding);
    if (dilationRate != null)
        builder.dilation(dilationRate);
    if (biasConstraint != null)
        builder.constrainBias(biasConstraint);
    if (weightConstraint != null)
        builder.constrainWeights(weightConstraint);
    this.layer = builder.build();
}
 
Example 22
@Override
public long numParams(Layer l) {
    org.deeplearning4j.nn.conf.layers.ConvolutionLayer layerConf =
                    (org.deeplearning4j.nn.conf.layers.ConvolutionLayer) l;

    int[] kernel = layerConf.getKernelSize();
    val nIn = layerConf.getNIn();
    val nOut = layerConf.getNOut();
    return nIn * nOut * kernel[0] * kernel[1] + (layerConf.hasBias() ? nOut : 0);
}
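To make the formula concrete, take the first LeNet convolution from Example 9: a 5x5 kernel with nIn = 1, nOut = 20, and bias enabled. A back-of-the-envelope check (a sketch, not DL4J code):

// weights: nIn * nOut * kernelH * kernelW = 1 * 20 * 5 * 5 = 500
// biases:  one per output feature map                      =  20
// total                                                    = 520
long nIn = 1, nOut = 20;
int[] kernel = {5, 5};
long numParams = nIn * nOut * kernel[0] * kernel[1] + nOut; // 520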
 
Example 23
@Override
public List<String> paramKeys(Layer layer) {
    org.deeplearning4j.nn.conf.layers.ConvolutionLayer layerConf =
            (org.deeplearning4j.nn.conf.layers.ConvolutionLayer) layer;
    if(layerConf.hasBias()){
        return Arrays.asList(WEIGHT_KEY, BIAS_KEY);
    } else {
        return weightKeys(layer);
    }
}
 
Example 24
@Override
public List<String> biasKeys(Layer layer) {
    org.deeplearning4j.nn.conf.layers.ConvolutionLayer layerConf =
            (org.deeplearning4j.nn.conf.layers.ConvolutionLayer) layer;
    if(layerConf.hasBias()){
        return Collections.singletonList(BIAS_KEY);
    } else {
        return Collections.emptyList();
    }
}
 
Example 25
protected INDArray createBias(NeuralNetConfiguration conf, INDArray biasView, boolean initializeParams) {
    //the bias is a 1D tensor -- one bias per output feature map
    org.deeplearning4j.nn.conf.layers.ConvolutionLayer layerConf =
                    (org.deeplearning4j.nn.conf.layers.ConvolutionLayer) conf.getLayer();
    if (initializeParams)
        biasView.assign(layerConf.getBiasInit());
    return biasView;
}
 
Example 26
Source Project: deeplearning4j   Source File: RegressionTest100b4.java    License: Apache License 2.0
@Test
public void testYoloHouseNumber() throws Exception {

    File f = Resources.asFile("regression_testing/100b4/HouseNumberDetection_100b4.bin");
    ComputationGraph net = ComputationGraph.load(f, true);

    int nBoxes = 5;
    int nClasses = 10;

    ConvolutionLayer cl = (ConvolutionLayer) ((LayerVertex) net.getConfiguration().getVertices()
            .get("convolution2d_9")).getLayerConf().getLayer();
    assertEquals(nBoxes * (5 + nClasses), cl.getNOut());
    assertEquals(new ActivationIdentity(), cl.getActivationFn());
    assertEquals(ConvolutionMode.Same, cl.getConvolutionMode());
    assertEquals(new WeightInitXavier(), cl.getWeightInitFn());
    assertArrayEquals(new int[]{1, 1}, cl.getKernelSize());

    INDArray outExp;
    File f2 = Resources.asFile("regression_testing/100b4/HouseNumberDetection_Output_100b4.bin");
    try (DataInputStream dis = new DataInputStream(new FileInputStream(f2))) {
        outExp = Nd4j.read(dis);
    }

    INDArray in;
    File f3 = Resources.asFile("regression_testing/100b4/HouseNumberDetection_Input_100b4.bin");
    try (DataInputStream dis = new DataInputStream(new FileInputStream(f3))) {
        in = Nd4j.read(dis);
    }

    INDArray outAct = net.outputSingle(in);

    boolean eq = outExp.equalsWithEps(outAct.castTo(outExp.dataType()), 1e-3);
    assertTrue(eq);
}
 
Example 27
Source Project: deeplearning4j   Source File: RegressionTest100b3.java    License: Apache License 2.0
@Test
@Ignore("AB 2019/05/23 - Failing on linux-x86_64-cuda-9.2 - see issue #7657")
public void testYoloHouseNumber() throws Exception {

    File f = Resources.asFile("regression_testing/100b3/HouseNumberDetection_100b3.bin");
    ComputationGraph net = ComputationGraph.load(f, true);

    int nBoxes = 5;
    int nClasses = 10;

    ConvolutionLayer cl = (ConvolutionLayer)((LayerVertex)net.getConfiguration().getVertices().get("convolution2d_9")).getLayerConf().getLayer();
    assertEquals(nBoxes * (5 + nClasses), cl.getNOut());
    assertEquals(new ActivationIdentity(), cl.getActivationFn());
    assertEquals(ConvolutionMode.Same, cl.getConvolutionMode());
    assertEquals(new WeightInitXavier(), cl.getWeightInitFn());
    assertArrayEquals(new int[]{1,1}, cl.getKernelSize());

    INDArray outExp;
    File f2 = Resources.asFile("regression_testing/100b3/HouseNumberDetection_Output_100b3.bin");
    try(DataInputStream dis = new DataInputStream(new FileInputStream(f2))){
        outExp = Nd4j.read(dis);
    }

    INDArray in;
    File f3 = Resources.asFile("regression_testing/100b3/HouseNumberDetection_Input_100b3.bin");
    try(DataInputStream dis = new DataInputStream(new FileInputStream(f3))){
        in = Nd4j.read(dis);
    }

    INDArray outAct = net.outputSingle(in);

    boolean eq = outExp.equalsWithEps(outAct.castTo(outExp.dataType()), 1e-3);
    assertTrue(eq);
}
 
Example 28
Source Project: deeplearning4j   Source File: LRNGradientCheckTests.java    License: Apache License 2.0
@Test
public void testGradientLRNSimple() {
    Nd4j.getRandom().setSeed(12345);
    int minibatch = 10;
    int depth = 6;
    int hw = 5;
    int nOut = 4;
    INDArray input = Nd4j.rand(new int[] {minibatch, depth, hw, hw});
    INDArray labels = Nd4j.zeros(minibatch, nOut);
    Random r = new Random(12345);
    for (int i = 0; i < minibatch; i++) {
        labels.putScalar(i, r.nextInt(nOut), 1.0);
    }

    MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().updater(new NoOp())
                    .dataType(DataType.DOUBLE)
                    .seed(12345L)
                    .dist(new NormalDistribution(0, 2)).list()
                    .layer(0, new ConvolutionLayer.Builder().nOut(6).kernelSize(2, 2).stride(1, 1)
                                    .activation(Activation.TANH).build())
                    .layer(1, new LocalResponseNormalization.Builder().build())
                    .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                    .activation(Activation.SOFTMAX).nOut(nOut).build())
                    .setInputType(InputType.convolutional(hw, hw, depth));

    MultiLayerNetwork mln = new MultiLayerNetwork(builder.build());
    mln.init();

    boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR,
                    DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels);

    assertTrue(gradOK);
    TestUtils.testModelSerialization(mln);
}
 
Example 29
Source Project: deeplearning4j   Source File: BaseLayerTest.java    License: Apache License 2.0
public Layer configureSingleLayer() {
    int nIn = 2;
    int nOut = 2;

    NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder()
                    .layer(new ConvolutionLayer.Builder().nIn(nIn).nOut(nOut).build()).build();

    val numParams = conf.getLayer().initializer().numParams(conf);
    INDArray params = Nd4j.create(1, numParams);
    return conf.getLayer().instantiate(conf, null, 0, params, true, params.dataType());
}
 
Example 30
Source Project: deeplearning4j   Source File: DarknetHelper.java    License: Apache License 2.0
public static ComputationGraphConfiguration.GraphBuilder addLayers(ComputationGraphConfiguration.GraphBuilder graphBuilder, int layerNumber, String input, int filterSize, int nIn, int nOut, int poolSize, int poolStride) {
    graphBuilder
            .addLayer("convolution2d_" + layerNumber,
                    new ConvolutionLayer.Builder(filterSize,filterSize)
                            .nIn(nIn)
                            .nOut(nOut)
                            .weightInit(WeightInit.XAVIER)
                            .convolutionMode(ConvolutionMode.Same)
                            .hasBias(false)
                            .stride(1,1)
                            .activation(Activation.IDENTITY)
                            .build(),
                    input)
            .addLayer("batchnormalization_" + layerNumber,
                    new BatchNormalization.Builder()
                            .nIn(nOut).nOut(nOut)
                            .weightInit(WeightInit.XAVIER)
                            .activation(Activation.IDENTITY)
                            .build(),
                    "convolution2d_" + layerNumber)
            .addLayer("activation_" + layerNumber,
                    new ActivationLayer.Builder()
                            .activation(new ActivationLReLU(0.1))
                            .build(),
                    "batchnormalization_" + layerNumber);
    if (poolSize > 0) {
        graphBuilder
                .addLayer("maxpooling2d_" + layerNumber,
                        new SubsamplingLayer.Builder()
                                .kernelSize(poolSize, poolSize)
                                .stride(poolStride, poolStride)
                                .convolutionMode(ConvolutionMode.Same)
                                .build(),
                        "activation_" + layerNumber);
    }

    return graphBuilder;
}
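As a usage sketch, two stacked blocks might be wired as follows; the input name and all sizes are hypothetical, and only the addLayers signature shown above is assumed:

ComputationGraphConfiguration.GraphBuilder b = new NeuralNetConfiguration.Builder()
        .graphBuilder()
        .addInputs("input");
// 3x3 conv, 3 -> 32 channels, then 2x2 max-pooling with stride 2
b = DarknetHelper.addLayers(b, 1, "input", 3, 3, 32, 2, 2);
// the next block consumes the pooling output named after the previous layerNumber
b = DarknetHelper.addLayers(b, 2, "maxpooling2d_1", 3, 32, 64, 2, 2);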