org.deeplearning4j.nn.conf.layers.ConvolutionLayer Java Examples

The following examples show how to use org.deeplearning4j.nn.conf.layers.ConvolutionLayer. You can vote up the examples you like or vote down the ones you don't like. To view the original project or source file, follow the links above each example. Related API usage is shown on the sidebar.
Example #1
Source File: ConvolutionLayerSetupTest.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
// Partially-specified conv net including an LRN layer. The nIn values are
// deliberately omitted (the "incomplete" part) so that they can be filled in
// later, e.g. via setInputType-driven inference.
public MultiLayerConfiguration.Builder incompleteLRN() {
    NeuralNetConfiguration.ListBuilder list = new NeuralNetConfiguration.Builder()
            .seed(3)
            .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT)
            .list();
    list.layer(0, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
            new int[] {5, 5}).nOut(6).build());
    list.layer(1, new org.deeplearning4j.nn.conf.layers.SubsamplingLayer.Builder(
            new int[] {2, 2}).build());
    list.layer(2, new LocalResponseNormalization.Builder().build());
    list.layer(3, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
            new int[] {5, 5}).nOut(6).build());
    list.layer(4, new org.deeplearning4j.nn.conf.layers.SubsamplingLayer.Builder(
            new int[] {2, 2}).build());
    list.layer(5, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
            LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nOut(2)
            .activation(Activation.SOFTMAX).build());
    return list;
}
 
Example #2
Source File: ConvolutionLayerSetupTest.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
// Partially-specified two-conv-layer net used by the LFW multi-channel tests;
// nIn values are intentionally left unset so inference can supply them.
public MultiLayerConfiguration.Builder incompleteLFW() {
    NeuralNetConfiguration.ListBuilder list = new NeuralNetConfiguration.Builder()
            .seed(3)
            .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT)
            .list();
    list.layer(0, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
            new int[] {5, 5}).nOut(6).build());
    list.layer(1, new org.deeplearning4j.nn.conf.layers.SubsamplingLayer.Builder(
            new int[] {2, 2}).build());
    list.layer(2, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
            new int[] {5, 5}).nOut(6).build());
    list.layer(3, new org.deeplearning4j.nn.conf.layers.SubsamplingLayer.Builder(
            new int[] {2, 2}).build());
    list.layer(4, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
            LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
            .activation(Activation.SOFTMAX).nOut(2).build());
    return list;
}
 
Example #3
Source File: ConvolutionLayerSetupTest.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
// Partial LeNet for MNIST: the conv layers carry explicit nIn values but the
// dense/output layers do not ("incomplete") — those are expected to be
// inferred later.
public MultiLayerConfiguration.Builder incompleteMnistLenet() {
    NeuralNetConfiguration.ListBuilder list = new NeuralNetConfiguration.Builder()
            .seed(3)
            .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT)
            .list();
    list.layer(0, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
            new int[] {5, 5}).nIn(1).nOut(20).build());
    list.layer(1, new org.deeplearning4j.nn.conf.layers.SubsamplingLayer.Builder(
            new int[] {2, 2}, new int[] {2, 2}).build());
    list.layer(2, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
            new int[] {5, 5}).nIn(20).nOut(50).build());
    list.layer(3, new org.deeplearning4j.nn.conf.layers.SubsamplingLayer.Builder(
            new int[] {2, 2}, new int[] {2, 2}).build());
    list.layer(4, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nOut(500).build());
    list.layer(5, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
            LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
            .activation(Activation.SOFTMAX).nOut(10).build());
    return list;
}
 
Example #4
Source File: ConvolutionLayerSetupTest.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
/**
 * Builds a fully-constructed LeNet-style MNIST configuration.
 *
 * @return the built {@link MultiLayerConfiguration} — unlike the
 *         {@code incomplete*} fixtures, this returns a finished
 *         configuration rather than a builder
 */
public MultiLayerConfiguration mnistLenet() {
    // Renamed local from "builder" to "conf": the value held here is the
    // already-built configuration, not a builder.
    MultiLayerConfiguration conf =
                    new NeuralNetConfiguration.Builder().seed(3)
                                    .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT).list()
                                    .layer(0, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
                                                    new int[] {5, 5}).nIn(1).nOut(6).build())
                                    .layer(1, new org.deeplearning4j.nn.conf.layers.SubsamplingLayer.Builder(
                                                    new int[] {5, 5}, new int[] {2, 2}).build())
                                    // NOTE(review): nIn(1) here does not match the previous conv
                                    // layer's nOut(6) — looks suspicious; confirm intent before
                                    // relying on this fixture.
                                    .layer(2, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
                                                    new int[] {5, 5}).nIn(1).nOut(6).build())
                                    .layer(3, new org.deeplearning4j.nn.conf.layers.SubsamplingLayer.Builder(
                                                    new int[] {5, 5}, new int[] {2, 2}).build())
                                    .layer(4, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                                    LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nIn(150)
                                                                    .nOut(10).build())
                                    .build();
    return conf;
}
 
Example #5
Source File: ConvolutionLayerSetupTest.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
// Conv net whose output layer omits nIn ("incomplete"); exercises automatic
// nIn inference in the setup tests.
public MultiLayerConfiguration.Builder inComplete() {
    int nChannels = 1;
    int outputNum = 10;
    int seed = 123;

    NeuralNetConfiguration.ListBuilder listBuilder = new NeuralNetConfiguration.Builder()
            .seed(seed)
            .optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT)
            .list();
    listBuilder.layer(0, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
            new int[] {10, 10}, new int[] {2, 2}).nIn(nChannels).nOut(6).build());
    listBuilder.layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX,
            new int[] {2, 2}).build());
    listBuilder.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
            .nOut(outputNum).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX).build());

    return listBuilder;
}
 
Example #6
Source File: ConvolutionLayerSetupTest.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
// Fully-specified counterpart of inComplete(): every nIn is given explicitly
// and the CNN<->FF preprocessors are attached manually.
public MultiLayerConfiguration.Builder complete() {
    final int numRows = 28;
    final int numColumns = 28;
    int nChannels = 1;
    int outputNum = 10;
    int seed = 123;

    NeuralNetConfiguration.ListBuilder listBuilder = new NeuralNetConfiguration.Builder()
            .seed(seed)
            .optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT)
            .list();
    listBuilder.layer(0, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(
            new int[] {10, 10}, new int[] {2, 2}).nIn(nChannels).nOut(6).build());
    listBuilder.layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX,
            new int[] {2, 2}).build());
    listBuilder.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
            .nIn(5 * 5 * 1 * 6) //216
            .nOut(outputNum).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX).build());
    listBuilder.inputPreProcessor(0, new FeedForwardToCnnPreProcessor(numRows, numColumns, nChannels));
    listBuilder.inputPreProcessor(2, new CnnToFeedForwardPreProcessor(5, 5, 6));

    return listBuilder;
}
 
Example #7
Source File: ConvolutionLayerSetupTest.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testSubSamplingWithPadding() {

    // conv: (28-2+0)/2+1 = 14; padded subsampling: (14-2+2)/2+1 = 8 -> 8x8x3
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list()
            .layer(0, new ConvolutionLayer.Builder(2, 2).padding(0, 0).stride(2, 2).nIn(1).nOut(3).build())
            .layer(1, new SubsamplingLayer.Builder().kernelSize(2, 2).padding(1, 1).stride(2, 2).build())
            .layer(2, new OutputLayer.Builder().nOut(3).activation(Activation.SOFTMAX).build())
            .setInputType(InputType.convolutional(28, 28, 1))
            .build();

    // A CNN->FF preprocessor should have been inserted before the output layer.
    assertNotNull(conf.getInputPreProcess(2));
    assertTrue(conf.getInputPreProcess(2) instanceof CnnToFeedForwardPreProcessor);
    CnnToFeedForwardPreProcessor preProcessor = (CnnToFeedForwardPreProcessor) conf.getInputPreProcess(2);
    assertEquals(8, preProcessor.getInputHeight());
    assertEquals(8, preProcessor.getInputWidth());
    assertEquals(3, preProcessor.getNumChannels());

    assertEquals(8 * 8 * 3, ((FeedForwardLayer) conf.getConf(2).getLayer()).getNIn());
}
 
Example #8
Source File: ConvolutionLayerSetupTest.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testUpsampling() {

    // conv: (28-2+0)/2+1 = 14; upsampling x3: 14 * 3 = 42
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list()
            .layer(new ConvolutionLayer.Builder(2, 2).padding(0, 0).stride(2, 2).nIn(1).nOut(3).build())
            .layer(new Upsampling2D.Builder().size(3).build())
            .layer(new OutputLayer.Builder().nOut(3).activation(Activation.SOFTMAX).build())
            .setInputType(InputType.convolutional(28, 28, 1))
            .build();

    // A CNN->FF preprocessor should have been inserted before the output layer.
    assertNotNull(conf.getInputPreProcess(2));
    assertTrue(conf.getInputPreProcess(2) instanceof CnnToFeedForwardPreProcessor);
    CnnToFeedForwardPreProcessor preProcessor = (CnnToFeedForwardPreProcessor) conf.getInputPreProcess(2);
    assertEquals(42, preProcessor.getInputHeight());
    assertEquals(42, preProcessor.getInputWidth());
    assertEquals(3, preProcessor.getNumChannels());

    assertEquals(42 * 42 * 3, ((FeedForwardLayer) conf.getConf(2).getLayer()).getNIn());
}
 
Example #9
Source File: ConvolutionLayerSetupTest.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testSpaceToBatch() {

    int[] blocks = new int[] {2, 2};

    // conv: (28-2+0)/2+1 = 14; space-to-batch divides space dims by blocks: 14/2 = 7
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list()
            .layer(new ConvolutionLayer.Builder(2, 2).padding(0, 0).stride(2, 2).nIn(1).nOut(3).build())
            .layer(new SpaceToBatchLayer.Builder(blocks).build())
            .layer(new OutputLayer.Builder().nOut(3).activation(Activation.SOFTMAX).build())
            .setInputType(InputType.convolutional(28, 28, 1))
            .build();

    // A CNN->FF preprocessor should have been inserted before the output layer.
    assertNotNull(conf.getInputPreProcess(2));
    assertTrue(conf.getInputPreProcess(2) instanceof CnnToFeedForwardPreProcessor);
    CnnToFeedForwardPreProcessor preProcessor = (CnnToFeedForwardPreProcessor) conf.getInputPreProcess(2);
    assertEquals(7, preProcessor.getInputHeight());
    assertEquals(7, preProcessor.getInputWidth());
    assertEquals(3, preProcessor.getNumChannels());
}
 
Example #10
Source File: ConvolutionLayerSetupTest.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testSpaceToDepth() {

    int blocks = 2;

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list()
            //(28-2+0)/2+1 = 14 -> 14x14x3 out
            .layer(new ConvolutionLayer.Builder(2, 2).padding(0, 0).stride(2, 2).nIn(1).nOut(3).build())
            // Divide space dimensions by blocks, i.e. 14/2 = 7 -> 7x7x12 out (3x2x2 depth)
            .layer(new SpaceToDepthLayer.Builder(blocks, SpaceToDepthLayer.DataFormat.NCHW).build())
            // nIn of the next layer gets multiplied by 2*2.
            .layer(new OutputLayer.Builder().nIn(3 * 2 * 2).nOut(3).activation(Activation.SOFTMAX).build())
            .setInputType(InputType.convolutional(28, 28, 1))
            .build();

    // A CNN->FF preprocessor should have been inserted before the output layer.
    assertNotNull(conf.getInputPreProcess(2));
    assertTrue(conf.getInputPreProcess(2) instanceof CnnToFeedForwardPreProcessor);
    CnnToFeedForwardPreProcessor preProcessor = (CnnToFeedForwardPreProcessor) conf.getInputPreProcess(2);
    assertEquals(7, preProcessor.getInputHeight());
    assertEquals(7, preProcessor.getInputWidth());
    assertEquals(12, preProcessor.getNumChannels());

}
 
Example #11
Source File: ConvDataFormatTests.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
/**
 * Builds a small conv network for the data-format tests.
 *
 * @param format         the CNN2D data format under test
 * @param setOnLayerAlso when true, the format is also set explicitly on the layer itself
 * @param cm             convolution mode to apply to the network
 * @return the initialised network produced by {@code getNetWithLayer}
 */
private MultiLayerNetwork getConv2dNet(CNN2DFormat format, boolean setOnLayerAlso, ConvolutionMode cm) {
    // The two branches previously duplicated the entire builder chain and
    // differed only in the .dataFormat(format) call — configure it once.
    ConvolutionLayer.Builder layerBuilder = new ConvolutionLayer.Builder()
            .kernelSize(3, 3)
            .stride(2, 2)
            .activation(Activation.TANH)
            .nOut(3)
            .helperAllowFallback(false);
    if (setOnLayerAlso) {
        layerBuilder.dataFormat(format);
    }
    return getNetWithLayer(layerBuilder.build(), format, cm, null);
}
 
Example #12
Source File: ConvolutionLayerTest.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
// Builds and initialises a small conv -> max-pool -> softmax network on
// flattened 28x28x1 input.
// NOTE(review): the backprop/pretrain parameters are never referenced in the
// body — presumably a legacy signature; confirm before removing.
private static MultiLayerNetwork getCNNMLNConfig(boolean backprop, boolean pretrain) {
    int outputNum = 10;
    int seed = 123;

    MultiLayerConfiguration.Builder netConfig = new NeuralNetConfiguration.Builder()
            .seed(seed)
            .optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT)
            .list()
            .layer(0, new ConvolutionLayer.Builder(new int[] {10, 10}).nOut(6).build())
            .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX,
                    new int[] {2, 2}).stride(1, 1).build())
            .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                    .nOut(outputNum).weightInit(WeightInit.XAVIER)
                    .activation(Activation.SOFTMAX).build())
            .setInputType(InputType.convolutionalFlat(28, 28, 1));

    MultiLayerNetwork model = new MultiLayerNetwork(netConfig.build());
    model.init();

    return model;
}
 
Example #13
Source File: ConvDataFormatTests.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
/**
 * Builds a conv -> CnnLossLayer network for the data-format tests.
 *
 * @param format         the CNN2D data format under test
 * @param setOnLayerAlso when true, the format is also set explicitly on the loss layer
 * @param cm             convolution mode to apply to the network
 * @return an initialised {@link MultiLayerNetwork}
 */
private MultiLayerNetwork getCnnLossNet(CNN2DFormat format, boolean setOnLayerAlso, ConvolutionMode cm){
    NeuralNetConfiguration.ListBuilder builder = new NeuralNetConfiguration.Builder()
            .seed(12345)
            .convolutionMode(cm)
            .list()
            .layer(new ConvolutionLayer.Builder()
                    .kernelSize(3, 3)
                    .stride(2, 2)
                    .activation(Activation.TANH)
                    .dataFormat(format)
                    .nOut(3)
                    .helperAllowFallback(false)
                    .build());
    // The two branches previously duplicated the loss-layer chain and
    // differed only in the .format(format) call — configure it once.
    CnnLossLayer.Builder lossBuilder = new CnnLossLayer.Builder().activation(Activation.SOFTMAX);
    if (setOnLayerAlso) {
        lossBuilder.format(format);
    }
    builder.layer(lossBuilder.build());

    builder.setInputType(InputType.convolutional(12, 12, 3, format));

    MultiLayerNetwork net = new MultiLayerNetwork(builder.build());
    net.init();
    return net;
}
 
Example #14
Source File: ConvolutionLayerSetupTest.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testLRN() throws Exception {
    List<String> labels = new ArrayList<>(Arrays.asList("Zico", "Ziwang_Xu"));
    File dir = testDir.newFolder();
    new ClassPathResource("lfwtest/").copyDirectory(dir);
    String rootDir = dir.getAbsolutePath();

    RecordReader reader = new ImageRecordReader(28, 28, 3);
    reader.initialize(new FileSplit(new File(rootDir)));
    DataSetIterator recordReader = new RecordReaderDataSetIterator(reader, 10, 1, labels.size());
    // NOTE(review): "lfwtest" is not in the labels list above, so this remove
    // is a no-op — confirm intent.
    labels.remove("lfwtest");
    NeuralNetConfiguration.ListBuilder listBuilder = (NeuralNetConfiguration.ListBuilder) incompleteLRN();
    listBuilder.setInputType(InputType.convolutional(28, 28, 3));

    MultiLayerConfiguration conf = listBuilder.build();

    // Layer 3 is the conv layer directly after the LRN; its nIn should have
    // been inferred from the first conv layer's nOut (6).
    ConvolutionLayer convAfterLrn = (ConvolutionLayer) conf.getConf(3).getLayer();
    assertEquals(6, convAfterLrn.getNIn());

}
 
Example #15
Source File: TestFrozenLayers.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
// Builds and initialises the baseline network used by the frozen-layer tests:
// conv -> pool -> conv -> dense -> dense -> output (MSE) on 28x28x1 input.
public static MultiLayerNetwork getOriginalNet(int seed){
    MultiLayerConfiguration configuration = new NeuralNetConfiguration.Builder()
            .seed(seed)
            .weightInit(WeightInit.XAVIER)
            .activation(Activation.TANH)
            .convolutionMode(ConvolutionMode.Same)
            .updater(new Sgd(0.3))
            .list()
            .layer(new ConvolutionLayer.Builder().nOut(3).kernelSize(2, 2).stride(1, 1).build())
            .layer(new SubsamplingLayer.Builder().kernelSize(2, 2).stride(1, 1).build())
            .layer(new ConvolutionLayer.Builder().nIn(3).nOut(3).kernelSize(2, 2).stride(1, 1).build())
            .layer(new DenseLayer.Builder().nOut(64).build())
            .layer(new DenseLayer.Builder().nIn(64).nOut(64).build())
            .layer(new OutputLayer.Builder().nIn(64).nOut(10)
                    .lossFunction(LossFunctions.LossFunction.MSE).build())
            .setInputType(InputType.convolutionalFlat(28, 28, 1))
            .build();

    MultiLayerNetwork network = new MultiLayerNetwork(configuration);
    network.init();
    return network;
}
 
Example #16
Source File: TestFrozenLayers.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
// Graph-based twin of getOriginalNet(): the same layer stack expressed as a
// ComputationGraph, initialised before returning.
public static ComputationGraph getOriginalGraph(int seed){
    ComputationGraphConfiguration configuration = new NeuralNetConfiguration.Builder()
            .seed(seed)
            .weightInit(WeightInit.XAVIER)
            .activation(Activation.TANH)
            .convolutionMode(ConvolutionMode.Same)
            .updater(new Sgd(0.3))
            .graphBuilder()
            .addInputs("in")
            .layer("0", new ConvolutionLayer.Builder().nOut(3).kernelSize(2, 2).stride(1, 1).build(), "in")
            .layer("1", new SubsamplingLayer.Builder().kernelSize(2, 2).stride(1, 1).build(), "0")
            .layer("2", new ConvolutionLayer.Builder().nIn(3).nOut(3).kernelSize(2, 2).stride(1, 1).build(), "1")
            .layer("3", new DenseLayer.Builder().nOut(64).build(), "2")
            .layer("4", new DenseLayer.Builder().nIn(64).nOut(64).build(), "3")
            .layer("5", new OutputLayer.Builder().nIn(64).nOut(10)
                    .lossFunction(LossFunctions.LossFunction.MSE).build(), "4")
            .setOutputs("5")
            .setInputTypes(InputType.convolutionalFlat(28, 28, 1))
            .build();

    ComputationGraph graph = new ComputationGraph(configuration);
    graph.init();
    return graph;
}
 
Example #17
Source File: ConvolutionLayerSetupTest.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testMultiChannel() throws Exception {
    INDArray features = Nd4j.rand(new int[] {10, 3, 28, 28});
    INDArray labels = Nd4j.rand(10, 2);
    DataSet dataSet = new DataSet(features, labels);

    NeuralNetConfiguration.ListBuilder listBuilder = (NeuralNetConfiguration.ListBuilder) incompleteLFW();
    listBuilder.setInputType(InputType.convolutional(28, 28, 3));
    MultiLayerConfiguration conf = listBuilder.build();

    // Layer 2 is the second conv layer; its nIn should have been inferred
    // from the first conv layer's nOut (6).
    ConvolutionLayer secondConv = (ConvolutionLayer) conf.getConf(2).getLayer();
    assertEquals(6, secondConv.getNIn());

    MultiLayerNetwork network = new MultiLayerNetwork(conf);
    network.init();
    network.fit(dataSet);
}
 
Example #18
Source File: LocalResponseTest.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testMultiCNNLayer() throws Exception {
    // conv -> LRN -> dense -> softmax output on flattened MNIST input
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT)
            .seed(123)
            .list()
            .layer(0, new ConvolutionLayer.Builder().nIn(1).nOut(6)
                    .weightInit(WeightInit.XAVIER).activation(Activation.RELU).build())
            .layer(1, new LocalResponseNormalization.Builder().build())
            .layer(2, new DenseLayer.Builder().nOut(2).build())
            .layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                    .weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX)
                    .nIn(2).nOut(10).build())
            .setInputType(InputType.convolutionalFlat(28, 28, 1))
            .build();

    MultiLayerNetwork network = new MultiLayerNetwork(conf);
    network.init();

    DataSetIterator mnist = new MnistDataSetIterator(2, 2);
    DataSet batch = mnist.next();
    network.fit(batch);
}
 
Example #19
Source File: BaseConvolutionLayerSpace.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
// Applies every configured hyperparameter space (dilation, kernel size,
// stride, padding, convolution mode, bias flag) to the given conv builder,
// after delegating the shared options to the superclass.
protected void setLayerOptionsBuilder(ConvolutionLayer.BaseConvBuilder<?> builder, double[] values) {
    super.setLayerOptionsBuilder(builder, values);
    if (dilation != null) {
        builder.dilation(dilation.getValue(values));
    }
    if (kernelSize != null) {
        builder.kernelSize(kernelSize.getValue(values));
    }
    if (stride != null) {
        builder.stride(stride.getValue(values));
    }
    if (padding != null) {
        builder.padding(padding.getValue(values));
    }
    if (convolutionMode != null) {
        builder.convolutionMode(convolutionMode.getValue(values));
    }
    if (hasBias != null) {
        builder.hasBias(hasBias.getValue(values));
    }
}
 
Example #20
Source File: ConvolutionLayerSetupTest.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testCNNDBNMultiLayer() throws Exception {
    DataSetIterator iter = new MnistDataSetIterator(2, 2);
    DataSet next = iter.next();

    // Run with separate activation layer
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).seed(123)
                    .weightInit(WeightInit.XAVIER).list()
                    .layer(0, new ConvolutionLayer.Builder(new int[] {1, 1}, new int[] {1, 1}).nIn(1).nOut(6)
                                    .activation(Activation.IDENTITY).build())
                    .layer(1, new BatchNormalization.Builder().build())
                    .layer(2, new ActivationLayer.Builder().activation(Activation.RELU).build())
                    .layer(3, new DenseLayer.Builder().nIn(28 * 28 * 6).nOut(10).activation(Activation.IDENTITY)
                                    .build())
                    .layer(4, new BatchNormalization.Builder().nOut(10).build())
                    .layer(5, new ActivationLayer.Builder().activation(Activation.RELU).build())
                    .layer(6, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                    .activation(Activation.SOFTMAX).nOut(10).build())
                    .setInputType(InputType.convolutionalFlat(28, 28, 1)).build();

    MultiLayerNetwork network = new MultiLayerNetwork(conf);
    network.init();

    network.setInput(next.getFeatures());
    INDArray activationsActual = network.output(next.getFeatures());
    // Output width must equal the number of classes. (The delta-based
    // assertEquals overload was pointless here: shape values are integral.)
    assertEquals(10, activationsActual.shape()[1]);

    network.fit(next);
    // Fitting should have populated the batch-norm parameters of layer 1.
    INDArray actualGammaParam = network.getLayer(1).getParam(BatchNormalizationParamInitializer.GAMMA);
    INDArray actualBetaParam = network.getLayer(1).getParam(BatchNormalizationParamInitializer.BETA);
    // assertNotNull gives a clearer failure message than assertTrue(x != null).
    assertNotNull(actualGammaParam);
    assertNotNull(actualBetaParam);
}
 
Example #21
Source File: TestConvolution.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
// Convenience factory for a named convolution layer with explicit channel
// counts and bias initialisation.
private static ConvolutionLayer convInit(String name, int in, int out, int[] kernel, int[] stride,
                                         int[] pad, double bias) {
    ConvolutionLayer.Builder layer = new ConvolutionLayer.Builder(kernel, stride, pad);
    layer.name(name);
    layer.nIn(in);
    layer.nOut(out);
    layer.biasInit(bias);
    return layer.build();
}
 
Example #22
Source File: CenterLossOutputLayerTest.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
// Builds and initialises a LeNet-style computation graph terminating in a
// center-loss output layer, for the center-loss tests.
public ComputationGraph getCNNMnistConfig() {

    int nChannels = 1; // Number of input channels
    int outputNum = 10; // The number of possible outcomes

    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
            .seed(12345)
            .l2(0.0005)
            .weightInit(WeightInit.XAVIER)
            .updater(new Nesterovs(0.01, 0.9))
            .graphBuilder()
            .addInputs("input")
            .setInputTypes(InputType.convolutionalFlat(28, 28, 1))
            //nIn and nOut specify channels. nIn here is the nChannels and nOut is the number of filters to be applied
            .addLayer("0", new ConvolutionLayer.Builder(5, 5)
                    .nIn(nChannels).stride(1, 1).nOut(20).activation(Activation.IDENTITY).build(), "input")
            .addLayer("1", new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX)
                    .kernelSize(2, 2).stride(2, 2).build(), "0")
            //Note that nIn need not be specified in later layers
            .addLayer("2", new ConvolutionLayer.Builder(5, 5)
                    .stride(1, 1).nOut(50).activation(Activation.IDENTITY).build(), "1")
            .addLayer("3", new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX)
                    .kernelSize(2, 2).stride(2, 2).build(), "2")
            .addLayer("4", new DenseLayer.Builder().activation(Activation.RELU).nOut(500).build(), "3")
            .addLayer("output", new org.deeplearning4j.nn.conf.layers.CenterLossOutputLayer.Builder(
                    LossFunction.MCXENT).nOut(outputNum).activation(Activation.SOFTMAX).build(), "4")
            .setOutputs("output")
            .build();

    ComputationGraph graph = new ComputationGraph(conf);
    graph.init();

    return graph;
}
 
Example #23
Source File: ConvDataFormatTests.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
// Sandwiches the given layer between a standard conv layer and a softmax
// output layer, then builds and initialises the network. For NHWC (except
// global pooling) an extra preprocessor is inserted so flattening matches
// the NCHW/Keras convention.
private MultiLayerNetwork getNetWithLayer(Layer layer, CNN2DFormat format, ConvolutionMode cm, InputType inputType) {
    InputType resolvedInput = inputType != null ? inputType : InputType.convolutional(12, 12, 3, format);

    NeuralNetConfiguration.ListBuilder listBuilder = new NeuralNetConfiguration.Builder()
            .dataType(this.dataType)
            .seed(12345)
            .convolutionMode(cm)
            .list()
            .layer(new ConvolutionLayer.Builder()
                    .kernelSize(3, 3)
                    .stride(2, 2)
                    .activation(Activation.TANH)
                    .dataFormat(format)
                    .nOut(3)
                    .helperAllowFallback(false)
                    .build())
            .layer(layer)
            .layer(new OutputLayer.Builder().activation(Activation.SOFTMAX).nOut(10).build())
            .setInputType(resolvedInput);

    if (format == CNN2DFormat.NHWC && !(layer instanceof GlobalPoolingLayer)) {
        //Add a preprocessor due to the differences in how NHWC and NCHW activations are flattened
        //DL4J's flattening behaviour matches Keras (hence TF) for import compatibility
        listBuilder.inputPreProcessor(2,
                new ComposableInputPreProcessor(new NHWCToNCHWPreprocessor(), new CnnToFeedForwardPreProcessor()));
    }

    MultiLayerNetwork network = new MultiLayerNetwork(listBuilder.build());
    network.init();
    return network;
}
 
Example #24
Source File: DarknetHelper.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
// Appends a Darknet-style conv -> batch-norm -> leaky-ReLU block (plus an
// optional max-pool when poolSize > 0) to the graph builder, wiring each
// layer to the previous one by name.
public static ComputationGraphConfiguration.GraphBuilder addLayers(ComputationGraphConfiguration.GraphBuilder graphBuilder, int layerNumber, String input, int filterSize, int nIn, int nOut, int poolSize, int poolStride) {
    String convName = "convolution2d_" + layerNumber;
    String batchNormName = "batchnormalization_" + layerNumber;
    String activationName = "activation_" + layerNumber;

    graphBuilder.addLayer(convName,
            new ConvolutionLayer.Builder(filterSize, filterSize)
                    .nIn(nIn)
                    .nOut(nOut)
                    .weightInit(WeightInit.XAVIER)
                    .convolutionMode(ConvolutionMode.Same)
                    .hasBias(false)
                    .stride(1, 1)
                    .activation(Activation.IDENTITY)
                    .build(),
            input);
    graphBuilder.addLayer(batchNormName,
            new BatchNormalization.Builder()
                    .nIn(nOut).nOut(nOut)
                    .weightInit(WeightInit.XAVIER)
                    .activation(Activation.IDENTITY)
                    .build(),
            convName);
    graphBuilder.addLayer(activationName,
            new ActivationLayer.Builder()
                    .activation(new ActivationLReLU(0.1))
                    .build(),
            batchNormName);

    if (poolSize > 0) {
        graphBuilder.addLayer("maxpooling2d_" + layerNumber,
                new SubsamplingLayer.Builder()
                        .kernelSize(poolSize, poolSize)
                        .stride(poolStride, poolStride)
                        .convolutionMode(ConvolutionMode.Same)
                        .build(),
                activationName);
    }

    return graphBuilder;
}
 
Example #25
Source File: ConvolutionLayerSpace.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
/**
 * Transfers the sampled hyperparameter values onto a {@link ConvolutionLayer.Builder}.
 * Options common to all layer spaces are applied by the superclass first; each
 * convolution-specific option below is only applied when its parameter space was
 * configured (i.e. is non-null).
 *
 * @param builder the layer builder to configure
 * @param values  the flattened hyperparameter sample vector
 */
protected void setLayerOptionsBuilder(ConvolutionLayer.Builder builder, double[] values) {
    super.setLayerOptionsBuilder(builder, values);
    if (dilation != null) {
        builder.dilation(dilation.getValue(values));
    }
    if (kernelSize != null) {
        builder.kernelSize(kernelSize.getValue(values));
    }
    if (stride != null) {
        builder.stride(stride.getValue(values));
    }
    if (padding != null) {
        builder.padding(padding.getValue(values));
    }
    if (convolutionMode != null) {
        builder.convolutionMode(convolutionMode.getValue(values));
    }
    if (hasBias != null) {
        builder.hasBias(hasBias.getValue(values));
    }
}
 
Example #26
Source File: TestConvolution.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testCudnnDilation(){
    //Sanity check on dilated conv execution: build a small conv+subsampling net
    //for each kernel/dilation pair and convolution mode, and run a forward pass.
    int[] k = new int[]{2,3,4,5};
    int[] d = new int[]{1,2,3,4};

    for( int[] inputSize : new int[][]{{10,1,28,28}, {3,3,224,224}}) {
        for (int i = 0; i < k.length; i++) {
            for(ConvolutionMode cm : new ConvolutionMode[]{ConvolutionMode.Same, ConvolutionMode.Truncate}) {

                //BUG FIX: use the convolution mode under test (cm) instead of the
                //hard-coded ConvolutionMode.Same. Previously the loop variable cm was
                //unused, so the Truncate iteration just re-ran the Same case and the
                //Truncate path was never exercised.
                MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                        .convolutionMode(cm)
                        .list()
                        .layer(new ConvolutionLayer.Builder().kernelSize(k[i], k[i]).dilation(d[i], d[i]).nOut(3).build())
                        .layer(new SubsamplingLayer.Builder().kernelSize(k[i], k[i]).dilation(d[i], d[i]).build())
                        .layer(new OutputLayer.Builder().nOut(10).build())
                        .setInputType(InputType.convolutional(inputSize[3], inputSize[2], inputSize[1]))
                        .build();

                MultiLayerNetwork net = new MultiLayerNetwork(conf);
                net.init();

                //Forward pass only — we are checking for crashes, not output values
                INDArray in = Nd4j.create(inputSize);
                net.output(in);
            }
        }
    }
}
 
Example #27
Source File: ConvolutionLayerSetupTest.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testDenseToOutputLayer() {
    Nd4j.getRandom().setSeed(12345);
    final int numRows = 76;
    final int numColumns = 76;
    int nChannels = 3;
    int outputNum = 6;
    int seed = 123;

    // Two conv + max-pool stages, then a dense layer feeding a softmax output.
    // setInputType lets the layer-setup code infer nIn for every layer, which is
    // exactly what this test exercises.
    MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder()
            .seed(seed)
            .l1(1e-1).l2(2e-4).dropOut(0.5).miniBatch(true)
            .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT)
            .list()
            .layer(0, new ConvolutionLayer.Builder(5, 5)
                    .nOut(5).dropOut(0.5)
                    .weightInit(WeightInit.XAVIER)
                    .activation(Activation.RELU)
                    .build())
            .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2})
                    .build())
            .layer(2, new ConvolutionLayer.Builder(3, 3)
                    .nOut(10).dropOut(0.5)
                    .weightInit(WeightInit.XAVIER)
                    .activation(Activation.RELU)
                    .build())
            .layer(3, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2})
                    .build())
            .layer(4, new DenseLayer.Builder().nOut(100).activation(Activation.RELU).build())
            .layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                    .nOut(outputNum)
                    .weightInit(WeightInit.XAVIER)
                    .activation(Activation.SOFTMAX)
                    .build())
            .setInputType(InputType.convolutional(numRows, numColumns, nChannels));

    // Random 10-example batch; every label is class 1 out of 6.
    INDArray features = Nd4j.rand(new int[]{10, nChannels, numRows, numColumns});
    INDArray labels = FeatureUtil.toOutcomeMatrix(new int[] {1, 1, 1, 1, 1, 1, 1, 1, 1, 1}, 6);
    DataSet d = new DataSet(features, labels);

    MultiLayerNetwork network = new MultiLayerNetwork(builder.build());
    network.init();
    network.fit(d);

}
 
Example #28
Source File: ConvolutionLayerTest.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
/**
 * Builds and instantiates a single sigmoid-activated convolution layer for testing.
 *
 * @param nIn        input channel count
 * @param nOut       output channel count
 * @param kernelSize convolution kernel dimensions
 * @param stride     convolution stride
 * @param padding    convolution padding
 * @return the instantiated runtime layer backed by a freshly allocated parameter view
 */
private static Layer getCNNConfig(int nIn, int nOut, int[] kernelSize, int[] stride, int[] padding) {

        ConvolutionLayer layer = new ConvolutionLayer.Builder(kernelSize, stride, padding)
                        .nIn(nIn)
                        .nOut(nOut)
                        .activation(Activation.SIGMOID)
                        .build();

        NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder().layer(layer).build();

        // Allocate a flat parameter row of the exact size the initializer reports,
        // then instantiate the runtime layer over that parameter view.
        val numParams = conf.getLayer().initializer().numParams(conf);
        INDArray params = Nd4j.create(1, numParams);
        return conf.getLayer().instantiate(conf, null, 0, params, true, params.dataType());
    }
 
Example #29
Source File: ConvolutionLayerTest.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testTwdFirstLayer() throws Exception {
    // DQN-style architecture: two strided convolutions, a fully-connected layer,
    // then a squared-loss softmax output over 10 classes, trained briefly on MNIST.
    MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder()
            .seed(123)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .l2(2e-4)
            .updater(new Nesterovs(0.9))
            .dropOut(0.5)
            .list()
            .layer(0, new ConvolutionLayer.Builder(8, 8) //16 filters kernel size 8 stride 4
                    .stride(4, 4)
                    .nOut(16)
                    .dropOut(0.5)
                    .activation(Activation.RELU)
                    .weightInit(WeightInit.XAVIER)
                    .build())
            .layer(1, new ConvolutionLayer.Builder(4, 4) //32 filters kernel size 4 stride 2
                    .stride(2, 2)
                    .nOut(32)
                    .dropOut(0.5)
                    .activation(Activation.RELU)
                    .weightInit(WeightInit.XAVIER)
                    .build())
            .layer(2, new DenseLayer.Builder() //fully connected with 256 rectified units
                    .nOut(256)
                    .activation(Activation.RELU)
                    .weightInit(WeightInit.XAVIER)
                    .dropOut(0.5)
                    .build())
            .layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.SQUARED_LOSS) //output layer
                    .nOut(10)
                    .weightInit(WeightInit.XAVIER)
                    .activation(Activation.SOFTMAX)
                    .build())
            .setInputType(InputType.convolutionalFlat(28, 28, 1));

    DataSetIterator iter = new MnistDataSetIterator(10, 10);
    MultiLayerConfiguration conf = builder.build();
    MultiLayerNetwork network = new MultiLayerNetwork(conf);
    network.init();

    // Fit the same mini-batch a few times; the test only checks that training runs.
    DataSet ds = iter.next();
    for( int i=0; i<5; i++ ) {
        network.fit(ds);
    }
}
 
Example #30
Source File: TestConvolutionModes.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testSameModeActivationSizes() {
    // With ConvolutionMode.Same, output spatial size is ceil(inputSize / stride),
    // independent of kernel size. Verify this for both a convolution layer and a
    // subsampling layer.
    int inH = 3;
    int inW = 4;
    int inDepth = 3;
    int minibatch = 5;

    int sH = 2;
    int sW = 2;
    int kH = 3;
    int kW = 3;

    Layer[] l = new Layer[2];
    l[0] = new ConvolutionLayer.Builder().nOut(4).kernelSize(kH, kW).stride(sH, sW).build();
    l[1] = new SubsamplingLayer.Builder().kernelSize(kH, kW).stride(sH, sW).build();

    for (int i = 0; i < l.length; i++) {

        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .convolutionMode(ConvolutionMode.Same)
                .list()
                .layer(0, l[i])
                .layer(1, new OutputLayer.Builder().nOut(3).activation(Activation.SOFTMAX).build())
                .setInputType(InputType.convolutional(inH, inW, inDepth))
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        INDArray inData = Nd4j.create(minibatch, inDepth, inH, inW);
        List<INDArray> activations = net.feedForward(inData);
        INDArray actL0 = activations.get(1);

        // Expected "Same" mode spatial dimensions.
        int outH = (int) Math.ceil(inH / ((double) sH));
        int outW = (int) Math.ceil(inW / ((double) sW));

        // Depth: the convolution maps to its nOut (4); subsampling preserves input depth.
        long expectedDepth = (i == 0 ? 4 : inDepth);
        System.out.println(Arrays.toString(actL0.shape()));
        assertArrayEquals(new long[] {minibatch, expectedDepth, outH, outW}, actL0.shape());
    }
}