org.deeplearning4j.nn.conf.inputs.InputType Java Examples

The following examples show how to use org.deeplearning4j.nn.conf.inputs.InputType. Each example is taken from the deeplearning4j project; the source file it comes from is noted above the code.
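As a quick orientation before the examples, most of them rely on a handful of static factory methods on InputType. The sketch below (an illustrative class, not part of any source file shown here) simply constructs the variants that appear most often, assuming the standard factory signatures used throughout the examples:

import org.deeplearning4j.nn.conf.inputs.InputType;

public class InputTypeFactorySketch {
    public static void main(String[] args) {
        // Flat vector input of length 128 (dense / feed-forward layers)
        InputType ff = InputType.feedForward(128);
        // Recurrent input: 10 features per time step, 50 time steps
        InputType rnn = InputType.recurrent(10, 50);
        // Image input: height 28, width 28, 1 channel
        InputType cnn = InputType.convolutional(28, 28, 1);
        // Image data already flattened to a row vector (e.g. MNIST in CSV form)
        InputType cnnFlat = InputType.convolutionalFlat(28, 28, 1);

        System.out.println(ff);
        System.out.println(rnn);
        System.out.println(cnn);
        System.out.println(cnnFlat);
    }
}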
Example #1
Source File: ConvolutionLayerSetupTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testMultiChannel() throws Exception {
    INDArray in = Nd4j.rand(new int[] {10, 3, 28, 28});
    INDArray labels = Nd4j.rand(10, 2);
    DataSet next = new DataSet(in, labels);

    NeuralNetConfiguration.ListBuilder builder = (NeuralNetConfiguration.ListBuilder) incompleteLFW();
    builder.setInputType(InputType.convolutional(28, 28, 3));
    MultiLayerConfiguration conf = builder.build();
    ConvolutionLayer layer2 = (ConvolutionLayer) conf.getConf(2).getLayer();
    assertEquals(6, layer2.getNIn());

    MultiLayerNetwork network = new MultiLayerNetwork(conf);
    network.init();
    network.fit(next);
}
 
Example #2
Source File: LayerVertex.java    From deeplearning4j with Apache License 2.0
@Override
public InputType getOutputType(int layerIndex, InputType... vertexInputs) throws InvalidInputTypeException {
    if (vertexInputs.length != 1) {
        throw new InvalidInputTypeException(
                        "LayerVertex expects exactly one input. Got: " + Arrays.toString(vertexInputs));
    }

    //Assume any necessary preprocessors have already been added
    InputType afterPreprocessor;
    if (preProcessor == null)
        afterPreprocessor = vertexInputs[0];
    else
        afterPreprocessor = preProcessor.getOutputType(vertexInputs[0]);

    return layerConf.getLayer().getOutputType(layerIndex, afterPreprocessor);
}
 
Example #3
Source File: TransferLearningMLNTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testChangeNOutNIn() {
    INDArray input = Nd4j.create(new long[] {1, 2, 4, 4});
    MultiLayerNetwork net = new MultiLayerNetwork(new NeuralNetConfiguration.Builder()
            .list()
            .setInputType(InputType.inferInputTypes(input)[0])
            .layer(new Convolution2D.Builder(1, 1).nOut(10).build())
            .layer(new SubsamplingLayer.Builder(1,1).build())
            .layer(new Convolution2D.Builder(1, 1).nOut(7).build())
            .layer(new OutputLayer.Builder().activation(Activation.SOFTMAX).nOut(2).build())
            .build());
    net.init();

    final MultiLayerNetwork newNet = new TransferLearning.Builder(net)
            .nOutReplace(0, 5, WeightInit.XAVIER)
            .nInReplace(2, 5, WeightInit.XAVIER)
            .build();

    newNet.init();

    assertEquals("Incorrect number of outputs!", 5 , newNet.layerSize(0));
    assertEquals("Incorrect number of inputs!", 5, newNet.layerInputSize(2));
    newNet.output(input);
}
 
Example #4
Source File: RNNTestCases.java    From deeplearning4j with Apache License 2.0
@Override
public Object getConfiguration() throws Exception {
    return new NeuralNetConfiguration.Builder()
            .dataType(DataType.FLOAT)
            .seed(12345)
            .updater(new Adam(5e-2))
            .l1(1e-3).l2(1e-3)
            .list()
            .layer(0, new LSTM.Builder().activation(Activation.TANH).nOut(10).build())
            .layer(new GlobalPoolingLayer.Builder().poolingType(PoolingType.AVG).build())
            .layer(new OutputLayer.Builder().nOut(6)
                    .lossFunction(LossFunctions.LossFunction.MCXENT)
                    .activation(Activation.SOFTMAX)
                    .build())
            .setInputType(InputType.recurrent(1))
            .build();
}
 
Example #5
Source File: TestBasic.java    From deeplearning4j with Apache License 2.0
private static MultiLayerSpace getMultiLayerSpaceMnist() {
    return new MultiLayerSpace.Builder()
            .updater(new SgdSpace(new ContinuousParameterSpace(0.0001, 0.2)))
            .l2(new ContinuousParameterSpace(0.0001, 0.05))
            .addLayer(
                    new ConvolutionLayerSpace.Builder().nIn(1)
                            .nOut(new IntegerParameterSpace(5, 30))
                            .kernelSize(new DiscreteParameterSpace<>(new int[]{3, 3},
                                    new int[]{4, 4}, new int[]{5, 5}))
                            .stride(new DiscreteParameterSpace<>(new int[]{1, 1},
                                    new int[]{2, 2}))
                            .activation(new DiscreteParameterSpace<>(Activation.RELU,
                                    Activation.SOFTPLUS, Activation.LEAKYRELU))
                            .build())
            .addLayer(new DenseLayerSpace.Builder().nOut(new IntegerParameterSpace(32, 128))
                    .activation(new DiscreteParameterSpace<>(Activation.RELU, Activation.TANH))
                    .build(), new IntegerParameterSpace(0, 1), true) //0 to 1 layers
            .addLayer(new OutputLayerSpace.Builder().nOut(10).activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
            .setInputType(InputType.convolutionalFlat(28, 28, 1))
            .build();
}
 
Example #6
Source File: ConvolutionLayerTest.java    From deeplearning4j with Apache License 2.0
private static MultiLayerNetwork getCNNMLNConfig(boolean backprop, boolean pretrain) {
    int outputNum = 10;
    int seed = 123;

    MultiLayerConfiguration.Builder conf =
                    new NeuralNetConfiguration.Builder().seed(seed)
                                    .optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT).list()
                                    .layer(0, new ConvolutionLayer.Builder(new int[] {10, 10}).nOut(6).build())
                                    .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX,
                                                    new int[] {2, 2}).stride(1, 1).build())
                                    .layer(2, new OutputLayer.Builder(
                                                    LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                                                                    .nOut(outputNum).weightInit(WeightInit.XAVIER)
                                                                    .activation(Activation.SOFTMAX).build())
                                    .setInputType(InputType.convolutionalFlat(28, 28, 1));

    MultiLayerNetwork model = new MultiLayerNetwork(conf.build());
    model.init();

    return model;
}
 
Example #7
Source File: ConvolutionLayerSetupTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testSpaceToDepth() {

    int blocks = 2;

    MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().list()
            //(28-2+0)/2+1 = 14 -> 14x14x3 out
            .layer(new ConvolutionLayer.Builder(2, 2).padding(0, 0).stride(2, 2).nIn(1).nOut(3).build())
            // Divide space dimensions by blocks, i.e. 14/2 = 7 -> 7x7x12 out (3x2x2 depth)
            .layer(new SpaceToDepthLayer.Builder(blocks, SpaceToDepthLayer.DataFormat.NCHW).build())
            .layer(new OutputLayer.Builder().nIn(3 * 2 * 2).nOut(3).activation(Activation.SOFTMAX).build()) // nIn of the next layer gets multiplied by 2*2.
            .setInputType(InputType.convolutional(28, 28, 1));

    MultiLayerConfiguration conf = builder.build();

    assertNotNull(conf.getInputPreProcess(2));
    assertTrue(conf.getInputPreProcess(2) instanceof CnnToFeedForwardPreProcessor);
    CnnToFeedForwardPreProcessor proc = (CnnToFeedForwardPreProcessor) conf.getInputPreProcess(2);
    assertEquals(7, proc.getInputHeight());
    assertEquals(7, proc.getInputWidth());
    assertEquals(12, proc.getNumChannels());

}
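For reference, the shapes asserted above follow the standard convolution output formula; a minimal arithmetic sketch (hypothetical variable names, not part of the test):

// Convolution output size: (input - kernel + 2*padding) / stride + 1
int convOut = (28 - 2 + 2 * 0) / 2 + 1;   // = 14, so the conv layer emits 14x14x3
// SpaceToDepth with block size 2: spatial dims divided by 2, channels multiplied by 2*2
int s2dSize = convOut / 2;                // = 7
int s2dChannels = 3 * 2 * 2;              // = 12, matching the 7x7x12 expected by the preprocessor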
 
Example #8
Source File: PrimaryCapsules.java    From deeplearning4j with Apache License 2.0
@Override
public void setNIn(InputType inputType, boolean override) {
    if (inputType == null || inputType.getType() != Type.CNN) {
        throw new IllegalStateException("Invalid input for Primary Capsules layer (layer name = \""
                + layerName + "\"): expect CNN input.  Got: " + inputType);
    }

    InputTypeConvolutional ci = (InputTypeConvolutional) inputType;

    this.inputChannels = (int) ci.getChannels();

    if(capsules <= 0 || override) {

        InputTypeConvolutional out = (InputTypeConvolutional) InputTypeUtil
                .getOutputTypeCnnLayers(inputType, kernelSize, stride, padding, dilation, convolutionMode,
                        capsuleDimensions * channels, -1, getLayerName(), PrimaryCapsules.class);

        this.capsules = (int) (out.getChannels() * out.getHeight() * out.getWidth() / capsuleDimensions);
    }
}
 
Example #9
Source File: DuplicateToTimeSeriesVertex.java    From deeplearning4j with Apache License 2.0
@Override
public InputType getOutputType(int layerIndex, InputType... vertexInputs) throws InvalidInputTypeException {
    if (vertexInputs.length != 1)
        throw new InvalidInputTypeException("Invalid input type: cannot duplicate more than 1 input");

    int tsLength = 1; //TODO work this out properly

    if (vertexInputs[0].getType() == InputType.Type.FF) {
        return InputType.recurrent(((InputType.InputTypeFeedForward) vertexInputs[0]).getSize(), tsLength);
    } else if (vertexInputs[0].getType() == InputType.Type.CNNFlat) {
        return InputType.recurrent(((InputType.InputTypeConvolutionalFlat) vertexInputs[0]).getFlattenedSize(),
                        tsLength);
    } else {
        throw new InvalidInputTypeException(
                        "Invalid input type: cannot duplicate to time series non feed forward (or CNN flat) input (got: "
                                        + vertexInputs[0] + ")");
    }
}
 
Example #10
Source File: ZeroPadding1DLayer.java    From deeplearning4j with Apache License 2.0
@Override
public InputType getOutputType(int layerIndex, InputType inputType) {
    if (inputType == null || inputType.getType() != InputType.Type.RNN) {
        throw new IllegalStateException("Invalid input for 1D CNN layer (layer index = " + layerIndex
                        + ", layer name = \"" + getLayerName() + "\"): expect RNN input type with size > 0. Got: "
                        + inputType);
    }
    InputType.InputTypeRecurrent recurrent = (InputType.InputTypeRecurrent) inputType;
    return InputType.recurrent(recurrent.getSize(), recurrent.getTimeSeriesLength() + padding[0] + padding[1]);
}
 
Example #11
Source File: LastTimeStep.java    From deeplearning4j with Apache License 2.0
@Override
public InputType getOutputType(int layerIndex, InputType inputType) {
    if (inputType.getType() != InputType.Type.RNN) {
        throw new IllegalArgumentException("Require RNN input type - got " + inputType);
    }
    InputType outType = underlying.getOutputType(layerIndex, inputType);
    InputType.InputTypeRecurrent r = (InputType.InputTypeRecurrent) outType;
    return InputType.feedForward(r.getSize());
}
 
Example #12
Source File: ScaleVertex.java    From deeplearning4j with Apache License 2.0
@Override
public InputType getOutputType(int layerIndex, InputType... vertexInputs) throws InvalidInputTypeException {
    if (vertexInputs.length == 1)
        return vertexInputs[0];
    InputType first = vertexInputs[0];

    return first; //Same output shape/size as the input
}
 
Example #13
Source File: KerasCropping3D.java    From deeplearning4j with Apache License 2.0
/**
 * Get layer output type.
 *
 * @param inputType Array of InputTypes
 * @return output type as InputType
 * @throws InvalidKerasConfigurationException Invalid Keras config
 */
@Override
public InputType getOutputType(InputType... inputType) throws InvalidKerasConfigurationException {
    if (inputType.length > 1)
        throw new InvalidKerasConfigurationException(
                "Keras Cropping 3D layer accepts only one input (received " + inputType.length + ")");
    return this.getCropping3DLayer().getOutputType(-1, inputType[0]);
}
 
Example #14
Source File: KerasGlobalPooling.java    From deeplearning4j with Apache License 2.0
/**
 * Get layer output type.
 *
 * @param inputType Array of InputTypes
 * @return output type as InputType
 * @throws InvalidKerasConfigurationException Invalid Keras config
 */
@Override
public InputType getOutputType(InputType... inputType) throws InvalidKerasConfigurationException {
    if (inputType.length > 1)
        throw new InvalidKerasConfigurationException(
                "Keras Subsampling layer accepts only one input (received " + inputType.length + ")");

    /* Check whether layer requires a preprocessor for this InputType. */
    InputPreProcessor preprocessor = getInputPreprocessor(inputType[0]);
    if (preprocessor != null) {
        return this.getGlobalPoolingLayer().getOutputType(-1, preprocessor.getOutputType(inputType[0]));
    }
    return this.getGlobalPoolingLayer().getOutputType(-1, inputType[0]);
}
 
Example #15
Source File: RecurrentAttentionLayer.java    From deeplearning4j with Apache License 2.0
@Override
public InputType getOutputType(int layerIndex, InputType inputType) {
    if (inputType == null || inputType.getType() != InputType.Type.RNN) {
        throw new IllegalStateException("Invalid input for Recurrent Attention layer (layer index = " + layerIndex
                + ", layer name = \"" + getLayerName() + "\"): expect RNN input type with size > 0. Got: "
                + inputType);
    }

    InputType.InputTypeRecurrent itr = (InputType.InputTypeRecurrent) inputType;


    return InputType.recurrent(nOut, itr.getTimeSeriesLength());
}
 
Example #16
Source File: Convolution1DLayer.java    From deeplearning4j with Apache License 2.0
@Override
public void setNIn(InputType inputType, boolean override) {
    if (inputType == null || inputType.getType() != InputType.Type.RNN) {
        throw new IllegalStateException("Invalid input for 1D CNN layer (layer name = \"" + getLayerName()
                        + "\"): expect RNN input type with size > 0. Got: " + inputType);
    }

    InputType.InputTypeRecurrent r = (InputType.InputTypeRecurrent) inputType;
    if (nIn <= 0 || override) {
        this.nIn = r.getSize();
    }
    this.rnnDataFormat = r.getFormat();
}
 
Example #17
Source File: KerasBidirectional.java    From deeplearning4j with Apache License 2.0
/**
 * Get layer output type.
 *
 * @param inputType Array of InputTypes
 * @return output type as InputType
 * @throws InvalidKerasConfigurationException Invalid Keras config
 */
@Override
public InputType getOutputType(InputType... inputType) throws InvalidKerasConfigurationException {
    if (inputType.length > 1)
        throw new InvalidKerasConfigurationException(
                "Keras Bidirectional layer accepts only one input (received " + inputType.length + ")");
    InputPreProcessor preProcessor = getInputPreprocessor(inputType);
    if (preProcessor != null)
        return this.getBidirectionalLayer().getOutputType(-1, preProcessor.getOutputType(inputType[0]));
    else
        return this.getBidirectionalLayer().getOutputType(-1, inputType[0]);
}
 
Example #18
Source File: CnnLossLayer.java    From deeplearning4j with Apache License 2.0
@Override
public InputType getOutputType(int layerIndex, InputType inputType) {
    if (inputType == null || (inputType.getType() != InputType.Type.CNN
                    && inputType.getType() != InputType.Type.CNNFlat)) {
        throw new IllegalStateException(
                        "Invalid input type for CnnLossLayer (layer index = " + layerIndex + ", layer name=\""
                                        + getLayerName() + "\"): Expected CNN or CNNFlat input, got " + inputType);
    }
    return inputType;
}
 
Example #19
Source File: EmbeddingSequenceLayer.java    From deeplearning4j with Apache License 2.0
@Override
public void setNIn(InputType inputType, boolean override) {
    if(inputType.getType() == InputType.Type.RNN){
        if (nIn <= 0 || override) {
            InputType.InputTypeRecurrent f = (InputType.InputTypeRecurrent) inputType;
            this.nIn = f.getSize();
        }
    } else {
        super.setNIn(inputType, override);
    }
}
 
Example #20
Source File: ZeroPadding1DLayer.java    From deeplearning4j with Apache License 2.0
@Override
public LayerMemoryReport getMemoryReport(InputType inputType) {
    InputType outputType = getOutputType(-1, inputType);

    return new LayerMemoryReport.Builder(layerName, ZeroPaddingLayer.class, inputType, outputType)
                    .standardMemory(0, 0) //No params
                    .workingMemory(0, 0, MemoryReport.CACHE_MODE_ALL_ZEROS, MemoryReport.CACHE_MODE_ALL_ZEROS)
                    .cacheMemory(MemoryReport.CACHE_MODE_ALL_ZEROS, MemoryReport.CACHE_MODE_ALL_ZEROS) //No caching
                    .build();
}
 
Example #21
Source File: TimeDistributed.java    From deeplearning4j with Apache License 2.0
@Override
public void setNIn(InputType inputType, boolean override) {
    if (inputType.getType() != InputType.Type.RNN) {
        throw new IllegalStateException("Only RNN input type is supported as input to TimeDistributed layer");
    }

    InputType.InputTypeRecurrent rnn = (InputType.InputTypeRecurrent) inputType;
    InputType ff = InputType.feedForward(rnn.getSize());
    this.rnnDataFormat = rnn.getFormat();
    underlying.setNIn(ff, override);
}
 
Example #22
Source File: FeedForwardLayer.java    From deeplearning4j with Apache License 2.0
@Override
public InputPreProcessor getPreProcessorForInputType(InputType inputType) {
    if (inputType == null) {
        throw new IllegalStateException(
                        "Invalid input for layer (layer name = \"" + getLayerName() + "\"): input type is null");
    }

    switch (inputType.getType()) {
        case FF:
        case CNNFlat:
            //FF -> FF and CNN (flattened format) -> FF: no preprocessor necessary
            return null;
        case RNN:
            //RNN -> FF
            return new RnnToFeedForwardPreProcessor(((InputType.InputTypeRecurrent)inputType).getFormat());
        case CNN:
            //CNN -> FF
            InputType.InputTypeConvolutional c = (InputType.InputTypeConvolutional) inputType;
            return new CnnToFeedForwardPreProcessor(c.getHeight(), c.getWidth(), c.getChannels(), c.getFormat());
        case CNN3D:
            //CNN3D -> FF
            InputType.InputTypeConvolutional3D c3d = (InputType.InputTypeConvolutional3D) inputType;
            return new Cnn3DToFeedForwardPreProcessor(c3d.getDepth(), c3d.getHeight(), c3d.getWidth(),
                            c3d.getChannels(), c3d.getDataFormat() == Convolution3D.DataFormat.NCDHW);
        default:
            throw new RuntimeException("Unknown input type: " + inputType);
    }
}
 
Example #23
Source File: RnnToCnnPreProcessor.java    From deeplearning4j with Apache License 2.0
@Override
public InputType getOutputType(InputType inputType) {
    if (inputType == null || inputType.getType() != InputType.Type.RNN) {
        throw new IllegalStateException("Invalid input type: Expected input of type RNN, got " + inputType);
    }

    InputType.InputTypeRecurrent c = (InputType.InputTypeRecurrent) inputType;
    int expSize = inputHeight * inputWidth * numChannels;
    if (c.getSize() != expSize) {
        throw new IllegalStateException("Invalid input: expected RNN input of size " + expSize + " = (d="
                        + numChannels + " * w=" + inputWidth + " * h=" + inputHeight + "), got " + inputType);
    }

    return InputType.convolutional(inputHeight, inputWidth, numChannels);
}
 
Example #24
Source File: TestComputationGraphNetwork.java    From deeplearning4j with Apache License 2.0
@Test
public void printSummary() {
    NeuralNetConfiguration.Builder overallConf = new NeuralNetConfiguration.Builder().updater(new Sgd(0.1))
            .activation(Activation.IDENTITY);

    ComputationGraphConfiguration conf = overallConf.graphBuilder().addInputs("inCentre", "inRight")
            .addLayer("denseCentre0", new DenseLayer.Builder().nIn(10).nOut(9).build(), "inCentre")
            .addLayer("denseCentre1", new DenseLayer.Builder().nIn(9).nOut(8).build(), "denseCentre0")
            .addLayer("denseCentre2", new DenseLayer.Builder().nIn(8).nOut(7).build(), "denseCentre1")
            .addLayer("denseCentre3", new DenseLayer.Builder().nIn(7).nOut(7).build(), "denseCentre2")
            .addLayer("outCentre",
                    new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(7).nOut(4).build(),
                    "denseCentre3")
            .addVertex("subsetLeft", new SubsetVertex(0, 3), "denseCentre1")
            .addLayer("denseLeft0", new DenseLayer.Builder().nIn(4).nOut(5).build(), "subsetLeft")
            .addLayer("outLeft",
                    new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(5).nOut(6).build(),
                    "denseLeft0")
            .addLayer("denseRight", new DenseLayer.Builder().nIn(7).nOut(7).build(), "denseCentre2")
            .addLayer("denseRight0", new DenseLayer.Builder().nIn(2).nOut(3).build(), "inRight")
            .addVertex("mergeRight", new MergeVertex(), "denseRight", "denseRight0")
            .addLayer("denseRight1", new DenseLayer.Builder().nIn(10).nOut(5).build(), "mergeRight")
            .addLayer("outRight",
                    new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(5).nOut(5).build(),
                    "denseRight1")
            .setOutputs("outLeft", "outCentre", "outRight").build();

    ComputationGraph modelToTune = new ComputationGraph(conf);
    modelToTune.init();
    // System.out.println(modelToTune.summary());
    modelToTune.summary();

    ComputationGraph modelNow =
            new TransferLearning.GraphBuilder(modelToTune).setFeatureExtractor("denseCentre2").build();
    // System.out.println(modelNow.summary());
    // System.out.println(modelNow.summary(InputType.feedForward(10), InputType.feedForward(2)));
    modelNow.summary();
    modelNow.summary(InputType.feedForward(10), InputType.feedForward(2));
}
 
Example #25
Source File: FeedForwardToCnn3DPreProcessor.java    From deeplearning4j with Apache License 2.0
@Override
public InputType getOutputType(InputType inputType) {

    switch (inputType.getType()) {
        case FF:
            InputType.InputTypeFeedForward c = (InputType.InputTypeFeedForward) inputType;
            int expSize = inputDepth * inputHeight * inputWidth * numChannels;
            if (c.getSize() != expSize) {
                throw new IllegalStateException("Invalid input: expected FeedForward input of size " + expSize
                        + " = (d=" + numChannels + " * w=" + inputWidth + " * h=" + inputHeight + "), got "
                        + inputType);
            }
            return InputType.convolutional3D(inputDepth, inputHeight, inputWidth, numChannels);
        case CNN:
            InputType.InputTypeConvolutional c2 = (InputType.InputTypeConvolutional) inputType;

            if (c2.getChannels() != numChannels || c2.getHeight() != inputHeight || c2.getWidth() != inputWidth) {
                throw new IllegalStateException("Invalid input: Got CNN input type with (c,w,h)=(" + c2.getChannels()
                        + "," + c2.getWidth() + "," + c2.getHeight() + ") but expected (" + numChannels
                        + "," + inputHeight + "," + inputWidth + ")");
            }
            return InputType.convolutional3D(1, c2.getHeight(), c2.getWidth(), c2.getChannels());
        case CNN3D:
            InputType.InputTypeConvolutional3D c3 = (InputType.InputTypeConvolutional3D) inputType;

            if (c3.getChannels() != numChannels || c3.getDepth() != inputDepth ||
                    c3.getHeight() != inputHeight || c3.getWidth() != inputWidth) {
                throw new IllegalStateException("Invalid input: Got CNN input type with (c, d,w,h)=("
                        + c3.getChannels() + "," + c3.getDepth() + "," + c3.getWidth() + "," + c3.getHeight()
                        + ") but expected (" + numChannels + "," + inputDepth + ","
                        + inputHeight + "," + inputWidth + ")");
            }
            return c3;
        default:
            throw new IllegalStateException("Invalid input type: got " + inputType);
    }
}
 
Example #26
Source File: KerasUpsampling2D.java    From deeplearning4j with Apache License 2.0
/**
 * Get layer output type.
 *
 * @param inputType Array of InputTypes
 * @return output type as InputType
 * @throws InvalidKerasConfigurationException Invalid Keras config
 */
@Override
public InputType getOutputType(InputType... inputType) throws InvalidKerasConfigurationException {
    if (inputType.length > 1)
        throw new InvalidKerasConfigurationException(
                "Keras Upsampling layer accepts only one input (received " + inputType.length + ")");
    return this.getUpsampling2DLayer().getOutputType(-1, inputType[0]);
}
 
Example #27
Source File: RnnOutputLayer.java    From deeplearning4j with Apache License 2.0
@Override
public InputType getOutputType(int layerIndex, InputType inputType) {
    if (inputType == null || inputType.getType() != InputType.Type.RNN) {
        throw new IllegalStateException("Invalid input type for RnnOutputLayer (layer index = " + layerIndex
                        + ", layer name=\"" + getLayerName() + "\"): Expected RNN input, got " + inputType);
    }
    InputType.InputTypeRecurrent itr = (InputType.InputTypeRecurrent) inputType;

    return InputType.recurrent(nOut, itr.getTimeSeriesLength(), itr.getFormat());
}
 
Example #28
Source File: ConvolutionLayer.java    From deeplearning4j with Apache License 2.0
@Override
public InputType getOutputType(int layerIndex, InputType inputType) {
    if (inputType == null || inputType.getType() != InputType.Type.CNN) {
        throw new IllegalStateException("Invalid input for Convolution layer (layer name=\"" + getLayerName()
                        + "\"): Expected CNN input, got " + inputType);
    }

    return InputTypeUtil.getOutputTypeCnnLayers(inputType, kernelSize, stride, padding, dilation, convolutionMode,
                    nOut, layerIndex, getLayerName(), cnn2dDataFormat, ConvolutionLayer.class);
}
 
Example #29
Source File: KerasGaussianNoise.java    From deeplearning4j with Apache License 2.0
/**
 * Get layer output type.
 *
 * @param inputType Array of InputTypes
 * @return output type as InputType
 * @throws InvalidKerasConfigurationException Invalid Keras config
 */
@Override
public InputType getOutputType(InputType... inputType) throws InvalidKerasConfigurationException {
    if (inputType.length > 1)
        throw new InvalidKerasConfigurationException(
                "Keras Gaussian Noise layer accepts only one input (received " + inputType.length + ")");
    return this.getGaussianNoiseLayer().getOutputType(-1, inputType[0]);
}
 
Example #30
Source File: LocallyConnected2D.java    From deeplearning4j with Apache License 2.0
@Override
public InputType getOutputType(int layerIndex, InputType inputType) {
    if (inputType == null || inputType.getType() != InputType.Type.CNN) {
        throw new IllegalArgumentException("Provided input type for locally connected 2D layers has to be "
                        + "of CNN type, got: " + inputType);
    }
    // dynamically compute input size from input type
    InputType.InputTypeConvolutional cnnType = (InputType.InputTypeConvolutional) inputType;
    this.inputSize = new int[] {(int) cnnType.getHeight(), (int) cnnType.getWidth()};
    computeOutputSize();

    return InputTypeUtil.getOutputTypeCnnLayers(inputType, kernel, stride, padding, new int[] {1, 1}, cm, nOut,
                    layerIndex, getLayerName(), format, LocallyConnected2D.class);
}