org.deeplearning4j.nn.api.Layer Java Examples

The following examples show how to use org.deeplearning4j.nn.api.Layer. They are drawn from open source projects; the source file, originating project, and license are noted above each example.
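Before the project examples, here is a minimal, self-contained sketch of the pattern most of them share: build a layer configuration, call instantiate(...) to turn it into an org.deeplearning4j.nn.api.Layer, and run a forward pass with activate(...). This sketch is not taken from any of the projects below; the class name, the DenseLayer sizes, and the seed are illustrative assumptions.

import java.util.Arrays;

import org.deeplearning4j.nn.api.Layer;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.workspace.LayerWorkspaceMgr;
import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class LayerQuickStart {
    public static void main(String[] args) {
        // Configure a single dense layer: 4 inputs -> 3 outputs (sizes are illustrative).
        DenseLayer layerConf = new DenseLayer.Builder().nIn(4).nOut(3).build();
        NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder()
                        .seed(12345)
                        .layer(layerConf)
                        .build();

        // Allocate a flat parameter view and let instantiate(...) initialize it.
        DataType dt = Nd4j.defaultFloatingPointType();
        long numParams = layerConf.initializer().numParams(conf);
        INDArray paramsView = Nd4j.create(dt, 1, numParams);
        Layer layer = conf.getLayer().instantiate(conf, null, 0, paramsView, true, dt);

        // Forward pass: 5 examples with 4 features each.
        INDArray input = Nd4j.rand(5, 4).castTo(dt);
        INDArray out = layer.activate(input, false, LayerWorkspaceMgr.noWorkspaces());
        System.out.println("Output shape: " + Arrays.toString(out.shape()));
    }
}

Layers without parameters (for example the subsampling layer in Example #14) can instead pass a null parameter view, since their initializer allocates nothing.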
Example #1
Source File: TestSparkMultiLayerParameterAveraging.java    From deeplearning4j with Apache License 2.0
@Test
public void testEvaluation() {

    SparkDl4jMultiLayer sparkNet = getBasicNetwork();
    MultiLayerNetwork netCopy = sparkNet.getNetwork().clone();

    Evaluation evalExpected = new Evaluation();
    INDArray outLocal = netCopy.output(input, Layer.TrainingMode.TEST);
    evalExpected.eval(labels, outLocal);

    Evaluation evalActual = sparkNet.evaluate(sparkData);

    assertEquals(evalExpected.accuracy(), evalActual.accuracy(), 1e-3);
    assertEquals(evalExpected.f1(), evalActual.f1(), 1e-3);
    assertEquals(evalExpected.getNumRowCounter(), evalActual.getNumRowCounter(), 1e-3);
    assertMapEquals(evalExpected.falseNegatives(), evalActual.falseNegatives());
    assertMapEquals(evalExpected.falsePositives(), evalActual.falsePositives());
    assertMapEquals(evalExpected.trueNegatives(), evalActual.trueNegatives());
    assertMapEquals(evalExpected.truePositives(), evalActual.truePositives());
    assertEquals(evalExpected.precision(), evalActual.precision(), 1e-3);
    assertEquals(evalExpected.recall(), evalActual.recall(), 1e-3);
    assertEquals(evalExpected.getConfusionMatrix(), evalActual.getConfusionMatrix());
}
 
Example #2
Source File: ElementWiseMultiplicationLayer.java    From deeplearning4j with Apache License 2.0
@Override
public Layer instantiate(NeuralNetConfiguration conf, Collection<TrainingListener> trainingListeners,
                         int layerIndex, INDArray layerParamsView, boolean initializeParams, DataType networkDataType) {
    if (this.nIn != this.nOut) {
        throw new IllegalStateException("Element wise layer must have the same input and output size. Got nIn="
                        + nIn + ", nOut=" + nOut);
    }
    org.deeplearning4j.nn.layers.feedforward.elementwise.ElementWiseMultiplicationLayer ret =
                    new org.deeplearning4j.nn.layers.feedforward.elementwise.ElementWiseMultiplicationLayer(conf, networkDataType);
    ret.setListeners(trainingListeners);
    ret.setIndex(layerIndex);
    ret.setParamsViewArray(layerParamsView);
    Map<String, INDArray> paramTable = initializer().init(conf, layerParamsView, initializeParams);
    ret.setParamTable(paramTable);
    ret.setConf(conf);

    return ret;
}
 
Example #3
Source File: MultiLayerNeuralNetConfigurationTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testTrainingListener() {
    MultiLayerNetwork model1 = new MultiLayerNetwork(getConf());
    model1.init();
    model1.addListeners( new ScoreIterationListener(1));

    MultiLayerNetwork model2 = new MultiLayerNetwork(getConf());
    model2.addListeners( new ScoreIterationListener(1));
    model2.init();

    Layer[] l1 = model1.getLayers();
    for (int i = 0; i < l1.length; i++)
        assertTrue(l1[i].getListeners() != null && l1[i].getListeners().size() == 1);

    Layer[] l2 = model2.getLayers();
    for (int i = 0; i < l2.length; i++)
        assertTrue(l2[i].getListeners() != null && l2[i].getListeners().size() == 1);
}
 
Example #4
Source File: SeparableConvolution2D.java    From deeplearning4j with Apache License 2.0
@Override
public Layer instantiate(NeuralNetConfiguration conf, Collection<TrainingListener> trainingListeners,
                         int layerIndex, INDArray layerParamsView, boolean initializeParams, DataType networkDataType) {
    LayerValidation.assertNInNOutSet("SeparableConvolution2D", getLayerName(), layerIndex, getNIn(), getNOut());

    org.deeplearning4j.nn.layers.convolution.SeparableConvolution2DLayer ret =
                    new org.deeplearning4j.nn.layers.convolution.SeparableConvolution2DLayer(conf, networkDataType);
    ret.setListeners(trainingListeners);
    ret.setIndex(layerIndex);
    ret.setParamsViewArray(layerParamsView);
    Map<String, INDArray> paramTable = initializer().init(conf, layerParamsView, initializeParams);
    ret.setParamTable(paramTable);
    ret.setConf(conf);

    return ret;
}
 
Example #5
Source File: CGVaeReconstructionErrorWithKeyFunction.java    From deeplearning4j with Apache License 2.0
@Override
public VariationalAutoencoder getVaeLayer() {
    ComputationGraph network =
                    new ComputationGraph(ComputationGraphConfiguration.fromJson((String) jsonConfig.getValue()));
    network.init();
    INDArray val = ((INDArray) params.value()).unsafeDuplication();
    if (val.length() != network.numParams(false))
        throw new IllegalStateException(
                        "Network did not have same number of parameters as the broadcasted set parameters");
    network.setParams(val);

    Layer l = network.getLayer(0);
    if (!(l instanceof VariationalAutoencoder)) {
        throw new RuntimeException(
                        "Cannot use CGVaeReconstructionErrorWithKeyFunction on network that doesn't have a VAE "
                                        + "layer as layer 0. Layer type: " + l.getClass());
    }
    return (VariationalAutoencoder) l;
}
 
Example #6
Source File: Gan11Exemple.java    From dl4j-tutorials with MIT License
private void freeze(double gRate, double dRate) {
    Layer[] layers = net.getLayers();
    for (Layer layer : layers) {
        if (layer instanceof BaseLayer) {
            BaseLayer baseLayer = (BaseLayer) layer;
            String layerName = baseLayer.getConf().getLayer().getLayerName();
            // System.out.println("layerName = " + layerName + ", type = " + type);
            if (layerName.contains("g-")) {
                // System.out.println(layerName + " = " + 0);
                net.setLearningRate(layerName, gRate);
            } else if (layerName.contains("d-")) {
                // System.out.println(layerName + " = " + baseLr);
                net.setLearningRate(layerName, dRate);
            }
        }
    }
}
 
Example #7
Source File: CGVaeReconstructionProbWithKeyFunction.java    From deeplearning4j with Apache License 2.0
@Override
public VariationalAutoencoder getVaeLayer() {
    ComputationGraph network =
                    new ComputationGraph(ComputationGraphConfiguration.fromJson((String) jsonConfig.getValue()));
    network.init();
    INDArray val = ((INDArray) params.value()).unsafeDuplication();
    if (val.length() != network.numParams(false))
        throw new IllegalStateException(
                        "Network did not have same number of parameters as the broadcasted set parameters");
    network.setParams(val);

    Layer l = network.getLayer(0);
    if (!(l instanceof VariationalAutoencoder)) {
        throw new RuntimeException(
                        "Cannot use CGVaeReconstructionProbWithKeyFunction on network that doesn't have a VAE "
                                        + "layer as layer 0. Layer type: " + l.getClass());
    }
    return (VariationalAutoencoder) l;
}
 
Example #8
Source File: ConvolutionLayerTest.java    From deeplearning4j with Apache License 2.0
public Layer getContainedConfig() {
    int[] kernelSize = new int[] {2, 2};
    int[] stride = new int[] {2, 2};
    int[] padding = new int[] {0, 0};
    int nChannelsIn = 1;
    int depth = 2;

    INDArray W = Nd4j.create(new double[] {0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5}, new int[] {2, 1, 2, 2});
    INDArray b = Nd4j.create(new double[] {1, 1});
    Layer layer = getCNNConfig(nChannelsIn, depth, kernelSize, stride, padding);
    layer.setParam("W", W);
    layer.setParam("b", b);

    return layer;

}
 
Example #9
Source File: DropConnect.java    From deeplearning4j with Apache License 2.0
@Override
public INDArray getParameter(Layer layer, String paramKey, int iteration, int epoch, boolean train, LayerWorkspaceMgr workspaceMgr) {
    ParamInitializer init = layer.conf().getLayer().initializer();
    INDArray param = layer.getParam(paramKey);

    double p;
    if(weightRetainProbSchedule == null){
        p = weightRetainProb;
    } else {
        p = weightRetainProbSchedule.valueAt(iteration, epoch);
    }

    if (train && init.isWeightParam(layer.conf().getLayer(), paramKey)
            || (applyToBiases && init.isBiasParam(layer.conf().getLayer(), paramKey))) {
        INDArray out = workspaceMgr.createUninitialized(ArrayType.INPUT, param.dataType(), param.shape(), param.ordering());
        Nd4j.getExecutioner().exec(new DropOut(param, out, p));
        return out;
    }
    return param;
}
 
Example #10
Source File: WeightNoise.java    From deeplearning4j with Apache License 2.0
@Override
public INDArray getParameter(Layer layer, String paramKey, int iteration, int epoch, boolean train, LayerWorkspaceMgr workspaceMgr) {

    ParamInitializer init = layer.conf().getLayer().initializer();
    INDArray param = layer.getParam(paramKey);
    if (train && init.isWeightParam(layer.conf().getLayer(), paramKey) ||
            (applyToBias && init.isBiasParam(layer.conf().getLayer(), paramKey))) {

        org.nd4j.linalg.api.rng.distribution.Distribution dist = Distributions.createDistribution(distribution);
        INDArray noise = dist.sample(param.ulike());
        INDArray out = workspaceMgr.createUninitialized(ArrayType.INPUT, param.dataType(), param.shape(), param.ordering());

        if (additive) {
            Nd4j.getExecutioner().exec(new AddOp(param, noise,out));
        } else {
            Nd4j.getExecutioner().exec(new MulOp(param, noise, out));
        }
        return out;
    }
    return param;
}
 
Example #11
Source File: BaseConstraint.java    From deeplearning4j with Apache License 2.0
@Override
public void applyConstraint(Layer layer, int iteration, int epoch) {
    Map<String,INDArray> paramTable = layer.paramTable();
    if(paramTable == null || paramTable.isEmpty() ){
        return;
    }

    ParamInitializer i = layer.conf().getLayer().initializer();
    for(Map.Entry<String,INDArray> e : paramTable.entrySet()){
        // Apply the constraint only to the parameters it was configured for
        if (params != null && params.contains(e.getKey())) {
            apply(e.getValue());
        }
    }
}
 
Example #12
Source File: SubsamplingLayerTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testSubSampleMaxActivate() throws Exception {
    INDArray containedExpectedOut =
                    Nd4j.create(new double[] {5., 7., 6., 8., 4., 7., 5., 9.}, new long[] {1, 2, 2, 2}).castTo(Nd4j.defaultFloatingPointType());
    INDArray containedInput = getContainedData();
    INDArray input = getData();
    Layer layer = getSubsamplingLayer(SubsamplingLayer.PoolingType.MAX);

    INDArray containedOutput = layer.activate(containedInput, false, LayerWorkspaceMgr.noWorkspaces());
    assertTrue(Arrays.equals(containedExpectedOut.shape(), containedOutput.shape()));
    assertEquals(containedExpectedOut, containedOutput);

    INDArray output = layer.activate(input, false, LayerWorkspaceMgr.noWorkspaces());
    assertTrue(Arrays.equals(new long[] {nExamples, nChannelsIn, featureMapWidth, featureMapHeight},
                    output.shape()));
    assertEquals(nChannelsIn, output.size(1), 1e-4); // channels retained
}
 
Example #13
Source File: BatchNormalizationTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testDnnForwardPass() {
    int nOut = 10;
    Layer l = getLayer(nOut, 0.0, false, -1, -1);
    assertEquals(4 * nOut, l.numParams()); //Gamma, beta, global mean, global var

    INDArray randInput = Nd4j.rand(100, nOut);
    INDArray output = l.activate(randInput, true, LayerWorkspaceMgr.noWorkspaces());

    INDArray mean = output.mean(0);
    INDArray stdev = output.std(false, 0);

//        System.out.println(Arrays.toString(mean.data().asFloat()));

    assertArrayEquals(new float[nOut], mean.data().asFloat(), 1e-6f);
    assertEquals(Nd4j.ones(nOut), stdev);

    //If we fix gamma/beta: expect different mean and variance...
    double gamma = 2.0;
    double beta = 3.0;
    l = getLayer(nOut, 0.0, true, gamma, beta);
    assertEquals(2 * nOut, l.numParams()); //Should have only global mean/var parameters
    output = l.activate(randInput, true, LayerWorkspaceMgr.noWorkspaces());
    mean = output.mean(0);
    stdev = output.std(false, 0);

    assertEquals(Nd4j.valueArrayOf(mean.shape(), beta), mean);
    assertEquals(Nd4j.valueArrayOf(stdev.shape(), gamma), stdev);
}
 
Example #14
Source File: SubsamplingLayerTest.java    From deeplearning4j with Apache License 2.0
private Layer getSubsamplingLayer(SubsamplingLayer.PoolingType pooling) {
    NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder()
                    .gradientNormalization(GradientNormalization.RenormalizeL2PerLayer).seed(123)
                    .layer(new SubsamplingLayer.Builder(pooling, new int[] {2, 2}).build()).build();

    return conf.getLayer().instantiate(conf, null, 0, null, true, Nd4j.defaultFloatingPointType());
}
 
Example #15
Source File: CnnLossLayer.java    From deeplearning4j with Apache License 2.0
@Override
public Layer instantiate(NeuralNetConfiguration conf, Collection<TrainingListener> trainingListeners,
                         int layerIndex, INDArray layerParamsView, boolean initializeParams, DataType networkDataType) {
    org.deeplearning4j.nn.layers.convolution.CnnLossLayer ret =
                    new org.deeplearning4j.nn.layers.convolution.CnnLossLayer(conf, networkDataType);
    ret.setListeners(trainingListeners);
    ret.setIndex(layerIndex);
    ret.setParamsViewArray(layerParamsView);
    Map<String, INDArray> paramTable = initializer().init(conf, layerParamsView, initializeParams);
    ret.setParamTable(paramTable);
    ret.setConf(conf);
    return ret;
}
 
Example #16
Source File: BaseLayerTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testSetExistingParamsConvolutionSingleLayer() {
    Layer layer = configureSingleLayer();
    assertNotEquals(paramTable, layer.paramTable());

    layer.setParamTable(paramTable);
    assertEquals(paramTable, layer.paramTable());
}
 
Example #17
Source File: SameDiffLayer.java    From deeplearning4j with Apache License 2.0
@Override
public org.deeplearning4j.nn.api.Layer instantiate(NeuralNetConfiguration conf,
                                                   Collection<TrainingListener> trainingListeners, int layerIndex, INDArray layerParamsView,
                                                   boolean initializeParams, DataType networkDataType) {
    org.deeplearning4j.nn.layers.samediff.SameDiffLayer ret =
                    new org.deeplearning4j.nn.layers.samediff.SameDiffLayer(conf, networkDataType);
    ret.setIndex(layerIndex);
    ret.setParamsViewArray(layerParamsView);
    Map<String, INDArray> paramTable = initializer().init(conf, layerParamsView, initializeParams);
    ret.setParamTable(paramTable);
    ret.setConf(conf);
    return ret;
}
 
Example #18
Source File: LayerVertex.java    From deeplearning4j with Apache License 2.0
public LayerVertex(ComputationGraph graph, String name, int vertexIndex, VertexIndices[] inputVertices,
                VertexIndices[] outputVertices, Layer layer, InputPreProcessor layerPreProcessor,
                boolean outputVertex, DataType dataType) {
    super(graph, name, vertexIndex, inputVertices, outputVertices, dataType);
    this.graph = graph;
    this.vertexName = name;
    this.vertexIndex = vertexIndex;
    this.inputVertices = inputVertices;
    this.outputVertices = outputVertices;
    this.layer = layer;
    this.layerPreProcessor = layerPreProcessor;
    this.outputVertex = outputVertex;

    this.inputs = new INDArray[(inputVertices != null ? inputVertices.length : 0)];
}
 
Example #19
Source File: RnnLossLayer.java    From deeplearning4j with Apache License 2.0
@Override
public Layer instantiate(NeuralNetConfiguration conf, Collection<TrainingListener> trainingListeners,
                         int layerIndex, INDArray layerParamsView, boolean initializeParams, DataType networkDataType) {
    org.deeplearning4j.nn.layers.recurrent.RnnLossLayer ret =
                    new org.deeplearning4j.nn.layers.recurrent.RnnLossLayer(conf, networkDataType);
    ret.setListeners(trainingListeners);
    ret.setIndex(layerIndex);
    ret.setParamsViewArray(layerParamsView);
    Map<String, INDArray> paramTable = initializer().init(conf, layerParamsView, initializeParams);
    ret.setParamTable(paramTable);
    ret.setConf(conf);
    return ret;
}
 
Example #20
Source File: BatchNormalization.java    From deeplearning4j with Apache License 2.0
@Override
public Layer instantiate(NeuralNetConfiguration conf, Collection<TrainingListener> trainingListeners,
                         int layerIndex, INDArray layerParamsView, boolean initializeParams, DataType networkDataType) {
    LayerValidation.assertNOutSet("BatchNormalization", getLayerName(), layerIndex, getNOut());

    org.deeplearning4j.nn.layers.normalization.BatchNormalization ret =
                    new org.deeplearning4j.nn.layers.normalization.BatchNormalization(conf, networkDataType);
    ret.setListeners(trainingListeners);
    ret.setIndex(layerIndex);
    ret.setParamsViewArray(layerParamsView);
    Map<String, INDArray> paramTable = initializer().init(conf, layerParamsView, initializeParams);
    ret.setParamTable(paramTable);
    ret.setConf(conf);
    return ret;
}
 
Example #21
Source File: EmbeddingLayer.java    From deeplearning4j with Apache License 2.0
@Override
public Layer instantiate(NeuralNetConfiguration conf, Collection<TrainingListener> trainingListeners,
                         int layerIndex, INDArray layerParamsView, boolean initializeParams, DataType networkDataType) {
    org.deeplearning4j.nn.layers.feedforward.embedding.EmbeddingLayer ret =
                    new org.deeplearning4j.nn.layers.feedforward.embedding.EmbeddingLayer(conf, networkDataType);
    ret.setListeners(trainingListeners);
    ret.setIndex(layerIndex);
    ret.setParamsViewArray(layerParamsView);
    Map<String, INDArray> paramTable = initializer().init(conf, layerParamsView, initializeParams);
    ret.setParamTable(paramTable);
    ret.setConf(conf);
    return ret;
}
 
Example #22
Source File: GravesBidirectionalLSTM.java    From deeplearning4j with Apache License 2.0
@Override
public Layer instantiate(NeuralNetConfiguration conf, Collection<TrainingListener> trainingListeners,
                         int layerIndex, INDArray layerParamsView, boolean initializeParams, DataType networkDataType) {
    org.deeplearning4j.nn.layers.recurrent.GravesBidirectionalLSTM ret =
                    new org.deeplearning4j.nn.layers.recurrent.GravesBidirectionalLSTM(conf, networkDataType);
    ret.setListeners(trainingListeners);
    ret.setIndex(layerIndex);
    ret.setParamsViewArray(layerParamsView);
    Map<String, INDArray> paramTable = initializer().init(conf, layerParamsView, initializeParams);
    ret.setParamTable(paramTable);
    ret.setConf(conf);
    return ret;
}
 
Example #23
Source File: RnnOutputLayer.java    From deeplearning4j with Apache License 2.0
@Override
public Layer instantiate(NeuralNetConfiguration conf, Collection<TrainingListener> trainingListeners,
                         int layerIndex, INDArray layerParamsView, boolean initializeParams, DataType networkDataType) {
    LayerValidation.assertNInNOutSet("RnnOutputLayer", getLayerName(), layerIndex, getNIn(), getNOut());

    org.deeplearning4j.nn.layers.recurrent.RnnOutputLayer ret =
                    new org.deeplearning4j.nn.layers.recurrent.RnnOutputLayer(conf, networkDataType);
    ret.setListeners(trainingListeners);
    ret.setIndex(layerIndex);
    ret.setParamsViewArray(layerParamsView);
    Map<String, INDArray> paramTable = initializer().init(conf, layerParamsView, initializeParams);
    ret.setParamTable(paramTable);
    ret.setConf(conf);
    return ret;
}
 
Example #24
Source File: BaseStatsListener.java    From deeplearning4j with Apache License 2.0
private String getSessionID(Model model) {
    if (model instanceof MultiLayerNetwork || model instanceof ComputationGraph)
        return sessionID;
    if (model instanceof Layer) {
        //Keep in mind MultiLayerNetwork implements Layer also...
        Layer l = (Layer) model;
        int layerIdx = l.getIndex();
        return sessionID + "_layer" + layerIdx;
    }
    return sessionID; //Should never happen
}
 
Example #25
Source File: CuDNNTestUtils.java    From deeplearning4j with Apache License 2.0
public static void assertHelpersPresent(Layer[] layers) throws Exception {
    for(Layer l : layers){
        //Don't use instanceof here - there are sub conv subclasses
        if(l.getClass() == ConvolutionLayer.class || l instanceof SubsamplingLayer || l instanceof BatchNormalization || l instanceof LSTM){
            Preconditions.checkNotNull(l.getHelper(), l.conf().getLayer().getLayerName());
        }
    }
}
 
Example #26
Source File: CuDNNTestUtils.java    From deeplearning4j with Apache License 2.0
public static void removeHelpers(Layer[] layers) throws Exception {
    for(Layer l : layers){

        if(l instanceof ConvolutionLayer){
            Field f1 = ConvolutionLayer.class.getDeclaredField("helper");
            f1.setAccessible(true);
            f1.set(l, null);
        } else if(l instanceof SubsamplingLayer){
            Field f2 = SubsamplingLayer.class.getDeclaredField("helper");
            f2.setAccessible(true);
            f2.set(l, null);
        } else if(l instanceof BatchNormalization) {
            Field f3 = BatchNormalization.class.getDeclaredField("helper");
            f3.setAccessible(true);
            f3.set(l, null);
        } else if(l instanceof LSTM){
            Field f4 = LSTM.class.getDeclaredField("helper");
            f4.setAccessible(true);
            f4.set(l, null);
        } else if(l instanceof LocalResponseNormalization){
            Field f5 = LocalResponseNormalization.class.getDeclaredField("helper");
            f5.setAccessible(true);
            f5.set(l, null);
        }


        if(l.getHelper() != null){
            throw new IllegalStateException("Did not remove helper for layer: " + l.getClass().getSimpleName());
        }
    }
}
 
Example #27
Source File: SubsamplingLayerTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testSubSampleLayerMaxBackprop() throws Exception {
    INDArray expectedContainedEpsilonInput =
                    Nd4j.create(new double[] {1., 1., 1., 1., 1., 1., 1., 1.}, new int[] {1, 2, 2, 2}).castTo(Nd4j.defaultFloatingPointType());

    INDArray expectedContainedEpsilonResult = Nd4j.create(new double[] {0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 1.,
                    0., 0., 1., 0., 0., 0., 0., 0., 1., 1., 0., 0., 0., 1., 0., 1., 0., 0., 0., 0., 0.},
                    new int[] {1, 2, 4, 4}).castTo(Nd4j.defaultFloatingPointType());

    INDArray input = getContainedData();

    Layer layer = getSubsamplingLayer(SubsamplingLayer.PoolingType.MAX);
    layer.activate(input, false, LayerWorkspaceMgr.noWorkspaces());

    Pair<Gradient, INDArray> containedOutput = layer.backpropGradient(expectedContainedEpsilonInput, LayerWorkspaceMgr.noWorkspaces());
    assertEquals(expectedContainedEpsilonResult, containedOutput.getSecond());
    assertEquals(null, containedOutput.getFirst().getGradientFor("W"));
    assertEquals(expectedContainedEpsilonResult.shape().length, containedOutput.getSecond().shape().length);

    INDArray input2 = getData();
    layer.activate(input2, false, LayerWorkspaceMgr.noWorkspaces());
    long depth = input2.size(1);

    epsilon = Nd4j.ones(5, depth, featureMapHeight, featureMapWidth);

    Pair<Gradient, INDArray> out = layer.backpropGradient(epsilon, LayerWorkspaceMgr.noWorkspaces());
    assertEquals(input.shape().length, out.getSecond().shape().length);
    assertEquals(depth, out.getSecond().size(1)); // channels retained
}
 
Example #28
Source File: Deconvolution2D.java    From deeplearning4j with Apache License 2.0
@Override
public Layer instantiate(NeuralNetConfiguration conf, Collection<TrainingListener> trainingListeners,
                         int layerIndex, INDArray layerParamsView, boolean initializeParams, DataType networkDataType) {
    LayerValidation.assertNInNOutSet("Deconvolution2D", getLayerName(), layerIndex, getNIn(), getNOut());

    org.deeplearning4j.nn.layers.convolution.Deconvolution2DLayer ret =
                    new org.deeplearning4j.nn.layers.convolution.Deconvolution2DLayer(conf, networkDataType);
    ret.setListeners(trainingListeners);
    ret.setIndex(layerIndex);
    ret.setParamsViewArray(layerParamsView);
    Map<String, INDArray> paramTable = initializer().init(conf, layerParamsView, initializeParams);
    ret.setParamTable(paramTable);
    ret.setConf(conf);
    return ret;
}
 
Example #29
Source File: TestWeightNoise.java    From deeplearning4j with Apache License 2.0
@Test
public void testDropConnectValues() {
    Nd4j.getRandom().setSeed(12345);

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .weightInit(WeightInit.ONES)
            .list()
            .layer(new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build())
            .build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();

    Layer l = net.getLayer(0);
    DropConnect d = new DropConnect(0.5);

    INDArray outTest = d.getParameter(l, "W", 0, 0, false, LayerWorkspaceMgr.noWorkspaces());
    assertTrue(l.getParam("W") == outTest);    //Should be same object
    INDArray outTrain = d.getParameter(l, "W", 0, 0, true, LayerWorkspaceMgr.noWorkspaces());
    assertNotEquals(l.getParam("W"), outTrain);

    assertEquals(l.getParam("W"), Nd4j.ones(DataType.FLOAT, 10, 10));

    int countZeros = Nd4j.getExecutioner().exec(new MatchCondition(outTrain, Conditions.equals(0))).getInt(0);
    int countOnes = Nd4j.getExecutioner().exec(new MatchCondition(outTrain, Conditions.equals(1))).getInt(0);

    assertEquals(100, countZeros + countOnes);  //Every value should be either 0 (dropped) or 1 (retained)
    //Stochastic, but this should hold for most cases
    assertTrue(countZeros >= 25 && countZeros <= 75);
    assertTrue(countOnes >= 25 && countOnes <= 75);
}
 
Example #30
Source File: Upsampling2DTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testUpsampling2DBackprop() throws Exception {
    INDArray expectedContainedEpsilonInput =
                    Nd4j.create(new double[] {1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.},
                            new int[] {1, 1, 4, 4});

    INDArray expectedContainedEpsilonResult = Nd4j.create(new double[] {4., 4., 4., 4.},
                    new int[] {1, 1, 2, 2});

    INDArray input = getContainedData();

    Layer layer = getUpsamplingLayer();
    layer.activate(input, false, LayerWorkspaceMgr.noWorkspaces());

    Pair<Gradient, INDArray> containedOutput = layer.backpropGradient(expectedContainedEpsilonInput, LayerWorkspaceMgr.noWorkspaces());

    assertEquals(expectedContainedEpsilonResult, containedOutput.getSecond());
    assertEquals(null, containedOutput.getFirst().getGradientFor("W"));
    assertEquals(expectedContainedEpsilonResult.shape().length, containedOutput.getSecond().shape().length);

    INDArray input2 = getData();
    layer.activate(input2, false, LayerWorkspaceMgr.noWorkspaces());
    val depth = input2.size(1);

    epsilon = Nd4j.ones(5, depth, outputHeight, outputWidth);

    Pair<Gradient, INDArray> out = layer.backpropGradient(epsilon, LayerWorkspaceMgr.noWorkspaces());
    assertEquals(input.shape().length, out.getSecond().shape().length);
    assertEquals(depth, out.getSecond().size(1));
}