Java Code Examples for org.deeplearning4j.nn.api.Layer#getParam()

The following examples show how to use org.deeplearning4j.nn.api.Layer#getParam(). All are taken from the deeplearning4j project; the originating source file is noted above each example.
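In short, Layer#getParam(String) returns the INDArray holding the parameter registered under the given key, such as DefaultParamInitializer.WEIGHT_KEY ("W") or DefaultParamInitializer.BIAS_KEY ("b"). A minimal usage sketch, assuming a freshly initialized two-layer network (layer sizes are illustrative):

// Sketch: fetch a layer's parameter arrays by key from an initialized network.
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list()
        .layer(new DenseLayer.Builder().nIn(4).nOut(3).activation(Activation.TANH).build())
        .layer(new OutputLayer.Builder().nIn(3).nOut(2).activation(Activation.SOFTMAX).build())
        .build();
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();

Layer l0 = net.getLayer(0);
INDArray weights = l0.getParam(DefaultParamInitializer.WEIGHT_KEY); // shape [4, 3]
INDArray bias = l0.getParam(DefaultParamInitializer.BIAS_KEY);      // shape [1, 3]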
Example 1
Source File: WeightNoise.java    From deeplearning4j with Apache License 2.0
@Override
public INDArray getParameter(Layer layer, String paramKey, int iteration, int epoch, boolean train, LayerWorkspaceMgr workspaceMgr) {

    ParamInitializer init = layer.conf().getLayer().initializer();
    INDArray param = layer.getParam(paramKey);
    if ((train && init.isWeightParam(layer.conf().getLayer(), paramKey)) ||
            (applyToBias && init.isBiasParam(layer.conf().getLayer(), paramKey))) {

        org.nd4j.linalg.api.rng.distribution.Distribution dist = Distributions.createDistribution(distribution);
        INDArray noise = dist.sample(param.ulike());
        INDArray out = workspaceMgr.createUninitialized(ArrayType.INPUT, param.dataType(), param.shape(), param.ordering());

        if (additive) {
            Nd4j.getExecutioner().exec(new AddOp(param, noise, out));
        } else {
            Nd4j.getExecutioner().exec(new MulOp(param, noise, out));
        }
        return out;
    }
    return param;
}
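A WeightNoise instance like the one above is normally attached through the configuration builder rather than invoked directly. A hedged sketch of that wiring (the distribution parameters are illustrative):

// Sketch: additive Gaussian noise on the weights of every layer (values illustrative).
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
        .weightNoise(new WeightNoise(new NormalDistribution(0, 0.05), true)) // true = additive
        .list()
        .layer(new DenseLayer.Builder().nIn(10).nOut(10).build())
        .layer(new OutputLayer.Builder().nIn(10).nOut(2).activation(Activation.SOFTMAX).build())
        .build();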
 
Example 2
Source File: DropConnect.java    From deeplearning4j with Apache License 2.0
@Override
public INDArray getParameter(Layer layer, String paramKey, int iteration, int epoch, boolean train, LayerWorkspaceMgr workspaceMgr) {
    ParamInitializer init = layer.conf().getLayer().initializer();
    INDArray param = layer.getParam(paramKey);

    double p;
    if(weightRetainProbSchedule == null){
        p = weightRetainProb;
    } else {
        p = weightRetainProbSchedule.valueAt(iteration, epoch);
    }

    if ((train && init.isWeightParam(layer.conf().getLayer(), paramKey))
            || (applyToBiases && init.isBiasParam(layer.conf().getLayer(), paramKey))) {
        INDArray out = workspaceMgr.createUninitialized(ArrayType.INPUT, param.dataType(), param.shape(), param.ordering());
        Nd4j.getExecutioner().exec(new DropOut(param, out, p));
        return out;
    }
    return param;
}
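As with WeightNoise, DropConnect is configured on the builder; during training each weight is then retained with probability p and the result is scaled accordingly by the DropOut op. A brief sketch (the retain probability is illustrative):

// Sketch: retain each weight with probability 0.9 during training (value illustrative).
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
        .weightNoise(new DropConnect(0.9))
        .list()
        .layer(new DenseLayer.Builder().nIn(10).nOut(10).build())
        .layer(new OutputLayer.Builder().nIn(10).nOut(2).activation(Activation.SOFTMAX).build())
        .build();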
 
Example 3
Source File: EmbeddingLayerTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testEmbeddingLayerConfig() {

    for (boolean hasBias : new boolean[]{true, false}) {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().activation(Activation.TANH).list()
                .layer(0, new EmbeddingLayer.Builder().hasBias(hasBias).nIn(10).nOut(5).build())
                .layer(1, new OutputLayer.Builder().nIn(5).nOut(4).activation(Activation.SOFTMAX).build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        Layer l0 = net.getLayer(0);

        assertEquals(org.deeplearning4j.nn.layers.feedforward.embedding.EmbeddingLayer.class, l0.getClass());
        assertEquals(10, ((FeedForwardLayer) l0.conf().getLayer()).getNIn());
        assertEquals(5, ((FeedForwardLayer) l0.conf().getLayer()).getNOut());

        INDArray weights = l0.getParam(DefaultParamInitializer.WEIGHT_KEY);
        INDArray bias = l0.getParam(DefaultParamInitializer.BIAS_KEY);
        assertArrayEquals(new long[]{10, 5}, weights.shape());
        if (hasBias) {
            assertArrayEquals(new long[]{1, 5}, bias.shape());
        }
    }
}
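Instead of fetching parameters one key at a time, the full parameter map is also available. A short sketch using the l0 layer from the test above:

// Sketch: enumerate every parameter of the layer via its parameter table.
for (Map.Entry<String, INDArray> e : l0.paramTable().entrySet()) {
    System.out.println(e.getKey() + " -> " + Arrays.toString(e.getValue().shape()));
}
// Prints e.g. "W -> [10, 5]" and, when hasBias is true, "b -> [1, 5]".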
 
Example 4
Source File: NeuralNetConfigurationTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testRNG() {
    DenseLayer layer = new DenseLayer.Builder().nIn(trainingSet.numInputs()).nOut(trainingSet.numOutcomes())
                    .weightInit(WeightInit.UNIFORM).activation(Activation.TANH).build();

    NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder().seed(123)
                    .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT).layer(layer).build();

    long numParams = conf.getLayer().initializer().numParams(conf);
    INDArray params = Nd4j.create(1, numParams);
    Layer model = conf.getLayer().instantiate(conf, null, 0, params, true, params.dataType());
    INDArray modelWeights = model.getParam(DefaultParamInitializer.WEIGHT_KEY);


    DenseLayer layer2 = new DenseLayer.Builder().nIn(trainingSet.numInputs()).nOut(trainingSet.numOutcomes())
                    .weightInit(WeightInit.UNIFORM).activation(Activation.TANH).build();
    NeuralNetConfiguration conf2 = new NeuralNetConfiguration.Builder().seed(123)
                    .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT).layer(layer2).build();

    long numParams2 = conf2.getLayer().initializer().numParams(conf2);
    INDArray params2 = Nd4j.create(1, numParams2);
    Layer model2 = conf2.getLayer().instantiate(conf2, null, 0, params2, true, params2.dataType());
    INDArray modelWeights2 = model2.getParam(DefaultParamInitializer.WEIGHT_KEY);

    assertEquals(modelWeights, modelWeights2);
}
 
Example 5
Source File: EmbeddingLayerTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testEmbeddingSequenceLayerConfig() {

    int inputLength = 6;
    int nIn = 10;
    int embeddingDim = 5;
    int nout = 4;

    for (boolean hasBias : new boolean[]{true, false}) {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().activation(Activation.TANH).list()
                .layer(new EmbeddingSequenceLayer.Builder().hasBias(hasBias)
                        .inputLength(inputLength).nIn(nIn).nOut(embeddingDim).build())
                .layer(new RnnOutputLayer.Builder().nIn(embeddingDim).nOut(nout).activation(Activation.SOFTMAX).build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        Layer l0 = net.getLayer(0);

        assertEquals(org.deeplearning4j.nn.layers.feedforward.embedding.EmbeddingSequenceLayer.class, l0.getClass());
        assertEquals(nIn, ((FeedForwardLayer) l0.conf().getLayer()).getNIn());
        assertEquals(embeddingDim, ((FeedForwardLayer) l0.conf().getLayer()).getNOut());

        INDArray weights = l0.getParam(DefaultParamInitializer.WEIGHT_KEY);
        INDArray bias = l0.getParam(DefaultParamInitializer.BIAS_KEY);
        assertArrayEquals(new long[]{nIn, embeddingDim}, weights.shape());
        if (hasBias) {
            assertArrayEquals(new long[]{1, embeddingDim}, bias.shape());
        }
    }
}
 
Example 6
Source File: NeuralNetConfigurationTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testSetSeedSize() {
    Nd4j.getRandom().setSeed(123);

    Layer model = getLayer(trainingSet.numInputs(), trainingSet.numOutcomes(), new WeightInitXavier(), true);
    INDArray modelWeights = model.getParam(DefaultParamInitializer.WEIGHT_KEY);
    Nd4j.getRandom().setSeed(123);

    Layer model2 = getLayer(trainingSet.numInputs(), trainingSet.numOutcomes(), new WeightInitXavier(), true);
    INDArray modelWeights2 = model2.getParam(DefaultParamInitializer.WEIGHT_KEY);
    assertEquals(modelWeights, modelWeights2);
}
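Examples 6 through 9 call a getLayer(...) helper that is defined elsewhere in NeuralNetConfigurationTest. A plausible reconstruction following the instantiation pattern from Example 4 (the exact signature and the meaning of the final boolean flag are assumptions):

// Hypothetical reconstruction of the test helper; mirrors Example 4's instantiation pattern.
private static Layer getLayer(int nIn, int nOut, IWeightInit weightInit, boolean initializeParams) {
    DenseLayer layerConf = new DenseLayer.Builder().nIn(nIn).nOut(nOut)
            .weightInit(weightInit).activation(Activation.TANH).build();
    NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder().layer(layerConf).build();
    long numParams = conf.getLayer().initializer().numParams(conf);
    INDArray params = Nd4j.create(1, numParams);
    return conf.getLayer().instantiate(conf, null, 0, params, initializeParams, params.dataType());
}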
 
Example 7
Source File: NeuralNetConfigurationTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testSetSeedNormalized() {
    Nd4j.getRandom().setSeed(123);

    Layer model = getLayer(trainingSet.numInputs(), trainingSet.numOutcomes(), new WeightInitXavier(), true);
    INDArray modelWeights = model.getParam(DefaultParamInitializer.WEIGHT_KEY);
    Nd4j.getRandom().setSeed(123);

    Layer model2 = getLayer(trainingSet.numInputs(), trainingSet.numOutcomes(), new WeightInitXavier(), true);
    INDArray modelWeights2 = model2.getParam(DefaultParamInitializer.WEIGHT_KEY);
    assertEquals(modelWeights, modelWeights2);
}
 
Example 8
Source File: NeuralNetConfigurationTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testSetSeedXavier() {
    Nd4j.getRandom().setSeed(123);

    Layer model = getLayer(trainingSet.numInputs(), trainingSet.numOutcomes(), new WeightInitUniform(), true);
    INDArray modelWeights = model.getParam(DefaultParamInitializer.WEIGHT_KEY);
    Nd4j.getRandom().setSeed(123);

    Layer model2 = getLayer(trainingSet.numInputs(), trainingSet.numOutcomes(), new WeightInitUniform(), true);
    INDArray modelWeights2 = model2.getParam(DefaultParamInitializer.WEIGHT_KEY);

    assertEquals(modelWeights, modelWeights2);
}
 
Example 9
Source File: NeuralNetConfigurationTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testSetSeedDistribution() {
    Nd4j.getRandom().setSeed(123);

    Layer model = getLayer(trainingSet.numInputs(), trainingSet.numOutcomes(),
            new WeightInitDistribution(new NormalDistribution(1, 1)), true);
    INDArray modelWeights = model.getParam(DefaultParamInitializer.WEIGHT_KEY);
    Nd4j.getRandom().setSeed(123);

    Layer model2 = getLayer(trainingSet.numInputs(), trainingSet.numOutcomes(),
            new WeightInitDistribution(new NormalDistribution(1, 1)), true);
    INDArray modelWeights2 = model2.getParam(DefaultParamInitializer.WEIGHT_KEY);

    assertEquals(modelWeights, modelWeights2);
}
 
Example 10
Source File: TestWeightNoise.java    From deeplearning4j with Apache License 2.0
@Override
public INDArray getParameter(Layer layer, String paramKey, int iteration, int epoch, boolean train, LayerWorkspaceMgr workspaceMgr) {
    allCalls.add(new WeightNoiseCall(layer.getIndex(), paramKey, iteration, epoch, train));
    return layer.getParam(paramKey);
}