Java Code Examples for org.deeplearning4j.nn.conf.NeuralNetConfiguration#setLayer()

The following examples show how to use org.deeplearning4j.nn.conf.NeuralNetConfiguration#setLayer(). You can vote up the examples you find useful or vote down those you don't, and follow the links above each example to visit the original project or source file. You may also check out the related API usage in the sidebar.
Example 1
Source File: WeightLayerTestCase.java    From jstarcraft-ai with Apache License 2.0 6 votes vote down vote up
@Override
protected AbstractLayer<?> getOldFunction() {
    // Dense layer under test: 2 inputs -> 1 output, uniform weight init, sigmoid activation.
    DenseLayer dense = new DenseLayer();
    dense.setWeightInit(WeightInit.UNIFORM);
    dense.setNIn(2);
    dense.setNOut(1);
    dense.setActivationFn(new ActivationSigmoid());
    // Identical regularization on weights and biases: L1 = 0.01, L2 = 0.05.
    dense.setL1(0.01D);
    dense.setL1Bias(0.01D);
    dense.setL2(0.05D);
    dense.setL2Bias(0.05D);
    // Wrap the layer configuration in a network configuration before instantiation.
    NeuralNetConfiguration conf = new NeuralNetConfiguration();
    conf.setLayer(dense);
    // Parameter and gradient views hold 3 values (2 weights + 1 bias), zero-initialized.
    AbstractLayer<?> layer = AbstractLayer.class.cast(dense.instantiate(conf, null, 0, Nd4j.zeros(3), true));
    layer.setBackpropGradientsViewArray(Nd4j.zeros(3));
    return layer;
}
 
Example 2
Source File: EmbedLayerTestCase.java    From jstarcraft-ai with Apache License 2.0 6 votes vote down vote up
@Override
protected AbstractLayer<?> getOldFunction() {
    // Embedding layer under test: vocabulary of 5, embedding dimension 2,
    // uniform weight init, sigmoid activation.
    EmbeddingLayer embedding = new EmbeddingLayer();
    embedding.setWeightInit(WeightInit.UNIFORM);
    embedding.setNIn(5);
    embedding.setNOut(2);
    embedding.setActivationFn(new ActivationSigmoid());
    // Identical regularization on weights and biases: L1 = 0.01, L2 = 0.05.
    embedding.setL1(0.01D);
    embedding.setL1Bias(0.01D);
    embedding.setL2(0.05D);
    embedding.setL2Bias(0.05D);
    // Wrap the layer configuration in a network configuration before instantiation.
    NeuralNetConfiguration conf = new NeuralNetConfiguration();
    conf.setLayer(embedding);
    // Parameter and gradient views hold 12 values (5*2 weights + 2 biases), zero-initialized.
    AbstractLayer<?> layer = AbstractLayer.class.cast(embedding.instantiate(conf, null, 0, Nd4j.zeros(12), true));
    layer.setBackpropGradientsViewArray(Nd4j.zeros(12));
    return layer;
}
 
Example 3
Source File: Bidirectional.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Override
public org.deeplearning4j.nn.api.Layer instantiate(NeuralNetConfiguration conf,
                                                   Collection<TrainingListener> trainingListeners, int layerIndex, INDArray layerParamsView,
                                                   boolean initializeParams, DataType networkDataType) {
    // Build two independent configurations so the forward and backward halves
    // can each carry their own layer without mutating the caller's conf.
    NeuralNetConfiguration c1 = conf.clone();
    NeuralNetConfiguration c2 = conf.clone();
    c1.setLayer(fwd);
    c2.setLayer(bwd);

    // Split the flattened parameter view in half: first half for the forward
    // layer, second half for the backward layer. Assumes layerParamsView has
    // an even number of elements laid out as a single row.
    long n = layerParamsView.length() / 2;
    INDArray fp = layerParamsView.get(interval(0,0,true), interval(0, n));
    INDArray bp = layerParamsView.get(interval(0,0,true), interval(n, 2 * n));
    org.deeplearning4j.nn.api.Layer f = fwd.instantiate(c1, trainingListeners, layerIndex, fp, initializeParams, networkDataType);

    org.deeplearning4j.nn.api.Layer b = bwd.instantiate(c2, trainingListeners, layerIndex, bp, initializeParams, networkDataType);

    // The bidirectional wrapper owns the full (undivided) parameter view.
    BidirectionalLayer ret = new BidirectionalLayer(conf, f, b, layerParamsView);
    Map<String, INDArray> paramTable = initializer().init(conf, layerParamsView, initializeParams);
    ret.setParamTable(paramTable);
    ret.setConf(conf);

    return ret;
}
 
Example 4
Source File: FrozenLayerWithBackpropParamInitializer.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Override
public Map<String, INDArray> init(NeuralNetConfiguration conf, INDArray paramsView, boolean initializeParams) {
    FrozenLayerWithBackprop fl = (FrozenLayerWithBackprop) conf.getLayer();
    Layer innerLayer = fl.getUnderlying();
    ParamInitializer initializer = innerLayer.initializer();
    // Temporarily swap in the wrapped layer so the delegate initializer sees it.
    conf.setLayer(innerLayer);
    try {
        return initializer.init(conf, paramsView, initializeParams);
    } finally {
        // Restore the wrapper even if the delegate throws, so conf is never
        // left pointing at the inner layer.
        conf.setLayer(fl);
    }
}
 
Example 5
Source File: FrozenLayerWithBackpropParamInitializer.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Override
public Map<String, INDArray> getGradientsFromFlattened(NeuralNetConfiguration conf, INDArray gradientView) {
    FrozenLayerWithBackprop fl = (FrozenLayerWithBackprop) conf.getLayer();
    Layer innerLayer = fl.getUnderlying();
    ParamInitializer initializer = innerLayer.initializer();
    // Temporarily swap in the wrapped layer so the delegate initializer sees it.
    conf.setLayer(innerLayer);
    try {
        return initializer.getGradientsFromFlattened(conf, gradientView);
    } finally {
        // Restore the wrapper even if the delegate throws, so conf is never
        // left pointing at the inner layer.
        conf.setLayer(fl);
    }
}
 
Example 6
Source File: WrapperLayerParamInitializer.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Override
public Map<String, INDArray> init(NeuralNetConfiguration conf, INDArray paramsView, boolean initializeParams) {
    Layer orig = conf.getLayer();
    Layer l = underlying(conf.getLayer());
    // Temporarily swap in the unwrapped layer so its initializer sees it.
    conf.setLayer(l);
    try {
        return l.initializer().init(conf, paramsView, initializeParams);
    } finally {
        // Restore the original wrapper even if the delegate throws, so conf
        // is never left pointing at the inner layer.
        conf.setLayer(orig);
    }
}
 
Example 7
Source File: WrapperLayerParamInitializer.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Override
public Map<String, INDArray> getGradientsFromFlattened(NeuralNetConfiguration conf, INDArray gradientView) {
    Layer orig = conf.getLayer();
    Layer l = underlying(conf.getLayer());
    conf.setLayer(l);
    Map<String,INDArray> m = l.initializer().getGradientsFromFlattened(conf, gradientView);
    conf.setLayer(orig);
    return m;
}
 
Example 8
Source File: FrozenLayerParamInitializer.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Override
public Map<String, INDArray> init(NeuralNetConfiguration conf, INDArray paramsView, boolean initializeParams) {
    FrozenLayer fl = (FrozenLayer) conf.getLayer();
    Layer innerLayer = fl.getLayer();
    ParamInitializer initializer = innerLayer.initializer();
    conf.setLayer(innerLayer);
    Map<String, INDArray> m = initializer.init(conf, paramsView, initializeParams);
    conf.setLayer(fl);

    return m;
}
 
Example 9
Source File: FrozenLayerParamInitializer.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Override
public Map<String, INDArray> getGradientsFromFlattened(NeuralNetConfiguration conf, INDArray gradientView) {
    FrozenLayer fl = (FrozenLayer) conf.getLayer();
    Layer innerLayer = fl.getLayer();
    ParamInitializer initializer = innerLayer.initializer();
    conf.setLayer(innerLayer);
    Map<String, INDArray> m = initializer.getGradientsFromFlattened(conf, gradientView);
    conf.setLayer(fl);
    return m;
}
 
Example 10
Source File: TimeDistributed.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Override
public org.deeplearning4j.nn.api.Layer instantiate(NeuralNetConfiguration conf, Collection<TrainingListener> trainingListeners,
                                                   int layerIndex, INDArray layerParamsView, boolean initializeParams, DataType networkDataType) {
    NeuralNetConfiguration conf2 = conf.clone();
    conf2.setLayer(((TimeDistributed) conf2.getLayer()).getUnderlying());
    return new TimeDistributedLayer(underlying.instantiate(conf2, trainingListeners, layerIndex, layerParamsView,
            initializeParams, networkDataType), rnnDataFormat);
}
 
Example 11
Source File: LastTimeStep.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Override
public org.deeplearning4j.nn.api.Layer instantiate(NeuralNetConfiguration conf,
                                                   Collection<TrainingListener> trainingListeners, int layerIndex, INDArray layerParamsView,
                                                   boolean initializeParams, DataType networkDataType) {
    NeuralNetConfiguration conf2 = conf.clone();
    conf2.setLayer(((LastTimeStep) conf2.getLayer()).getUnderlying());
    return new LastTimeStepLayer(underlying.instantiate(conf2, trainingListeners, layerIndex, layerParamsView,
                    initializeParams, networkDataType));
}
 
Example 12
Source File: MaskZeroLayer.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Override
public org.deeplearning4j.nn.api.Layer instantiate(NeuralNetConfiguration conf,
                                                   Collection<TrainingListener> trainingListeners, int layerIndex, INDArray layerParamsView,
                                                   boolean initializeParams, DataType networkDataType) {

    NeuralNetConfiguration conf2 = conf.clone();
    conf2.setLayer(((BaseWrapperLayer) conf2.getLayer()).getUnderlying());

    org.deeplearning4j.nn.api.Layer underlyingLayer =
                    underlying.instantiate(conf2, trainingListeners, layerIndex, layerParamsView, initializeParams, networkDataType);
    return new org.deeplearning4j.nn.layers.recurrent.MaskZeroLayer(underlyingLayer, maskingValue);
}
 
Example 13
Source File: FrozenLayerWithBackprop.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
public NeuralNetConfiguration getInnerConf(NeuralNetConfiguration conf) {
    NeuralNetConfiguration nnc = conf.clone();
    nnc.setLayer(underlying);
    return nnc;
}
 
Example 14
Source File: FrozenLayer.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
public NeuralNetConfiguration getInnerConf(NeuralNetConfiguration conf) {
    NeuralNetConfiguration nnc = conf.clone();
    nnc.setLayer(layer);
    return nnc;
}