Java Code Examples for org.deeplearning4j.nn.multilayer.MultiLayerNetwork#getParam()

The following examples show how to use org.deeplearning4j.nn.multilayer.MultiLayerNetwork#getParam(). The examples are taken from open source projects; you can go to the original project or source file by following the links above each example.
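
For orientation, getParam() retrieves a single parameter array by key, where keys follow the convention layerIndex_paramName: "0_W" is the weight matrix of layer 0, "0_b" its bias, and "0_RW" the recurrent weights of a recurrent layer 0. A minimal sketch (imports omitted, as in the examples below; the layer sizes here are arbitrary):

MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
        .list()
        .layer(new DenseLayer.Builder().nIn(4).nOut(3).build())
        .layer(new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(3).nOut(2).build())
        .build();

MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();                                // parameters only exist after init()

INDArray w0 = net.getParam("0_W");         // weights of layer 0, shape [4, 3]
INDArray b1 = net.getParam("1_b");         // bias of layer 1, shape [1, 2]
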
Example 1
Source File: TestSimpleRnn.java    From deeplearning4j with Apache License 2.0
@Test
public void testBiasInit(){
    Nd4j.getRandom().setSeed(12345);
    int nIn = 5;
    int layerSize = 6;

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .updater(new NoOp())
            .weightInit(WeightInit.XAVIER)
            .activation(Activation.TANH)
            .list()
            .layer(new SimpleRnn.Builder().nIn(nIn).nOut(layerSize).dataFormat(rnnDataFormat)
                    .biasInit(100)
                    .build())
            .build();

    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();

    INDArray bArr = net.getParam("0_b");
    assertEquals(Nd4j.valueArrayOf(new long[]{1,layerSize}, 100.0f), bArr);
}
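
If the exact keys are not known, the full parameter table can be inspected rather than guessed; a small sketch, continuing from the net above:

// Print every parameter key and shape in the network, e.g. "0_b" -> [1, 6].
for (Map.Entry<String, INDArray> e : net.paramTable().entrySet()) {
    System.out.println(e.getKey() + " -> " + Arrays.toString(e.getValue().shape()));
}
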
 
Example 2
Source File: Word2VecTestsSmall.java    From deeplearning4j with Apache License 2.0
@Test(timeout = 300000)
public void testW2VEmbeddingLayerInit() throws Exception {
    Nd4j.setDefaultDataTypes(DataType.FLOAT, DataType.FLOAT);

    val inputFile = Resources.asFile("big/raw_sentences.txt");
    val iter = ParagraphVectorsTest.getIterator(isIntegrationTests(), inputFile);
    // val iter = new BasicLineIterator(inputFile);
    val t = new DefaultTokenizerFactory();
    t.setTokenPreProcessor(new CommonPreprocessor());

    Word2Vec vec = new Word2Vec.Builder()
            .minWordFrequency(1)
            .epochs(1)
            .layerSize(300)
            .limitVocabularySize(1) // Limit the vocab to 1 word (plus UNK below, 2 in total)
            .windowSize(5)
            .allowParallelTokenization(true)
            .batchSize(512)
            .learningRate(0.025)
            .minLearningRate(0.0001)
            .negativeSample(0.0)
            .sampling(0.0)
            .useAdaGrad(false)
            .useHierarchicSoftmax(true)
            .iterations(1)
            .useUnknown(true) // Using UNK with a limited vocab size causes the issue
            .seed(42)
            .iterate(iter)
            .workers(4)
            .tokenizerFactory(t).build();

    vec.fit();

    INDArray w = vec.lookupTable().getWeights();
    System.out.println(w);

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .seed(12345).list()
            .layer(new EmbeddingLayer.Builder().weightInit(vec).build())
            .layer(new DenseLayer.Builder().activation(Activation.TANH).nIn(w.size(1)).nOut(3).build())
            .layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(3)
                    .nOut(4).build())
            .build();

    final MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();

    INDArray w0 = net.getParam("0_W");
    assertEquals(w, w0);

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ModelSerializer.writeModel(net, baos, true);
    byte[] bytes = baos.toByteArray();

    ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
    final MultiLayerNetwork restored = ModelSerializer.restoreMultiLayerNetwork(bais, true);

    assertEquals(net.getLayerWiseConfigurations(), restored.getLayerWiseConfigurations());
    assertTrue(net.params().equalsWithEps(restored.params(), 2e-3));
}
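
Note that getParam() returns a view into the network's single flattened parameter vector, so in-place modifications are reflected in params() and in subsequent inference. A short sketch, assuming the net from the example above:

INDArray w0 = net.getParam("0_W");
w0.assign(0);                              // zeroes the embedding weights in place
// Alternatively, replace a parameter wholesale:
net.setParam("0_W", Nd4j.rand(DataType.FLOAT, w0.shape()));
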
 
Example 3
Source File: TestRecurrentWeightInit.java    From deeplearning4j with Apache License 2.0
@Test
public void testRWInit() {

    for (boolean rwInit : new boolean[]{false, true}) {
        for (int i = 0; i < 3; i++) {

            NeuralNetConfiguration.ListBuilder b = new NeuralNetConfiguration.Builder()
                    .weightInit(new UniformDistribution(0, 1))
                    .list();

            if(rwInit) {
                switch (i) {
                    case 0:
                        b.layer(new LSTM.Builder().nIn(10).nOut(10)
                                .weightInitRecurrent(new UniformDistribution(2, 3))
                                .build());
                        break;
                    case 1:
                        b.layer(new GravesLSTM.Builder().nIn(10).nOut(10)
                                .weightInitRecurrent(new UniformDistribution(2, 3))
                                .build());
                        break;
                    case 2:
                        b.layer(new SimpleRnn.Builder().nIn(10).nOut(10)
                                .weightInitRecurrent(new UniformDistribution(2, 3)).build());
                        break;
                    default:
                        throw new RuntimeException();
                }
            } else {
                switch (i) {
                    case 0:
                        b.layer(new LSTM.Builder().nIn(10).nOut(10).build());
                        break;
                    case 1:
                        b.layer(new GravesLSTM.Builder().nIn(10).nOut(10).build());
                        break;
                    case 2:
                        b.layer(new SimpleRnn.Builder().nIn(10).nOut(10).build());
                        break;
                    default:
                        throw new RuntimeException();
                }
            }

            MultiLayerNetwork net = new MultiLayerNetwork(b.build());
            net.init();

            INDArray rw = net.getParam("0_RW");
            double min = rw.minNumber().doubleValue();
            double max = rw.maxNumber().doubleValue();
            if(rwInit){
                assertTrue(String.valueOf(min), min >= 2.0);
                assertTrue(String.valueOf(max), max <= 3.0);
            } else {
                assertTrue(String.valueOf(min), min >= 0.0);
                assertTrue(String.valueOf(max), max <= 1.0);
            }
        }
    }
}
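
The shape of the "0_RW" array checked above depends on the layer type: for SimpleRnn it is [layerSize, layerSize], while for LSTM the four gate blocks are concatenated into [layerSize, 4 * layerSize] (GravesLSTM additionally stores its peephole weights in the same array). A quick way to confirm, assuming the net built inside the loop:

INDArray rw = net.getParam("0_RW");
System.out.println(Arrays.toString(rw.shape()));   // e.g. [10, 40] in the LSTM case
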
 
Example 4
Source File: TestSimpleRnn.java    From deeplearning4j with Apache License 2.0
@Test
public void testSimpleRnn(){
    Nd4j.getRandom().setSeed(12345);

    int m = 3;
    int nIn = 5;
    int layerSize = 6;
    int tsLength = 7;
    INDArray in;
    if (rnnDataFormat == RNNFormat.NCW){
        in = Nd4j.rand(DataType.FLOAT, m, nIn, tsLength);
    }
    else{
        in = Nd4j.rand(DataType.FLOAT, m, tsLength, nIn);
    }


    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .updater(new NoOp())
            .weightInit(WeightInit.XAVIER)
            .activation(Activation.TANH)
            .list()
            .layer(new SimpleRnn.Builder().nIn(nIn).nOut(layerSize).dataFormat(rnnDataFormat).build())
            .build();

    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();

    INDArray out = net.output(in);

    INDArray w = net.getParam("0_W");
    INDArray rw = net.getParam("0_RW");
    INDArray b = net.getParam("0_b");

    INDArray outLast = null;
    for( int i=0; i<tsLength; i++ ){
        INDArray inCurrent;
        if (rnnDataFormat == RNNFormat.NCW){
            inCurrent = in.get(all(), all(), point(i));
        }
        else{
            inCurrent = in.get(all(), point(i), all());
        }

        INDArray outExpCurrent = inCurrent.mmul(w);
        if(outLast != null){
            outExpCurrent.addi(outLast.mmul(rw));
        }

        outExpCurrent.addiRowVector(b);

        Transforms.tanh(outExpCurrent, false);

        INDArray outActCurrent;
        if (rnnDataFormat == RNNFormat.NCW){
            outActCurrent = out.get(all(), all(), point(i));
        }
        else{
            outActCurrent = out.get(all(), point(i), all());
        }
        assertEquals(String.valueOf(i), outExpCurrent, outActCurrent);

        outLast = outExpCurrent;
    }


    TestUtils.testModelSerialization(net);
}
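
The manual loop above re-derives the SimpleRnn recurrence, out_t = tanh(in_t * W + out_{t-1} * RW + b), from the three arrays fetched with getParam(): the input weights W ("0_W"), the recurrent weights RW ("0_RW"), and the bias b ("0_b").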
 
Example 5
Source File: BatchNormalizationTest.java    From deeplearning4j with Apache License 2.0
@Test
public void checkMeanVarianceEstimateCNNCompareModes() throws Exception {

    Nd4j.getRandom().setSeed(12345);
    //Check that the internal global mean/variance estimate is approximately correct

    //First, Mnist data as 2d input (NOT taking into account convolution property)
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(Updater.RMSPROP).seed(12345).list()
            .layer(0, new BatchNormalization.Builder().nIn(3).nOut(3).eps(1e-5).decay(0.95).useLogStd(false).build())
            .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MSE).weightInit(WeightInit.XAVIER)
                    .activation(Activation.IDENTITY).nOut(10).build())
            .setInputType(InputType.convolutional(5, 5, 3)).build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();

    Nd4j.getRandom().setSeed(12345);
    MultiLayerConfiguration conf2 = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(Updater.RMSPROP).seed(12345).list()
            .layer(0, new BatchNormalization.Builder().nIn(3).nOut(3).eps(1e-5).decay(0.95).useLogStd(true).build())
            .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MSE).weightInit(WeightInit.XAVIER)
                    .activation(Activation.IDENTITY).nOut(10).build())
            .setInputType(InputType.convolutional(5, 5, 3)).build();
    MultiLayerNetwork net2 = new MultiLayerNetwork(conf2);
    net2.init();

    int minibatch = 32;
    for (int i = 0; i < 10; i++) {
        DataSet ds = new DataSet(Nd4j.rand(new int[]{minibatch, 3, 5, 5}), Nd4j.rand(minibatch, 10));
        net.fit(ds);
        net2.fit(ds);

        INDArray globalVar = net.getParam("0_" + BatchNormalizationParamInitializer.GLOBAL_VAR);

        INDArray log10std = net2.getParam("0_" + BatchNormalizationParamInitializer.GLOBAL_LOG_STD);
        INDArray globalVar2 = Nd4j.valueArrayOf(log10std.shape(), 10.0).castTo(log10std.dataType());
        Transforms.pow(globalVar2, log10std, false);    // stdev = 10^(log10(stdev))
        globalVar2.muli(globalVar2);

        assertEquals(globalVar, globalVar2);
    }
}
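
Here the keys passed to getParam() are assembled from the BatchNormalizationParamInitializer constants rather than hard-coded strings. The conversion in the loop recovers the variance from the stored log standard deviation as var = (10^log10(std))^2, which is exactly what Transforms.pow(...) followed by muli(...) computes.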
 
Example 6
Source File: TestConstraints.java    From deeplearning4j with Apache License 2.0
@Test
public void testLayerRecurrentConstraints() throws Exception {

    LayerConstraint[] constraints = new LayerConstraint[]{
            new MaxNormConstraint(0.5, 1),
            new MinMaxNormConstraint(0.3, 0.4, 1.0, 1),
            new NonNegativeConstraint(),
            new UnitNormConstraint(1)
    };

    for (LayerConstraint lc : constraints) {

        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .updater(new Sgd(0.0))
                .dist(new NormalDistribution(0, 5))
                .list()
                .layer(new LSTM.Builder().nIn(12).nOut(10)
                        .constrainRecurrent(lc).build())
                .layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        LayerConstraint exp = lc.clone();
        assertEquals(exp.toString(), net.getLayer(0).conf().getLayer().getConstraints().get(0).toString());

        INDArray input = Nd4j.rand(3, 12);
        INDArray labels = Nd4j.rand(3, 8);

        net.fit(input.reshape(3,12,1), labels);

        INDArray RW0 = net.getParam("0_RW");


        if (lc instanceof MaxNormConstraint) {
            assertTrue(RW0.norm2(1).maxNumber().doubleValue() <= 0.5);

        } else if (lc instanceof MinMaxNormConstraint) {
            assertTrue(RW0.norm2(1).minNumber().doubleValue() >= 0.3);
            assertTrue(RW0.norm2(1).maxNumber().doubleValue() <= 0.4);
        } else if (lc instanceof NonNegativeConstraint) {
            assertTrue(RW0.minNumber().doubleValue() >= 0.0);
        } else if (lc instanceof UnitNormConstraint) {
            assertEquals(1.0, RW0.norm2(1).minNumber().doubleValue(), 1e-6);
            assertEquals(1.0, RW0.norm2(1).maxNumber().doubleValue(), 1e-6);
        }

        TestUtils.testModelSerialization(net);
    }
}
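
In this and the following constraint tests, norm2(1) reduces along dimension 1, so the assertions bound the L2 norm of each row of the parameter array rather than the norm of the whole array. A minimal illustration with a hypothetical 2x3 matrix:

INDArray m = Nd4j.create(new double[][]{{3, 4, 0}, {0, 0, 5}});
System.out.println(m.norm2(1));            // [5.0, 5.0]: sqrt(3^2 + 4^2) and sqrt(5^2)
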
 
Example 7
Source File: TestConstraints.java    From deeplearning4j with Apache License 2.0
@Test
public void testLayerBiasConstraints() throws Exception {

    LayerConstraint[] constraints = new LayerConstraint[]{
            new MaxNormConstraint(0.5, 1),
            new MinMaxNormConstraint(0.3, 0.4, 1.0, 1),
            new NonNegativeConstraint(),
            new UnitNormConstraint(1)
    };

    for (LayerConstraint lc : constraints) {

        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .updater(new Sgd(0.0))
                .dist(new NormalDistribution(0, 5))
                .biasInit(10.0)
                .list()
                .layer(new DenseLayer.Builder().nIn(12).nOut(10)
                        .constrainBias(lc).build())
                .layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        LayerConstraint exp = lc.clone();
        assertEquals(exp.toString(), net.getLayer(0).conf().getLayer().getConstraints().get(0).toString());

        INDArray input = Nd4j.rand(3, 12);
        INDArray labels = Nd4j.rand(3, 8);

        net.fit(input, labels);

        INDArray b0 = net.getParam("0_b");


        if (lc instanceof MaxNormConstraint) {
            assertTrue(b0.norm2(1).maxNumber().doubleValue() <= 0.5);

        } else if (lc instanceof MinMaxNormConstraint) {
            assertTrue(b0.norm2(1).minNumber().doubleValue() >= 0.3);
            assertTrue(b0.norm2(1).maxNumber().doubleValue() <= 0.4);
        } else if (lc instanceof NonNegativeConstraint) {
            assertTrue(b0.minNumber().doubleValue() >= 0.0);
        } else if (lc instanceof UnitNormConstraint) {
            assertEquals(1.0, b0.norm2(1).minNumber().doubleValue(), 1e-6);
            assertEquals(1.0, b0.norm2(1).maxNumber().doubleValue(), 1e-6);
        }

        TestUtils.testModelSerialization(net);
    }
}
 
Example 8
Source File: TestConstraints.java    From deeplearning4j with Apache License 2.0
@Test
public void testLayerWeightsConstraints() throws Exception {

    LayerConstraint[] constraints = new LayerConstraint[]{
            new MaxNormConstraint(0.5, 1),
            new MinMaxNormConstraint(0.3, 0.4, 1.0, 1),
            new NonNegativeConstraint(),
            new UnitNormConstraint(1)
    };

    for (LayerConstraint lc : constraints) {

        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .updater(new Sgd(0.0))
                .dist(new NormalDistribution(0, 5))
                .list()
                .layer(new DenseLayer.Builder().nIn(12).nOut(10)
                        .constrainWeights(lc).build())
                .layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        LayerConstraint exp = lc.clone();
        assertEquals(exp.toString(), net.getLayer(0).conf().getLayer().getConstraints().get(0).toString());

        INDArray input = Nd4j.rand(3, 12);
        INDArray labels = Nd4j.rand(3, 8);

        net.fit(input, labels);

        INDArray w0 = net.getParam("0_W");


        if (lc instanceof MaxNormConstraint) {
            assertTrue(w0.norm2(1).maxNumber().doubleValue() <= 0.5);

        } else if (lc instanceof MinMaxNormConstraint) {
            assertTrue(w0.norm2(1).minNumber().doubleValue() >= 0.3);
            assertTrue(w0.norm2(1).maxNumber().doubleValue() <= 0.4);
        } else if (lc instanceof NonNegativeConstraint) {
            assertTrue(w0.minNumber().doubleValue() >= 0.0);
        } else if (lc instanceof UnitNormConstraint) {
            assertEquals(1.0, w0.norm2(1).minNumber().doubleValue(), 1e-6);
            assertEquals(1.0, w0.norm2(1).maxNumber().doubleValue(), 1e-6);
        }

        TestUtils.testModelSerialization(net);
    }
}
 
Example 9
Source File: TestConstraints.java    From deeplearning4j with Apache License 2.0
@Test
public void testLayerWeightsAndBiasConstraints() throws Exception {

    LayerConstraint[] constraints = new LayerConstraint[]{
            new MaxNormConstraint(0.5, 1),
            new MinMaxNormConstraint(0.3, 0.4, 1.0, 1),
            new NonNegativeConstraint(),
            new UnitNormConstraint(1)
    };

    for (LayerConstraint lc : constraints) {

        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .updater(new Sgd(0.0))
                .dist(new NormalDistribution(0, 5))
                .biasInit(0.2)
                .list()
                .layer(new DenseLayer.Builder().nIn(12).nOut(10)
                        .constrainAllParameters(lc).build())
                .layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        LayerConstraint exp = lc.clone();
        assertEquals(exp.toString(), net.getLayer(0).conf().getLayer().getConstraints().get(0).toString());

        INDArray input = Nd4j.rand(3, 12);
        INDArray labels = Nd4j.rand(3, 8);

        net.fit(input, labels);

        INDArray w0 = net.getParam("0_W");
        INDArray b0 = net.getParam("0_b");


        if (lc instanceof MaxNormConstraint) {
            assertTrue(w0.norm2(1).maxNumber().doubleValue() <= 0.5);
            assertTrue(b0.norm2(1).maxNumber().doubleValue() <= 0.5);

        } else if (lc instanceof MinMaxNormConstraint) {
            assertTrue(w0.norm2(1).minNumber().doubleValue() >= 0.3);
            assertTrue(w0.norm2(1).maxNumber().doubleValue() <= 0.4);
            assertTrue(b0.norm2(1).minNumber().doubleValue() >= 0.3);
            assertTrue(b0.norm2(1).maxNumber().doubleValue() <= 0.4);
        } else if (lc instanceof NonNegativeConstraint) {
            assertTrue(w0.minNumber().doubleValue() >= 0.0);
            assertTrue(b0.minNumber().doubleValue() >= 0.0);
        } else if (lc instanceof UnitNormConstraint) {
            assertEquals(1.0, w0.norm2(1).minNumber().doubleValue(), 1e-6);
            assertEquals(1.0, w0.norm2(1).maxNumber().doubleValue(), 1e-6);
            assertEquals(1.0, b0.norm2(1).minNumber().doubleValue(), 1e-6);
            assertEquals(1.0, b0.norm2(1).maxNumber().doubleValue(), 1e-6);
        }

        TestUtils.testModelSerialization(net);
    }
}
 
Example 10
Source File: TestConstraints.java    From deeplearning4j with Apache License 2.0
@Test
public void testLayerWeightsAndBiasSeparateConstraints() throws Exception {

    LayerConstraint[] constraints = new LayerConstraint[]{
            new MaxNormConstraint(0.5, 1),
            new MinMaxNormConstraint(0.3, 0.4, 1.0, 1),
            new NonNegativeConstraint(),
            new UnitNormConstraint(1)
    };

    for (LayerConstraint lc : constraints) {

        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .updater(new Sgd(0.0))
                .dist(new NormalDistribution(0, 5))
                .biasInit(0.2)
                .list()
                .layer(new DenseLayer.Builder().nIn(12).nOut(10)
                        .constrainWeights(lc).constrainBias(lc).build())
                .layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        LayerConstraint exp = lc.clone();
        assertEquals(exp.toString(), net.getLayer(0).conf().getLayer().getConstraints().get(0).toString());

        INDArray input = Nd4j.rand(3, 12);
        INDArray labels = Nd4j.rand(3, 8);

        net.fit(input, labels);

        INDArray w0 = net.getParam("0_W");
        INDArray b0 = net.getParam("0_b");


        if (lc instanceof MaxNormConstraint) {
            assertTrue(w0.norm2(1).maxNumber().doubleValue() <= 0.5);
            assertTrue(b0.norm2(1).maxNumber().doubleValue() <= 0.5);

        } else if (lc instanceof MinMaxNormConstraint) {
            assertTrue(w0.norm2(1).minNumber().doubleValue() >= 0.3);
            assertTrue(w0.norm2(1).maxNumber().doubleValue() <= 0.4);
            assertTrue(b0.norm2(1).minNumber().doubleValue() >= 0.3);
            assertTrue(b0.norm2(1).maxNumber().doubleValue() <= 0.4);
        } else if (lc instanceof NonNegativeConstraint) {
            assertTrue(w0.minNumber().doubleValue() >= 0.0);
            assertTrue(b0.minNumber().doubleValue() >= 0.0);
        } else if (lc instanceof UnitNormConstraint) {
            assertEquals(1.0, w0.norm2(1).minNumber().doubleValue(), 1e-6);
            assertEquals(1.0, w0.norm2(1).maxNumber().doubleValue(), 1e-6);
            assertEquals(1.0, b0.norm2(1).minNumber().doubleValue(), 1e-6);
            assertEquals(1.0, b0.norm2(1).maxNumber().doubleValue(), 1e-6);
        }

        TestUtils.testModelSerialization(net);
    }
}
 
Example 11
Source File: TestConstraints.java    From deeplearning4j with Apache License 2.0
@Test
public void testModelConstraints() throws Exception {

    LayerConstraint[] constraints = new LayerConstraint[]{
            new MaxNormConstraint(0.5, 1),
            new MinMaxNormConstraint(0.3, 0.4, 1.0, 1),
            new NonNegativeConstraint(),
            new UnitNormConstraint(1)
    };

    for(LayerConstraint lc : constraints){

        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .constrainWeights(lc)
                .updater(new Sgd(0.0))
                .dist(new NormalDistribution(0,5))
                .biasInit(1)
                .list()
                .layer(new DenseLayer.Builder().nIn(12).nOut(10).build())
                .layer(new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nIn(10).nOut(8).build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        LayerConstraint exp = lc.clone();
        assertEquals(exp.toString(), net.getLayer(0).conf().getLayer().getConstraints().get(0).toString());
        assertEquals(exp.toString(), net.getLayer(1).conf().getLayer().getConstraints().get(0).toString());

        INDArray input = Nd4j.rand(3, 12);
        INDArray labels = Nd4j.rand(3, 8);

        net.fit(input, labels);

        INDArray w0 = net.getParam("0_W");
        INDArray w1 = net.getParam("1_W");

        if(lc instanceof MaxNormConstraint){
            assertTrue(w0.norm2(1).maxNumber().doubleValue() <= 0.5 );
            assertTrue(w1.norm2(1).maxNumber().doubleValue() <= 0.5 );
        } else if(lc instanceof MinMaxNormConstraint){
            assertTrue(w0.norm2(1).minNumber().doubleValue() >= 0.3 );
            assertTrue(w0.norm2(1).maxNumber().doubleValue() <= 0.4 );
            assertTrue(w1.norm2(1).minNumber().doubleValue() >= 0.3 );
            assertTrue(w1.norm2(1).maxNumber().doubleValue() <= 0.4 );
        } else if(lc instanceof NonNegativeConstraint ){
            assertTrue(w0.minNumber().doubleValue() >= 0.0 );
        } else if(lc instanceof UnitNormConstraint ){
            assertEquals(1.0, w0.norm2(1).minNumber().doubleValue(),  1e-6 );
            assertEquals(1.0, w0.norm2(1).maxNumber().doubleValue(), 1e-6 );
            assertEquals(1.0, w1.norm2(1).minNumber().doubleValue(), 1e-6 );
            assertEquals(1.0, w1.norm2(1).maxNumber().doubleValue(), 1e-6 );
        }

        TestUtils.testModelSerialization(net);
    }
}
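
Unlike Examples 6 through 10, which attach the constraint to an individual layer, this last test sets constrainWeights(lc) once on the NeuralNetConfiguration.Builder, so it is inherited by every layer in the list; getParam() is therefore called on both "0_W" and "1_W" to verify the constraint on each layer's weights.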