Java Code Examples for org.deeplearning4j.arbiter.MultiLayerSpace#numParameters()

The following examples show how to use org.deeplearning4j.arbiter.MultiLayerSpace#numParameters(). They are taken from the deeplearning4j project's test suite.
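Across these tests, numParameters() reports how many free (to-be-optimized) hyperparameters the search space exposes: a fully fixed space reports 0, while each leaf ParameterSpace (continuous, integer, or discrete) contributes its own count, and that count is also the expected length of the double[] passed to getValue(). Below is a minimal sketch of that contract, modeled on the tests that follow; the import paths are a best guess at the Arbiter package layout, so treat it as illustrative rather than canonical.

import java.util.Arrays;

import org.deeplearning4j.arbiter.MultiLayerSpace;
import org.deeplearning4j.arbiter.layers.DenseLayerSpace;
import org.deeplearning4j.arbiter.layers.OutputLayerSpace;
import org.deeplearning4j.arbiter.optimize.generator.RandomSearchGenerator;
import org.deeplearning4j.arbiter.optimize.parameter.continuous.ContinuousParameterSpace;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.learning.config.Sgd;

public class NumParametersSketch {
    public static void main(String[] args) {
        MultiLayerSpace mls = new MultiLayerSpace.Builder()
                .updater(new Sgd(0.005)).seed(12345)
                .addLayer(new DenseLayerSpace.Builder()
                        .l1Bias(new ContinuousParameterSpace(0, 1)) //the single tunable leaf
                        .nIn(10).nOut(10).build())
                .addLayer(new OutputLayerSpace.Builder()
                        .activation(Activation.SOFTMAX).nIn(10).nOut(5).build())
                .build();

        int nParams = mls.numParameters();    //1: one entry per free hyperparameter
        new RandomSearchGenerator(mls, null); //side effect: assigns indices to the leaves

        //getValue() expects one double in [0,1] per free parameter
        double[] point = new double[nParams];
        Arrays.fill(point, 0.5);              //midpoint of the l1Bias range
        MultiLayerConfiguration conf = mls.getValue(point).getMultiLayerConfiguration();
        System.out.println("numParameters = " + nParams);
    }
}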
Example 1
Source File: TestMultiLayerSpace.java    From deeplearning4j with Apache License 2.0
@Test
public void testILossFunctionGetsSet() {
    ILossFunction lossFunction = new LossMCXENT(Nd4j.create(new float[] {1f, 2f}, new long[]{1,2}));

    MultiLayerConfiguration expected =
            new NeuralNetConfiguration.Builder().updater(new Sgd(0.005)).seed(12345).list()
                    .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
                    .layer(1, new DenseLayer.Builder().nIn(10).nOut(10).build())
                    .layer(2, new OutputLayer.Builder().lossFunction(lossFunction)
                            .activation(Activation.SOFTMAX).nIn(10).nOut(5).build())
                    .build();

    MultiLayerSpace mls = new MultiLayerSpace.Builder().updater(new Sgd(0.005)).seed(12345)
                    .addLayer(new DenseLayerSpace.Builder().nIn(10).nOut(10).build(), new FixedValue<>(2)) //2 identical layers
                    .addLayer(new OutputLayerSpace.Builder().iLossFunction(lossFunction).activation(Activation.SOFTMAX).nIn(10).nOut(5).build())
                    .build();

    int nParams = mls.numParameters();
    assertEquals(0, nParams);

    MultiLayerConfiguration conf = mls.getValue(new double[0]).getMultiLayerConfiguration();

    assertEquals(expected, conf);
}
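Because every hyperparameter here is fixed (even the layer count comes from FixedValue), numParameters() returns 0 and getValue() can be called with an empty array; the same pattern appears in Examples 2 and 3 below.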
 
Example 2
Source File: TestMultiLayerSpace.java    From deeplearning4j with Apache License 2.0
@Test
public void testGlobalPoolingBasic() {

    MultiLayerConfiguration expected = new NeuralNetConfiguration.Builder().updater(new Sgd(0.005)).seed(12345).list()
                    .layer(0, new GravesLSTM.Builder().nIn(10).nOut(10).build())
                    .layer(1, new GlobalPoolingLayer.Builder().poolingType(PoolingType.SUM).pnorm(7).build())
                    .layer(2, new OutputLayer.Builder().lossFunction(LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(10).nOut(5).build())
                    .build();

    MultiLayerSpace mls = new MultiLayerSpace.Builder().updater(new Sgd(0.005)).seed(12345)
            .addLayer(new GravesLSTMLayerSpace.Builder().nIn(10).nOut(10).build())
            .addLayer(new GlobalPoolingLayerSpace.Builder().poolingType(PoolingType.SUM)
                    .pNorm(7).build())
            .addLayer(new OutputLayerSpace.Builder().lossFunction(LossFunction.MCXENT)
                    .activation(Activation.SOFTMAX).nIn(10).nOut(5).build())
            .build();

    int nParams = mls.numParameters();
    assertEquals(0, nParams);

    MultiLayerConfiguration conf = mls.getValue(new double[0]).getMultiLayerConfiguration();

    assertEquals(expected, conf);
}
 
Example 3
Source File: TestMultiLayerSpace.java    From deeplearning4j with Apache License 2.0
@Test
public void testBasic() {

    MultiLayerConfiguration expected =
            new NeuralNetConfiguration.Builder()
                    .updater(new Sgd(0.005)).seed(12345).list()
                    .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
                    .layer(1, new DenseLayer.Builder().nIn(10).nOut(10).build())
                    .layer(2, new OutputLayer.Builder().lossFunction(LossFunction.MCXENT)
                            .activation(Activation.SOFTMAX).nIn(10).nOut(5).build())
                    .build();

    MultiLayerSpace mls =
            new MultiLayerSpace.Builder()
                    .updater(new Sgd(0.005)).seed(12345)
                    .addLayer(new DenseLayerSpace.Builder().nIn(10).nOut(10).build(),
                            new FixedValue<>(2)) //2 identical layers
                    .addLayer(new OutputLayerSpace.Builder().lossFunction(LossFunction.MCXENT)
                            .activation(Activation.SOFTMAX).nIn(10).nOut(5).build())
                    .build();

    int nParams = mls.numParameters();
    assertEquals(0, nParams);

    MultiLayerConfiguration conf = mls.getValue(new double[0]).getMultiLayerConfiguration();

    assertEquals(expected, conf);
}
 
Example 4
Source File: TestMultiLayerSpace.java    From deeplearning4j with Apache License 2.0
@Test
public void testWeightedLossFunction() {

    MultiLayerConfiguration expected =
            new NeuralNetConfiguration.Builder().updater(new Sgd(0.005)).seed(12345).list()
                    .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
                    .layer(1, new DenseLayer.Builder().nIn(10).nOut(10).build())
                    .layer(2, new OutputLayer.Builder()
                            .lossFunction(new LossMSE(Nd4j.create(
                                    new double[] {1, 2, 3, 4, 5}, new long[]{1, 5})))
                            .nIn(10).nOut(5).build())
                    .build();

    MultiLayerSpace mls =
            new MultiLayerSpace.Builder().updater(new Sgd(0.005)).seed(12345)
                    .addLayer(new DenseLayerSpace.Builder().nIn(10).nOut(10).build(),
                            new FixedValue<>(2)) //2 identical layers
                    .addLayer(new OutputLayerSpace.Builder()
                            .iLossFunction(new LossMSE(Nd4j.create(new double[] {1, 2, 3, 4, 5}, new long[]{1, 5})))
                            .nIn(10).nOut(5).build())
                    .build();

    int nParams = mls.numParameters();
    assertEquals(0, nParams);

    MultiLayerConfiguration conf = mls.getValue(new double[0]).getMultiLayerConfiguration();

    assertEquals(expected, conf);

    String json = mls.toJson();
    MultiLayerSpace fromJson = MultiLayerSpace.fromJson(json);

    assertEquals(mls, fromJson);
}
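In addition to the numParameters() check, this test round-trips the space through toJson() and fromJson(), verifying that a weighted loss function (LossMSE with a fixed weight array) survives serialization of the space.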
 
Example 5
Source File: TestMultiLayerSpace.java    From deeplearning4j with Apache License 2.0
@Test
public void testDropout(){

    MultiLayerSpace mls = new MultiLayerSpace.Builder().updater(new Sgd(0.005)).seed(12345)
            .addLayer(new ConvolutionLayerSpace.Builder().nOut(2)
                    .dropOut(new ContinuousParameterSpace(0.4,0.6))
                    .build())
            .addLayer(new GlobalPoolingLayerSpace.Builder().dropOut(new ContinuousParameterSpace(0.4,0.6)).build())
            .addLayer(new OutputLayerSpace.Builder().activation(Activation.SOFTMAX).nIn(10).nOut(5).build())
            .setInputType(InputType.convolutional(28, 28, 1))
            .build();

    int nParams = mls.numParameters();
    List<ParameterSpace> l = LeafUtils.getUniqueObjects(mls.collectLeaves());
    int x=0;
    for( ParameterSpace p : l){
        int n = p.numParameters();
        int[] arr = new int[n];
        for(int i=0; i<arr.length; i++ ){
            arr[i] = x++;
        }
        p.setIndices(arr);
    }

    //Smoke test: generating a concrete configuration should not throw
    MultiLayerConfiguration conf = mls.getValue(new double[nParams]).getMultiLayerConfiguration();
}
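Here the two ContinuousParameterSpace dropout ranges are the free hyperparameters counted by numParameters(). The loop assigns each unique leaf a block of consecutive global indices, a job a candidate generator normally performs, so that getValue() knows which entry of its input array drives which hyperparameter. Example 6 repeats the same check with a DropoutLayerSpace in place of the global pooling layer.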
 
Example 6
Source File: TestMultiLayerSpace.java    From deeplearning4j with Apache License 2.0
@Test
public void testDropout2(){

    MultiLayerSpace mls = new MultiLayerSpace.Builder().updater(new Sgd(0.005)).seed(12345)
            .addLayer(new ConvolutionLayerSpace.Builder().nOut(2)
                    .dropOut(new ContinuousParameterSpace(0.4,0.6))
                    .build())
            .addLayer(new DropoutLayerSpace.Builder().dropOut(new ContinuousParameterSpace(0.4,0.6)).build())
            .addLayer(new OutputLayerSpace.Builder().activation(Activation.SOFTMAX).nIn(10).nOut(5).build())
            .setInputType(InputType.convolutional(28, 28, 1))
            .build();

    int nParams = mls.numParameters();
    List<ParameterSpace> l = LeafUtils.getUniqueObjects(mls.collectLeaves());
    int x=0;
    for( ParameterSpace p : l){
        int n = p.numParameters();
        int[] arr = new int[n];
        for(int i=0; i<arr.length; i++ ){
            arr[i] = x++;
        }
        p.setIndices(arr);
    }

    //Smoke test: generating a concrete configuration should not throw
    MultiLayerConfiguration conf = mls.getValue(new double[nParams]).getMultiLayerConfiguration();
}
 
Example 7
Source File: TestMultiLayerSpace.java    From deeplearning4j with Apache License 2.0
@Test
public void testBasic0() {
    MultiLayerConfiguration expected =
            new NeuralNetConfiguration.Builder()
                    .l1Bias(0.4)
                    .l2Bias(0.5)
                    .constrainBias(new NonNegativeConstraint())
                    .updater(new Sgd(0.005)).seed(12345).list()
                    .layer(0, new DenseLayer.Builder().l1Bias(0.6).nIn(10).nOut(10).build())
                    .layer(1, new DenseLayer.Builder().l2Bias(0.7).constrainBias(new UnitNormConstraint()).nIn(10).nOut(10).build())
                    .layer(2, new OutputLayer.Builder().lossFunction(LossFunction.MCXENT).activation(Activation.SOFTMAX)
                            .nIn(10).nOut(5).build())
                    .build();

    MultiLayerSpace mls =
            new MultiLayerSpace.Builder()
                    .l1Bias(0.4)
                    .l2Bias(0.5)
                    .constrainBias(new NonNegativeConstraint())
                    .updater(new Sgd(0.005)).seed(12345)
                    .addLayer(new DenseLayerSpace.Builder().l1Bias(new ContinuousParameterSpace(0,1)).nIn(10).nOut(10).build())
                    .addLayer(new DenseLayerSpace.Builder().l2Bias(0.7).constrainBias(new UnitNormConstraint()).nIn(10).nOut(10).build())
                    .addLayer(new OutputLayerSpace.Builder().lossFunction(LossFunction.MCXENT).activation(Activation.SOFTMAX)
                            .nIn(10).nOut(5).build())
                    .build();

    int nParams = mls.numParameters();
    assertEquals(1, nParams);

    //Assign numbers to each leaf ParameterSpace object (normally done by candidate generator - manual here for testing)
    List<ParameterSpace> noDuplicatesList = LeafUtils.getUniqueObjects(mls.collectLeaves());

    //Second: assign each a number
    int c = 0;
    for (ParameterSpace ps : noDuplicatesList) {
        int np = ps.numParameters();
        if (np == 1) {
            ps.setIndices(c++);
        } else {
            int[] values = new int[np];
            for (int j = 0; j < np; j++)
                values[j] = c++;    //give each parameter the next free global index
            ps.setIndices(values);
        }
    }
    MultiLayerConfiguration conf = mls.getValue(new double[] {0.6}).getMultiLayerConfiguration();

    assertEquals(expected, conf);
}
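Only the l1Bias ContinuousParameterSpace(0, 1) on the first layer space is free, hence the assertion that numParameters() is 1; the single input value 0.6 maps through that space to l1Bias = 0.6, which is exactly the fixed value used in the expected configuration.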
 
Example 8
Source File: TestMultiLayerSpace.java    From deeplearning4j with Apache License 2.0
@Test
public void testVariationalAutoencoderLayerSpaceBasic() {
    MultiLayerSpace mls =
            new MultiLayerSpace.Builder()
                    .updater(new Sgd(0.005)).seed(12345)
                    .addLayer(new VariationalAutoencoderLayerSpace.Builder()
                            .nIn(new IntegerParameterSpace(50, 75)).nOut(200)
                            .encoderLayerSizes(234, 567).decoderLayerSizes(123, 456)
                            .reconstructionDistribution(new DiscreteParameterSpace<ReconstructionDistribution>(
                                    new GaussianReconstructionDistribution(),
                                    new BernoulliReconstructionDistribution()))
                            .build())
                    .build();

    int numParams = mls.numParameters();

    //Assign numbers to each leaf ParameterSpace object (normally done by candidate generator - manual here for testing)
    List<ParameterSpace> noDuplicatesList = LeafUtils.getUniqueObjects(mls.collectLeaves());

    //Second: assign each a number
    int c = 0;
    for (ParameterSpace ps : noDuplicatesList) {
        int np = ps.numParameters();
        if (np == 1) {
            ps.setIndices(c++);
        } else {
            int[] values = new int[np];
            for (int j = 0; j < np; j++)
                values[j] = c++;    //give each parameter the next free global index
            ps.setIndices(values);
        }
    }

    double[] zeros = new double[numParams];

    DL4JConfiguration configuration = mls.getValue(zeros);

    MultiLayerConfiguration conf = configuration.getMultiLayerConfiguration();
    assertEquals(1, conf.getConfs().size());

    NeuralNetConfiguration nnc = conf.getConf(0);
    VariationalAutoencoder vae = (VariationalAutoencoder) nnc.getLayer();

    assertEquals(50, vae.getNIn());
    assertEquals(200, vae.getNOut());

    assertArrayEquals(new int[] {234, 567}, vae.getEncoderLayerSizes());
    assertArrayEquals(new int[] {123, 456}, vae.getDecoderLayerSizes());

    assertTrue(vae.getOutputDistribution() instanceof GaussianReconstructionDistribution);

    double[] ones = new double[numParams];
    for (int i = 0; i < ones.length; i++)
        ones[i] = 1.0;

    configuration = mls.getValue(ones);

    conf = configuration.getMultiLayerConfiguration();
    assertEquals(1, conf.getConfs().size());

    nnc = conf.getConf(0);
    vae = (VariationalAutoencoder) nnc.getLayer();

    assertEquals(75, vae.getNIn());
    assertEquals(200, vae.getNOut());

    assertArrayEquals(new int[] {234, 567}, vae.getEncoderLayerSizes());
    assertArrayEquals(new int[] {123, 456}, vae.getDecoderLayerSizes());

    assertTrue(vae.getOutputDistribution() instanceof BernoulliReconstructionDistribution);
}
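The all-zeros input selects the bottom of every space: nIn = 50 from IntegerParameterSpace(50, 75) and the first DiscreteParameterSpace option (Gaussian). The all-ones input selects the top: nIn = 75 and the last option (Bernoulli). Fixed settings such as nOut(200) and the encoder/decoder layer sizes are unaffected by the input vector.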
 
Example 9
Source File: TestMultiLayerSpace.java    From deeplearning4j with Apache License 2.0
@Test
public void testMathOps() {

    ParameterSpace<Integer> firstLayerSize = new IntegerParameterSpace(10,30);
    ParameterSpace<Integer> secondLayerSize = new MathOp<>(firstLayerSize, Op.MUL, 3);
    ParameterSpace<Double> firstLayerLR = new ContinuousParameterSpace(0.01, 0.1);
    ParameterSpace<Double> secondLayerLR = new MathOp<>(firstLayerLR, Op.ADD, 0.2);

    MultiLayerSpace mls =
            new MultiLayerSpace.Builder().updater(new Sgd(0.005))
                    .seed(12345)
                    .layer(new DenseLayerSpace.Builder().nOut(firstLayerSize)
                            .updater(new AdamSpace(firstLayerLR))
                            .build())
                    .layer(new OutputLayerSpace.Builder().nOut(secondLayerSize)
                            .updater(new AdamSpace(secondLayerLR))
                            .activation(Activation.SOFTMAX)
                            .build())
                    .setInputType(InputType.feedForward(10))
                    .build();

    int nParams = mls.numParameters();
    assertEquals(2, nParams);

    new RandomSearchGenerator(mls, null);    //Initializes the indices

    Random r = new Random(12345);
    for( int i=0; i<10; i++ ){
        double[] d = new double[nParams];
        for( int j=0; j<d.length; j++ ){
            d[j] = r.nextDouble();
        }

        MultiLayerConfiguration conf = mls.getValue(d).getMultiLayerConfiguration();
        long l0Size = ((FeedForwardLayer)conf.getConf(0).getLayer()).getNOut();
        long l1Size = ((FeedForwardLayer)conf.getConf(1).getLayer()).getNOut();
        assertEquals(3*l0Size, l1Size);

        double l0Lr = ((FeedForwardLayer)conf.getConf(0).getLayer()).getIUpdater().getLearningRate(0,0);
        double l1Lr = ((FeedForwardLayer)conf.getConf(1).getLayer()).getIUpdater().getLearningRate(0,0);
        assertEquals(l0Lr+0.2, l1Lr, 1e-6);
    }
}
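Although four parameter spaces are declared, numParameters() is 2: the MathOp-derived spaces (secondLayerSize, secondLayerLR) contribute no parameters of their own and are recomputed from the values drawn for the spaces they wrap, which is what the nOut and learning-rate assertions confirm.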
 
Example 10
Source File: TestMultiLayerSpace.java    From deeplearning4j with Apache License 2.0
@Test
public void testDropoutSpace(){

    ParameterSpace<Double> dropout = new DiscreteParameterSpace<>(0.0, 0.5);

    MultiLayerSpace mls =
            new MultiLayerSpace.Builder().updater(new Sgd(0.005))
                    .dropOut(dropout)
                    .seed(12345)
                    .layer(new DenseLayerSpace.Builder().nOut(10)
                            .build())
                    .layer(new OutputLayerSpace.Builder().nOut(10).activation(Activation.SOFTMAX)
                            .build())
                    .setInputType(InputType.feedForward(10))
                    .build();

    int nParams = mls.numParameters();
    assertEquals(1, nParams);

    new RandomSearchGenerator(mls, null);    //Initializes the indices

    Random r = new Random(12345);
    int countNull = 0;
    int count05 = 0;
    for( int i=0; i<10; i++ ){
        double[] d = new double[nParams];
        for( int j=0; j<d.length; j++ ){
            d[j] = r.nextDouble();
        }

        MultiLayerConfiguration conf = mls.getValue(d).getMultiLayerConfiguration();
        IDropout d0 = conf.getConf(0).getLayer().getIDropout();
        IDropout d1 = conf.getConf(1).getLayer().getIDropout();

        if(d0 == null){
            assertNull(d1);
            countNull++;
        } else {
            Dropout do0 = (Dropout)d0;
            Dropout do1 = (Dropout)d1;

            assertEquals(0.5, do0.getP(), 0.0);
            assertEquals(0.5, do1.getP(), 0.0);
            count05++;
        }
    }
    assertTrue(countNull > 0);
    assertTrue(count05 > 0);
}
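With DiscreteParameterSpace<>(0.0, 0.5) set at the MultiLayerSpace level, a drawn value of 0.0 means no dropout (getIDropout() returns null for both layers), while 0.5 produces a Dropout with p = 0.5 on both layers; the two counters confirm that random search actually hits both options.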