Java Code Examples for org.deeplearning4j.arbiter.optimize.api.ParameterSpace#numParameters()

The following examples show how to use org.deeplearning4j.arbiter.optimize.api.ParameterSpace#numParameters(). You can vote up the examples you like or vote down the ones you don't, and go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example 1
Source File: MultiLayerSpace.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
/**
 * Constructs a MultiLayerSpace from its builder, copying the builder's configuration
 * and pre-computing the total number of hyperparameters across all unique leaf spaces.
 *
 * @param builder builder holding the layer spaces, input configuration and workspace modes
 */
protected MultiLayerSpace(Builder builder) {
    super(builder);
    this.inputType = builder.inputType;
    this.inputPreProcessors = builder.inputPreProcessors;

    this.earlyStoppingConfiguration = builder.earlyStoppingConfiguration;

    this.layerSpaces = builder.layerSpaces;

    //Determine total number of parameters:
    //Collect the leaves, and make sure they are unique.
    //Note that the *object instances* must be unique - and consequently we don't want to use .equals(), as
    // this would incorrectly filter out equal range parameter spaces
    List<ParameterSpace> allLeaves = collectLeaves();
    List<ParameterSpace> uniqueLeaves = LeafUtils.getUniqueObjects(allLeaves);

    for (ParameterSpace ps : uniqueLeaves) {
        //Accumulate once per unique leaf (previously numParameters() was called twice
        // per iteration, with the first result stored in an unused local)
        numParameters += ps.numParameters();
    }

    this.trainingWorkspaceMode = builder.trainingWorkspaceMode;
    this.inferenceWorkspaceMode = builder.inferenceWorkspaceMode;
    this.validateOutputLayerConfig = builder.validateOutputLayerConfig;
}
 
Example 2
Source File: ComputationGraphSpace.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
/**
 * Constructs a ComputationGraphSpace from its builder, copying the graph structure
 * (layers, vertices, inputs/outputs) and pre-computing the total hyperparameter count.
 *
 * @param builder builder holding the graph definition and workspace configuration
 */
protected ComputationGraphSpace(Builder builder) {
    super(builder);

    this.earlyStoppingConfiguration = builder.earlyStoppingConfiguration;
    this.layerSpaces = builder.layerList;
    this.vertices = builder.vertexList;

    this.networkInputs = builder.networkInputs;
    this.networkOutputs = builder.networkOutputs;
    this.inputTypes = builder.inputTypes;
    this.trainingWorkspaceMode = builder.trainingWorkspaceMode;
    this.inferenceWorkspaceMode = builder.inferenceWorkspaceMode;
    this.validateOutputLayerConfig = builder.validateOutputLayerConfig;

    //Determine total number of parameters by summing over the unique leaf spaces
    // (object-identity uniqueness, so equal-valued ranges are not merged)
    List<ParameterSpace> uniqueLeaves = LeafUtils.getUniqueObjects(collectLeaves());
    for (ParameterSpace leaf : uniqueLeaves) {
        numParameters += leaf.numParameters();
    }
}
 
Example 3
Source File: BaseCandidateGenerator.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
/**
 * One-time setup: assigns a contiguous range of global indices to every unique
 * leaf parameter space, so each dimension of the candidate vector maps to exactly
 * one hyperparameter. Subsequent calls are no-ops.
 */
protected void initialize() {
    if (initDone) {
        return;     //Already assigned indices; nothing to do
    }

    //First: collect leaf parameter spaces objects and remove duplicates
    List<ParameterSpace> uniqueLeaves = LeafUtils.getUniqueObjects(parameterSpace.collectLeaves());

    //Second: hand out consecutive indices across all leaves
    int nextIndex = 0;
    for (ParameterSpace leaf : uniqueLeaves) {
        int count = leaf.numParameters();
        if (count == 1) {
            //Single-parameter leaf: takes one index
            leaf.setIndices(nextIndex++);
        } else {
            //Multi-parameter leaf: takes a consecutive block of indices
            int[] assigned = new int[count];
            for (int j = 0; j < count; j++) {
                assigned[j] = nextIndex++;
            }
            leaf.setIndices(assigned);
        }
    }
    initDone = true;
}
 
Example 4
Source File: BaseUpdaterSpace.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
/**
 * Returns the total number of hyperparameters in this updater space,
 * computed as the sum over all leaf parameter spaces.
 *
 * @return total parameter count across all leaves
 */
@Override
public int numParameters() {
    return collectLeaves().stream()
            .mapToInt(ParameterSpace::numParameters)
            .sum();
}
 
Example 5
Source File: BaseUpdaterSpace.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
/**
 * Distributes the given global indices across this space's leaf parameter spaces:
 * each leaf with {@code n > 0} parameters receives the next {@code n} indices.
 *
 * @param indices global indices to assign; length must be at least the total
 *                parameter count of all leaves
 */
@Override
public void setIndices(int... indices){
    int soFar = 0;
    for(ParameterSpace p : collectLeaves()){
        int numParams = p.numParameters();
        if(numParams <= 0){
            continue;
        }
        int[] subset = Arrays.copyOfRange(indices, soFar, soFar + numParams);
        p.setIndices(subset);
        //BUG FIX: advance the offset; previously soFar was never updated, so every
        // leaf received the same leading slice of the indices array
        soFar += numParams;
    }
}
 
Example 6
Source File: TestMultiLayerSpace.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
/**
 * Smoke test: a MultiLayerSpace with dropout parameter spaces on conv and global-pooling
 * layers can have indices assigned to all leaves and produce a configuration without error.
 */
@Test
public void testDropout(){

    MultiLayerSpace mls = new MultiLayerSpace.Builder().updater(new Sgd(0.005)).seed(12345)
            .addLayer(new ConvolutionLayerSpace.Builder().nOut(2)
                    .dropOut(new ContinuousParameterSpace(0.4,0.6))
                    .build())
            .addLayer(new GlobalPoolingLayerSpace.Builder().dropOut(new ContinuousParameterSpace(0.4,0.6)).build())
            .addLayer(new OutputLayerSpace.Builder().activation(Activation.SOFTMAX).nIn(10).nOut(5).build())
            .setInputType(InputType.convolutional(28, 28, 1))
            .build();

    int nParams = mls.numParameters();

    //Assign consecutive global indices to each unique leaf parameter space
    int nextIndex = 0;
    for (ParameterSpace leaf : LeafUtils.getUniqueObjects(mls.collectLeaves())) {
        int[] assigned = new int[leaf.numParameters()];
        for (int j = 0; j < assigned.length; j++) {
            assigned[j] = nextIndex++;
        }
        leaf.setIndices(assigned);
    }

    //Should build a valid configuration from an all-zeros candidate vector without throwing
    MultiLayerConfiguration conf = mls.getValue(new double[nParams]).getMultiLayerConfiguration();
}
 
Example 7
Source File: TestMultiLayerSpace.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
/**
 * Smoke test: a MultiLayerSpace with dropout parameter spaces on conv and dedicated
 * dropout layers can have indices assigned to all leaves and produce a configuration
 * without error.
 */
@Test
public void testDropout2(){

    MultiLayerSpace mls = new MultiLayerSpace.Builder().updater(new Sgd(0.005)).seed(12345)
            .addLayer(new ConvolutionLayerSpace.Builder().nOut(2)
                    .dropOut(new ContinuousParameterSpace(0.4,0.6))
                    .build())
            .addLayer(new DropoutLayerSpace.Builder().dropOut(new ContinuousParameterSpace(0.4,0.6)).build())
            .addLayer(new OutputLayerSpace.Builder().activation(Activation.SOFTMAX).nIn(10).nOut(5).build())
            .setInputType(InputType.convolutional(28, 28, 1))
            .build();

    int nParams = mls.numParameters();

    //Assign consecutive global indices to each unique leaf parameter space
    int nextIndex = 0;
    for (ParameterSpace leaf : LeafUtils.getUniqueObjects(mls.collectLeaves())) {
        int[] assigned = new int[leaf.numParameters()];
        for (int j = 0; j < assigned.length; j++) {
            assigned[j] = nextIndex++;
        }
        leaf.setIndices(assigned);
    }

    //Should build a valid configuration from an all-zeros candidate vector without throwing
    MultiLayerConfiguration conf = mls.getValue(new double[nParams]).getMultiLayerConfiguration();
}
 
Example 8
Source File: LeafUtils.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
/**
 * Count the number of unique parameters in the specified leaf nodes
 *
 * @param allLeaves Leaf values to count the parameters for
 * @return Number of parameters for all unique objects
 * @throws IllegalStateException if any element is not a leaf node
 */
public static int countUniqueParameters(List<ParameterSpace> allLeaves) {
    int total = 0;
    //Deduplicate by object identity, then sum each leaf's parameter count
    for (ParameterSpace leaf : getUniqueObjects(allLeaves)) {
        if (!leaf.isLeaf()) {
            throw new IllegalStateException("Method should only be used with leaf nodes");
        }
        total += leaf.numParameters();
    }
    return total;
}
 
Example 9
Source File: TestMultiLayerSpace.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
/**
 * Verifies that per-layer and global bias regularization/constraints set via
 * MultiLayerSpace produce the same configuration as the equivalent fixed
 * NeuralNetConfiguration, with a single continuous hyperparameter (layer-0 l1Bias).
 */
@Test
public void testBasic0() {
    MultiLayerConfiguration expected =
            new NeuralNetConfiguration.Builder()
                    .l1Bias(0.4)
                    .l2Bias(0.5)
                    .constrainBias(new NonNegativeConstraint())
                    .updater(new Sgd(0.005)).seed(12345).list()
                    .layer(0, new DenseLayer.Builder().l1Bias(0.6).nIn(10).nOut(10).build())
                    .layer(1, new DenseLayer.Builder().l2Bias(0.7).constrainBias(new UnitNormConstraint()).nIn(10).nOut(10).build()).layer(2,
                    new OutputLayer.Builder().lossFunction(LossFunction.MCXENT).activation(Activation.SOFTMAX)
                            .nIn(10).nOut(5).build())
                    .build();

    MultiLayerSpace mls =
            new MultiLayerSpace.Builder()
                    .l1Bias(0.4)
                    .l2Bias(0.5)
                    .constrainBias(new NonNegativeConstraint())
                    .updater(new Sgd(0.005)).seed(12345)
                    .addLayer(new DenseLayerSpace.Builder().l1Bias(new ContinuousParameterSpace(0,1)).nIn(10).nOut(10).build())
                    .addLayer(new DenseLayerSpace.Builder().l2Bias(0.7).constrainBias(new UnitNormConstraint()).nIn(10).nOut(10).build())
                    .addLayer(new OutputLayerSpace.Builder().lossFunction(LossFunction.MCXENT).activation(Activation.SOFTMAX)
                            .nIn(10).nOut(5).build())
                    .build();

    int nParams = mls.numParameters();
    assertEquals(1, nParams);

    //Assign numbers to each leaf ParameterSpace object (normally done by candidate generator - manual here for testing)
    List<ParameterSpace> noDuplicatesList = LeafUtils.getUniqueObjects(mls.collectLeaves());

    //Second: assign each a number
    int c = 0;
    for (ParameterSpace ps : noDuplicatesList) {
        int np = ps.numParameters();
        if (np == 1) {
            ps.setIndices(c++);
        } else {
            int[] values = new int[np];
            for (int j = 0; j < np; j++)
                //BUG FIX: was values[c++] = j, which assigns wrong indices (and can go
                // out of bounds when c > 0); the correct pattern is values[j] = c++
                values[j] = c++;
            ps.setIndices(values);
        }
    }
    //Candidate value 0.6 for the single hyperparameter should reproduce l1Bias(0.6)
    MultiLayerConfiguration conf = mls.getValue(new double[] {0.6}).getMultiLayerConfiguration();

    assertEquals(expected, conf);
}
 
Example 10
Source File: TestMultiLayerSpace.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
/**
 * Verifies a VAE layer space: an IntegerParameterSpace on nIn and a
 * DiscreteParameterSpace of reconstruction distributions resolve to their
 * minimum/first choices for an all-zeros candidate vector and to their
 * maximum/last choices for an all-ones vector.
 */
@Test
public void testVariationalAutoencoderLayerSpaceBasic() {
    MultiLayerSpace mls =
                    new MultiLayerSpace.Builder()
                                    .updater(new Sgd(0.005)).seed(
                                                    12345)
                                    .addLayer(new VariationalAutoencoderLayerSpace.Builder()
                                                    .nIn(new IntegerParameterSpace(50, 75)).nOut(200)
                                                    .encoderLayerSizes(234, 567).decoderLayerSizes(123, 456)
                                                    .reconstructionDistribution(
                                                                    new DiscreteParameterSpace<ReconstructionDistribution>(
                                                                                    new GaussianReconstructionDistribution(),
                                                                                    new BernoulliReconstructionDistribution()))
                                                    .build())
                                    .build();

    int numParams = mls.numParameters();

    //Assign numbers to each leaf ParameterSpace object (normally done by candidate generator - manual here for testing)
    List<ParameterSpace> noDuplicatesList = LeafUtils.getUniqueObjects(mls.collectLeaves());

    //Second: assign each a number
    int c = 0;
    for (ParameterSpace ps : noDuplicatesList) {
        int np = ps.numParameters();
        if (np == 1) {
            ps.setIndices(c++);
        } else {
            int[] values = new int[np];
            for (int j = 0; j < np; j++)
                //BUG FIX: was values[c++] = j, which assigns wrong indices (and can go
                // out of bounds when c > 0); the correct pattern is values[j] = c++
                values[j] = c++;
            ps.setIndices(values);
        }
    }

    //All-zeros vector: integer space resolves to its minimum, discrete space to its first option
    double[] zeros = new double[numParams];

    DL4JConfiguration configuration = mls.getValue(zeros);

    MultiLayerConfiguration conf = configuration.getMultiLayerConfiguration();
    assertEquals(1, conf.getConfs().size());

    NeuralNetConfiguration nnc = conf.getConf(0);
    VariationalAutoencoder vae = (VariationalAutoencoder) nnc.getLayer();

    assertEquals(50, vae.getNIn());
    assertEquals(200, vae.getNOut());

    assertArrayEquals(new int[] {234, 567}, vae.getEncoderLayerSizes());
    assertArrayEquals(new int[] {123, 456}, vae.getDecoderLayerSizes());

    assertTrue(vae.getOutputDistribution() instanceof GaussianReconstructionDistribution);


    //All-ones vector: integer space resolves to its maximum, discrete space to its last option
    double[] ones = new double[numParams];
    for (int i = 0; i < ones.length; i++)
        ones[i] = 1.0;

    configuration = mls.getValue(ones);

    conf = configuration.getMultiLayerConfiguration();
    assertEquals(1, conf.getConfs().size());

    nnc = conf.getConf(0);
    vae = (VariationalAutoencoder) nnc.getLayer();

    assertEquals(75, vae.getNIn());
    assertEquals(200, vae.getNOut());

    assertArrayEquals(new int[] {234, 567}, vae.getEncoderLayerSizes());
    assertArrayEquals(new int[] {123, 456}, vae.getDecoderLayerSizes());

    assertTrue(vae.getOutputDistribution() instanceof BernoulliReconstructionDistribution);
}