Java Code Examples for org.deeplearning4j.arbiter.optimize.api.CandidateGenerator#getCandidate()

The following examples show how to use org.deeplearning4j.arbiter.optimize.api.CandidateGenerator#getCandidate() . You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: TestMultiLayerSpace.java    From deeplearning4j with Apache License 2.0
@Test
public void testSameRanges() {

    // Two distinct hyperparameters deliberately share the identical range
    // [0.001, 0.1]: the point of the test is that equal-but-separate spaces
    // do not collide during candidate generation.
    ParameterSpace<Double> l1Hyperparam = new ContinuousParameterSpace(0.001, 0.1);
    ParameterSpace<Double> l2Hyperparam = new ContinuousParameterSpace(0.001, 0.1);

    MultiLayerSpace hyperparameterSpace =
                    new MultiLayerSpace.Builder().addLayer(new DenseLayerSpace.Builder().nIn(10).nOut(10).build())
                                    .l1(l1Hyperparam).l2(l2Hyperparam).build();

    CandidateGenerator c = new RandomSearchGenerator(hyperparameterSpace, null);

    // Fix: the original test generated a candidate but asserted nothing, so a
    // null result (or any silent failure short of an exception) would pass.
    Candidate candidate = c.getCandidate();
    if (candidate == null) {
        throw new AssertionError("RandomSearchGenerator returned a null candidate for overlapping ranges");
    }
}
 
Example 2
Source File: TestMultiLayerSpace.java    From deeplearning4j with Apache License 2.0
@Test
public void testGridCandidateGenerator(){
    // Discrete search dimensions: 3 layer sizes x 3 learning rates = 9 grid points.
    ParameterSpace<Integer> sizeSpace = new DiscreteParameterSpace<>(32, 48, 64);
    ParameterSpace<Double> lrSpace = new DiscreteParameterSpace<>(0.005, 0.007, 0.01);

    MultiLayerSpace searchSpace = new MultiLayerSpace.Builder()
            .seed(12345)
            .biasInit(1)
            .l2(1e-4)
            .updater(new NesterovsSpace(lrSpace))
            .addLayer(new DenseLayerSpace.Builder().nIn(10).nOut(sizeSpace)
                    .weightInit(WeightInit.XAVIER)
                    .activation(Activation.RELU)
                    .build())
            .addLayer(new DenseLayerSpace.Builder().nIn(sizeSpace).nOut(sizeSpace)
                    .weightInit(WeightInit.XAVIER)
                    .activation(Activation.RELU)
                    .build())
            .addLayer(new OutputLayerSpace.Builder()
                    .lossFunction(LossFunctions.LossFunction.MSE)
                    .weightInit(WeightInit.XAVIER)
                    .activation(Activation.SOFTMAX)
                    .nIn(sizeSpace).nOut(10).build())
            .build();

    CandidateGenerator generator = new GridSearchCandidateGenerator(searchSpace, 30,
            GridSearchCandidateGenerator.Mode.Sequential, null);

    // Build the full Cartesian product of (learningRate, layerSize) pairs the
    // grid search is expected to visit exactly once each.
    Set<Pair<Double,Integer>> expected = new HashSet<>();
    for (double lr : new double[]{0.005, 0.007, 0.01}) {
        for (int size : new int[]{32, 48, 64}) {
            expected.add(new Pair<>(lr, size));
        }
    }

    // Drain the generator and record the (learningRate, nOut) pair actually
    // realized in the first layer of every produced configuration.
    Set<Pair<Double,Integer>> actual = new HashSet<>();
    while (generator.hasMoreCandidates()) {
        Candidate<DL4JConfiguration> candidate = generator.getCandidate();
        MultiLayerConfiguration conf = candidate.getValue().getMultiLayerConfiguration();
        FeedForwardLayer firstLayer = (FeedForwardLayer) conf.getConf(0).getLayer();
        actual.add(new Pair<>(firstLayer.getIUpdater().getLearningRate(0, 0), (int) firstLayer.getNOut()));
    }

    assertEquals(expected, actual);
}
 
Example 3
Source File: TestDL4JLocalExecution.java    From deeplearning4j with Apache License 2.0
@Test
public void testOcnn() {
    // Supply the data-set iterator factory by class name; the provider looks
    // it up under FACTORY_KEY at candidate-evaluation time.
    Map<String, Object> commands = new HashMap<>();
    commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY, TestDataFactoryProviderMnist.class.getCanonicalName());


    //Define: network config (hyperparameter space)
    MultiLayerSpace mls = new MultiLayerSpace.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(new SgdSpace(new ContinuousParameterSpace(0.0001, 0.1)))
            .l2(new ContinuousParameterSpace(0.0001, 0.01))
            .addLayer(
                    new DenseLayerSpace.Builder().nOut(new IntegerParameterSpace(250, 500))
                            .activation(new DiscreteParameterSpace<>(Activation.RELU,
                                    Activation.TANH))
                            .build(),
                    new IntegerParameterSpace(1, 2)) //1-2 identical layers (except nIn)
            .addLayer(new OCNNLayerSpace.Builder().nu(new ContinuousParameterSpace(0.0001, 0.1))
                    .numHidden(new DiscreteParameterSpace<Integer>(784 / 2,784 / 4))
                    .activation(Activation.HARDSIGMOID)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
            .setInputType(InputType.convolutionalFlat(28,28,1))
            .build();

    //Define configuration:

    CandidateGenerator candidateGenerator = new RandomSearchGenerator(mls, commands);
    DataProvider dataProvider = new DataSetIteratorFactoryProvider();


    // Fix: the original child name was "ArbiterDL4JTest3\\" — a Windows-only
    // backslash baked into the directory name, broken on Linux/macOS. The
    // File(parent, child) constructor handles path separators itself.
    String modelSavePath = new File(System.getProperty("java.io.tmpdir"), "ArbiterDL4JTest3").getAbsolutePath();

    File f = new File(modelSavePath);
    if (f.exists())
        f.delete();   // NOTE(review): File.delete() does not remove a non-empty directory — TODO confirm cleanup is sufficient
    f.mkdir();
    f.deleteOnExit();
    if (!f.exists())
        throw new RuntimeException("Could not create model save directory: " + modelSavePath);

    OptimizationConfiguration configuration = new OptimizationConfiguration.Builder()
            .candidateGenerator(candidateGenerator).dataProvider(dataProvider)
            .modelSaver(new FileModelSaver(modelSavePath)).scoreFunction(new TestSetLossScoreFunction())
            .terminationConditions(new MaxTimeCondition(2, TimeUnit.MINUTES),
                    new MaxCandidatesCondition(100))
            .build();


    //candidate generation: uncomment execute if you want to run
    IOptimizationRunner runner = new LocalOptimizationRunner(configuration,
            new MultiLayerNetworkTaskCreator(new ClassificationEvaluator()));

    Candidate candidate = candidateGenerator.getCandidate();

    // runner.execute();
    System.out.println("----- COMPLETE -----");
}