org.deeplearning4j.arbiter.optimize.api.termination.TerminationCondition Java Examples

The following examples show how to use org.deeplearning4j.arbiter.optimize.api.termination.TerminationCondition. The examples are extracted from open source projects; the source file, originating project, and license are noted above each one.
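
A TerminationCondition tells an optimization runner when to stop generating and evaluating candidates: the runner polls every registered condition and halts as soon as one fires (see Example #2). Built-in implementations include MaxTimeCondition and MaxCandidatesCondition. As a minimal sketch of a custom condition, the class below stops once the best score reaches a target value. TargetScoreCondition is a hypothetical name, and the initialize/terminate contract shown here is inferred from the built-in conditions and the polling loop in Example #2, so treat it as an assumption rather than the exact interface.

import org.deeplearning4j.arbiter.optimize.api.termination.TerminationCondition;
import org.deeplearning4j.arbiter.optimize.runner.IOptimizationRunner;

//Hypothetical custom condition: stop once the best score reaches a target.
public class TargetScoreCondition implements TerminationCondition {
    private final double targetScore;

    public TargetScoreCondition(double targetScore) {
        this.targetScore = targetScore;
    }

    @Override
    public void initialize(IOptimizationRunner runner) {
        //No per-run state to set up for this condition
    }

    @Override
    public boolean terminate(IOptimizationRunner runner) {
        //Fire once at least one candidate has completed and the best score
        //meets the target (assumes higher scores are better, e.g. accuracy)
        return runner.numCandidatesCompleted() > 0 && runner.bestScore() >= targetScore;
    }
}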
Example #1
Source File: ArbiterCliGenerator.java    From deeplearning4j with Apache License 2.0
private List<TerminationCondition> getConditions() {
    List<TerminationCondition> ret = new ArrayList<>();
    if (duration > 0) {
        ret.add(new MaxTimeCondition(duration, TimeUnit.MINUTES));
    }
    if (numCandidates > 0) {
        ret.add(new MaxCandidatesCondition(numCandidates));
    }
    //Fall back to a single candidate so at least one termination condition is always present
    if (ret.isEmpty()) {
        ret.add(new MaxCandidatesCondition(1));
    }
    return ret;
}
 
Example #2
Source File: BaseOptimizationRunner.java    From deeplearning4j with Apache License 2.0
private boolean terminate() {
    for (TerminationCondition c : config.getTerminationConditions()) {
        if (c.terminate(this)) {
            log.info("BaseOptimizationRunner global termination condition hit: {}", c);
            return true;
        }
    }
    return false;
}
 
Example #3
Source File: HyperParameterTuningArbiterUiExample.java    From Java-Deep-Learning-Cookbook with MIT License
public static void main(String[] args) {

        ParameterSpace<Double> learningRateParam = new ContinuousParameterSpace(0.0001,0.01);
        ParameterSpace<Integer> layerSizeParam = new IntegerParameterSpace(5,11);
        MultiLayerSpace hyperParameterSpace = new MultiLayerSpace.Builder()
                .updater(new AdamSpace(learningRateParam))
                //  .weightInit(WeightInit.DISTRIBUTION).dist(new LogNormalDistribution())
                .addLayer(new DenseLayerSpace.Builder()
                        .activation(Activation.RELU)
                        .nIn(11)
                        .nOut(layerSizeParam)
                        .build())
                .addLayer(new DenseLayerSpace.Builder()
                        .activation(Activation.RELU)
                        .nIn(layerSizeParam)
                        .nOut(layerSizeParam)
                        .build())
                .addLayer(new OutputLayerSpace.Builder()
                        .activation(Activation.SIGMOID)
                        .lossFunction(LossFunctions.LossFunction.XENT)
                        .nOut(1)
                        .build())
                .build();

        Map<String,Object> dataParams = new HashMap<>();
        dataParams.put("batchSize",new Integer(10));

        //Note: this commands map is not used below; the DataSource API (dataSourceProperties) supplies the data instead
        Map<String,Object> commands = new HashMap<>();
        commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY, HyperParameterTuningArbiterUiExample.ExampleDataSource.class.getCanonicalName());

        CandidateGenerator candidateGenerator = new RandomSearchGenerator(hyperParameterSpace, dataParams);

        Properties dataSourceProperties = new Properties();
        dataSourceProperties.setProperty("minibatchSize", "64");

        ResultSaver modelSaver = new FileModelSaver("resources/");
        ScoreFunction scoreFunction = new EvaluationScoreFunction(org.deeplearning4j.eval.Evaluation.Metric.ACCURACY);


        TerminationCondition[] conditions = {
                new MaxTimeCondition(120, TimeUnit.MINUTES),
                new MaxCandidatesCondition(30)
        };

        OptimizationConfiguration optimizationConfiguration = new OptimizationConfiguration.Builder()
                .candidateGenerator(candidateGenerator)
                .dataSource(HyperParameterTuningArbiterUiExample.ExampleDataSource.class,dataSourceProperties)
                .modelSaver(modelSaver)
                .scoreFunction(scoreFunction)
                .terminationConditions(conditions)
                .build();

        IOptimizationRunner runner = new LocalOptimizationRunner(optimizationConfiguration,new MultiLayerNetworkTaskCreator());
        //Store optimization run statistics and attach the web UI
        StatsStorage ss = new FileStatsStorage(new File("HyperParamOptimizationStats.dl4j"));
        runner.addListeners(new ArbiterStatusListener(ss));
        UIServer.getInstance().attach(ss);
        //Alternatively, log status to the console: runner.addListeners(new LoggingStatusListener());
        runner.execute();

        //Print the best hyperparameters

        double bestScore = runner.bestScore();
        int bestCandidateIndex = runner.bestScoreCandidateIndex();
        int numberOfConfigsEvaluated = runner.numCandidatesCompleted();

        String s = "Best score: " + bestScore + "\n" +
                "Index of model with best score: " + bestCandidateIndex + "\n" +
                "Number of configurations evaluated: " + numberOfConfigsEvaluated + "\n";

        System.out.println(s);

    }
 
Example #4
Source File: HyperParameterTuning.java    From Java-Deep-Learning-Cookbook with MIT License
public static void main(String[] args) {

        ParameterSpace<Double> learningRateParam = new ContinuousParameterSpace(0.0001,0.01);
        ParameterSpace<Integer> layerSizeParam = new IntegerParameterSpace(5,11);
        MultiLayerSpace hyperParameterSpace = new MultiLayerSpace.Builder()
                .updater(new AdamSpace(learningRateParam))
                //  .weightInit(WeightInit.DISTRIBUTION).dist(new LogNormalDistribution())
                .addLayer(new DenseLayerSpace.Builder()
                        .activation(Activation.RELU)
                        .nIn(11)
                        .nOut(layerSizeParam)
                        .build())
                .addLayer(new DenseLayerSpace.Builder()
                        .activation(Activation.RELU)
                        .nIn(layerSizeParam)
                        .nOut(layerSizeParam)
                        .build())
                .addLayer(new OutputLayerSpace.Builder()
                        .activation(Activation.SIGMOID)
                        .lossFunction(LossFunctions.LossFunction.XENT)
                        .nOut(1)
                        .build())
                .build();

        Map<String,Object> dataParams = new HashMap<>();
        dataParams.put("batchSize",new Integer(10));

        //Note: this commands map is not used below; the DataSource API (dataSourceProperties) supplies the data instead
        Map<String,Object> commands = new HashMap<>();
        commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY, ExampleDataSource.class.getCanonicalName());

        CandidateGenerator candidateGenerator = new RandomSearchGenerator(hyperParameterSpace, dataParams);

        Properties dataSourceProperties = new Properties();
        dataSourceProperties.setProperty("minibatchSize", "64");

        ResultSaver modelSaver = new FileModelSaver("resources/");
        ScoreFunction scoreFunction = new EvaluationScoreFunction(org.deeplearning4j.eval.Evaluation.Metric.ACCURACY);


        TerminationCondition[] conditions = {
                new MaxTimeCondition(120, TimeUnit.MINUTES),
                new MaxCandidatesCondition(30)
        };

        OptimizationConfiguration optimizationConfiguration = new OptimizationConfiguration.Builder()
                .candidateGenerator(candidateGenerator)
                .dataSource(ExampleDataSource.class,dataSourceProperties)
                .modelSaver(modelSaver)
                .scoreFunction(scoreFunction)
                .terminationConditions(conditions)
                .build();

        IOptimizationRunner runner = new LocalOptimizationRunner(optimizationConfiguration,new MultiLayerNetworkTaskCreator());
        //Uncomment the following to store run statistics and attach the web UI:
        //StatsStorage ss = new FileStatsStorage(new File("HyperParamOptimizationStats.dl4j"));
        //runner.addListeners(new ArbiterStatusListener(ss));
        //UIServer.getInstance().attach(ss);
        runner.addListeners(new LoggingStatusListener());
        runner.execute();

        //Print the best hyperparameters

        double bestScore = runner.bestScore();
        int bestCandidateIndex = runner.bestScoreCandidateIndex();
        int numberOfConfigsEvaluated = runner.numCandidatesCompleted();

        String s = "Best score: " + bestScore + "\n" +
                "Index of model with best score: " + bestCandidateIndex + "\n" +
                "Number of configurations evaluated: " + numberOfConfigsEvaluated + "\n";

        System.out.println(s);

    }
 
Example #5
Source File: TestBasic.java    From deeplearning4j with Apache License 2.0
@Test
@Ignore
public void testBasicMnistDataSource() throws InterruptedException {
    ParameterSpace<Double> learningRateHyperparam = new ContinuousParameterSpace(0.0001, 0.1);
    ParameterSpace<Integer> layerSizeHyperparam = new IntegerParameterSpace(16, 256);

    MultiLayerSpace hyperparameterSpace = new MultiLayerSpace.Builder()
            .weightInit(WeightInit.XAVIER)
            .l2(0.0001)
            .updater(new SgdSpace(learningRateHyperparam))
            .addLayer(new DenseLayerSpace.Builder()
                    .nIn(784)
                    .activation(Activation.LEAKYRELU)
                    .nOut(layerSizeHyperparam)
                    .build())
            .addLayer(new OutputLayerSpace.Builder()
                    .nOut(10)
                    .activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT)
                    .build())
            .build();
    CandidateGenerator candidateGenerator = new RandomSearchGenerator(hyperparameterSpace, null);
    ScoreFunction scoreFunction = new EvaluationScoreFunction(Evaluation.Metric.ACCURACY);
    TerminationCondition[] terminationConditions = {
            new MaxTimeCondition(5, TimeUnit.MINUTES),
            new MaxCandidatesCondition(2)};

    String modelSavePath = new File(System.getProperty("java.io.tmpdir"), "ArbiterUiTestBasicMnist").getAbsolutePath();

    File f = new File(modelSavePath);
    if (f.exists()) {
        f.delete();
    }
    f.mkdir();
    if (!f.exists()) {
        throw new RuntimeException("Could not create model save directory: " + modelSavePath);
    }
    Class<? extends DataSource> ds = MnistDataSource.class;
    Properties dsp = new Properties();
    dsp.setProperty("minibatch", "8");
    OptimizationConfiguration configuration = new OptimizationConfiguration.Builder()
            .candidateGenerator(candidateGenerator).dataSource(ds, dsp)
            .modelSaver(new FileModelSaver(modelSavePath))
            .scoreFunction(scoreFunction)
            .terminationConditions(terminationConditions)
            .build();

    IOptimizationRunner runner = new LocalOptimizationRunner(configuration, new MultiLayerNetworkTaskCreator());

    StatsStorage ss = new InMemoryStatsStorage();
    StatusListener sl = new ArbiterStatusListener(ss);
    runner.addListeners(sl);

    UIServer.getInstance().attach(ss);

    runner.execute();
    //Keep the test (and attached UI server) alive for manual inspection
    Thread.sleep(90000);
}
 
Example #6
Source File: TestMultiLayerSpace.java    From deeplearning4j with Apache License 2.0
@Test
public void testInputTypeBasic() throws Exception {

    ParameterSpace<Integer> layerSizeHyperparam = new IntegerParameterSpace(20, 60);

    MultiLayerSpace hyperparameterSpace = new MultiLayerSpace.Builder().l2(0.0001)
                    .weightInit(WeightInit.XAVIER).updater(new Nesterovs())
                    .addLayer(new ConvolutionLayerSpace.Builder().kernelSize(5, 5).nIn(1).stride(1, 1)
                                    .nOut(layerSizeHyperparam).activation(Activation.IDENTITY).build())
                    .addLayer(new SubsamplingLayerSpace.Builder().poolingType(SubsamplingLayer.PoolingType.MAX)
                                    .kernelSize(2, 2).stride(2, 2).build())
                    .addLayer(new ConvolutionLayerSpace.Builder().kernelSize(5, 5)
                                    //Note that nIn need not be specified in later layers
                                    .stride(1, 1).nOut(50).activation(Activation.IDENTITY).build())
                    .addLayer(new SubsamplingLayerSpace.Builder().poolingType(SubsamplingLayer.PoolingType.MAX)
                                    .kernelSize(2, 2).stride(2, 2).build())
                    .addLayer(new DenseLayerSpace.Builder().activation(Activation.RELU).nOut(500).build())
                    .addLayer(new OutputLayerSpace.Builder()
                                    .lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nOut(10)
                                    .activation(Activation.SOFTMAX).build())
                    .setInputType(InputType.convolutionalFlat(28, 28, 1)).build();


    DataProvider dataProvider = new TestDataSetProvider();

    File f = testDir.newFolder();
    if (f.exists()) {
        f.delete();
    }
    f.mkdir();
    ResultSaver modelSaver = new FileModelSaver(f.getAbsolutePath());

    ScoreFunction scoreFunction = new TestSetAccuracyScoreFunction();

    int maxCandidates = 4;
    TerminationCondition[] terminationConditions =
                    new TerminationCondition[] {new MaxCandidatesCondition(maxCandidates)};

    //Given these configuration options, let's put them all together:
    OptimizationConfiguration configuration = new OptimizationConfiguration.Builder()
                    .candidateGenerator(new RandomSearchGenerator(hyperparameterSpace, null))
                    .dataProvider(dataProvider).modelSaver(modelSaver).scoreFunction(scoreFunction)
                    .terminationConditions(terminationConditions).build();

    IOptimizationRunner runner = new LocalOptimizationRunner(configuration, new MultiLayerNetworkTaskCreator());
    runner.execute();

    assertEquals(maxCandidates, runner.getResults().size());
}
 
Example #7
Source File: OptimizationConfiguration.java    From deeplearning4j with Apache License 2.0
/**
 * Termination conditions to use. The optimization run stops as soon as any one condition is satisfied.
 * @param conditions one or more termination conditions
 * @return this Builder, for method chaining
 */
public Builder terminationConditions(TerminationCondition... conditions) {
    terminationConditions = Arrays.asList(conditions);
    return this;
}
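
For reference, a condensed sketch of a typical call site follows. Here candidateGenerator, modelSaver, scoreFunction, MyDataSource, and dataSourceProperties are placeholders configured as in the full examples above, and the condition values are illustrative.

OptimizationConfiguration config = new OptimizationConfiguration.Builder()
        .candidateGenerator(candidateGenerator)
        .dataSource(MyDataSource.class, dataSourceProperties)
        .modelSaver(modelSaver)
        .scoreFunction(scoreFunction)
        //The run stops when either condition fires: 30 minutes of wall-clock
        //time or 50 evaluated candidates, whichever comes first
        .terminationConditions(
                new MaxTimeCondition(30, TimeUnit.MINUTES),
                new MaxCandidatesCondition(50))
        .build();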