org.deeplearning4j.arbiter.conf.updater.AdamSpace Java Examples

The following examples show how to use org.deeplearning4j.arbiter.conf.updater.AdamSpace. Each example is taken from an open source project; the source file, project, and license are noted above it.
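
Before the individual examples, here is a minimal, hedged sketch of the two ways AdamSpace is constructed in the code below: wrapping a discrete set of candidate learning rates, or a continuous range. The class name is illustrative only, and it assumes AdamSpace implements ParameterSpace&lt;IUpdater&gt; (which is what the Builder.updater(...) calls in the examples consume).

import org.deeplearning4j.arbiter.conf.updater.AdamSpace;
import org.deeplearning4j.arbiter.optimize.api.ParameterSpace;
import org.deeplearning4j.arbiter.optimize.parameter.continuous.ContinuousParameterSpace;
import org.deeplearning4j.arbiter.optimize.parameter.discrete.DiscreteParameterSpace;
import org.nd4j.linalg.learning.config.IUpdater;

public class AdamSpaceSketch {
    public static void main(String[] args) {
        //Learning rate sampled from a fixed set of candidates
        ParameterSpace<Double> discreteLr = new DiscreteParameterSpace<>(0.1, 0.5, 1.0);
        //Learning rate sampled uniformly from a continuous range
        ParameterSpace<Double> continuousLr = new ContinuousParameterSpace(0.0001, 0.01);

        ParameterSpace<IUpdater> adamA = new AdamSpace(discreteLr);
        ParameterSpace<IUpdater> adamB = new AdamSpace(continuousLr);
        //Either space can then be passed to MultiLayerSpace.Builder.updater(...)
        //or ComputationGraphSpace.Builder.updater(...), as the examples show.
    }
}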
Example #1
Source File: TestJson.java    From deeplearning4j with Apache License 2.0
@Test
public void testComputationGraphSpaceJson() {
    ParameterSpace<Integer> p = new IntegerParameterSpace(10, 100);
    ComputationGraphSpace cgs =
                    new ComputationGraphSpace.Builder()
                                    .updater(new AdamSpace(new DiscreteParameterSpace<>(0.1, 0.5, 1.0)))
                                    .seed(12345).addInputs("in")
                                    .addLayer("0", new DenseLayerSpace.Builder()
                                                    .nIn(new IntegerParameterSpace(1, 100)).nOut(p).build(), "in")
                                    .addLayer("1", new DenseLayerSpace.Builder().nIn(p).nOut(10).build(), "0")
                                    .addLayer("2", new OutputLayerSpace.Builder().iLossFunction(
                                                    LossFunctions.LossFunction.MCXENT.getILossFunction()).nIn(10)
                                                    .nOut(5).build(), "1")
                                    .setOutputs("2").build();

    String asJson = cgs.toJson();
    ComputationGraphSpace fromJson = ComputationGraphSpace.fromJson(asJson);

    assertEquals(cgs, fromJson);
}
 
Example #2
Source File: HyperParameterTuningArbiterUiExample.java    From Java-Deep-Learning-Cookbook with MIT License
public static void main(String[] args) {

        ParameterSpace<Double> learningRateParam = new ContinuousParameterSpace(0.0001,0.01);
        ParameterSpace<Integer> layerSizeParam = new IntegerParameterSpace(5,11);
        MultiLayerSpace hyperParameterSpace = new MultiLayerSpace.Builder()
                .updater(new AdamSpace(learningRateParam))
                //  .weightInit(WeightInit.DISTRIBUTION).dist(new LogNormalDistribution())
                .addLayer(new DenseLayerSpace.Builder()
                        .activation(Activation.RELU)
                        .nIn(11)
                        .nOut(layerSizeParam)
                        .build())
                .addLayer(new DenseLayerSpace.Builder()
                        .activation(Activation.RELU)
                        .nIn(layerSizeParam)
                        .nOut(layerSizeParam)
                        .build())
                .addLayer(new OutputLayerSpace.Builder()
                        .activation(Activation.SIGMOID)
                        .lossFunction(LossFunctions.LossFunction.XENT)
                        .nOut(1)
                        .build())
                .build();

        Map<String,Object> dataParams = new HashMap<>();
        dataParams.put("batchSize", 10);

        Map<String,Object> commands = new HashMap<>();
        commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY, HyperParameterTuningArbiterUiExample.ExampleDataSource.class.getCanonicalName());

        CandidateGenerator candidateGenerator = new RandomSearchGenerator(hyperParameterSpace, dataParams);

        Properties dataSourceProperties = new Properties();
        dataSourceProperties.setProperty("minibatchSize", "64");

        ResultSaver modelSaver = new FileModelSaver("resources/");
        ScoreFunction scoreFunction = new EvaluationScoreFunction(org.deeplearning4j.eval.Evaluation.Metric.ACCURACY);


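        //Stop the search after 120 minutes or 30 evaluated candidates, whichever comes first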
        TerminationCondition[] conditions = {
                new MaxTimeCondition(120, TimeUnit.MINUTES),
                new MaxCandidatesCondition(30)

        };

        OptimizationConfiguration optimizationConfiguration = new OptimizationConfiguration.Builder()
                .candidateGenerator(candidateGenerator)
                .dataSource(HyperParameterTuningArbiterUiExample.ExampleDataSource.class,dataSourceProperties)
                .modelSaver(modelSaver)
                .scoreFunction(scoreFunction)
                .terminationConditions(conditions)
                .build();

        IOptimizationRunner runner = new LocalOptimizationRunner(optimizationConfiguration,new MultiLayerNetworkTaskCreator());
        //Persist optimization stats to disk and attach the Arbiter UI
        StatsStorage ss = new FileStatsStorage(new File("HyperParamOptimizationStats.dl4j"));
        runner.addListeners(new ArbiterStatusListener(ss));
        UIServer.getInstance().attach(ss);
        //Alternatively, log status to the console: runner.addListeners(new LoggingStatusListener());
        runner.execute();

        //Print the best hyperparameters

        double bestScore = runner.bestScore();
        int bestCandidateIndex = runner.bestScoreCandidateIndex();
        int numberOfConfigsEvaluated = runner.numCandidatesCompleted();

        String s = "Best score: " + bestScore + "\n" +
                "Index of model with best score: " + bestCandidateIndex + "\n" +
                "Number of configurations evaluated: " + numberOfConfigsEvaluated + "\n";

        System.out.println(s);

    }
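
The summary above prints only the best score, index, and count. To get at the best saved model itself, the runner's results can be iterated; the fragment below is an illustrative sketch that reuses the result API appearing in the test examples further down this page (runner.getResults(), ResultReference.getResult(), OptimizationResult.getResultReference().getResultModel()). Checked-exception handling and imports are elided, as in the surrounding snippets.

        //Illustrative sketch: locate the candidate with the highest accuracy score
        MultiLayerNetwork bestModel = null;
        double best = Double.NEGATIVE_INFINITY;    //EvaluationScoreFunction(ACCURACY) is maximized
        for (ResultReference rr : runner.getResults()) {
            OptimizationResult or = rr.getResult();    //may throw an exception in practice
            double score = or.getScore();              //assumed non-null once the run completes
            if (score > best) {
                best = score;
                bestModel = (MultiLayerNetwork) or.getResultReference().getResultModel();
            }
        }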
 
Example #3
Source File: HyperParameterTuning.java    From Java-Deep-Learning-Cookbook with MIT License
public static void main(String[] args) {

        ParameterSpace<Double> learningRateParam = new ContinuousParameterSpace(0.0001,0.01);
        ParameterSpace<Integer> layerSizeParam = new IntegerParameterSpace(5,11);
        MultiLayerSpace hyperParameterSpace = new MultiLayerSpace.Builder()
                .updater(new AdamSpace(learningRateParam))
                //  .weightInit(WeightInit.DISTRIBUTION).dist(new LogNormalDistribution())
                .addLayer(new DenseLayerSpace.Builder()
                        .activation(Activation.RELU)
                        .nIn(11)
                        .nOut(layerSizeParam)
                        .build())
                .addLayer(new DenseLayerSpace.Builder()
                        .activation(Activation.RELU)
                        .nIn(layerSizeParam)
                        .nOut(layerSizeParam)
                        .build())
                .addLayer(new OutputLayerSpace.Builder()
                        .activation(Activation.SIGMOID)
                        .lossFunction(LossFunctions.LossFunction.XENT)
                        .nOut(1)
                        .build())
                .build();

        Map<String,Object> dataParams = new HashMap<>();
        dataParams.put("batchSize", 10);

        Map<String,Object> commands = new HashMap<>();
        commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY,ExampleDataSource.class.getCanonicalName());

        CandidateGenerator candidateGenerator = new RandomSearchGenerator(hyperParameterSpace, dataParams);

        Properties dataSourceProperties = new Properties();
        dataSourceProperties.setProperty("minibatchSize", "64");

        ResultSaver modelSaver = new FileModelSaver("resources/");
        ScoreFunction scoreFunction = new EvaluationScoreFunction(org.deeplearning4j.eval.Evaluation.Metric.ACCURACY);


        TerminationCondition[] conditions = {
                new MaxTimeCondition(120, TimeUnit.MINUTES),
                new MaxCandidatesCondition(30)

        };

        OptimizationConfiguration optimizationConfiguration = new OptimizationConfiguration.Builder()
                .candidateGenerator(candidateGenerator)
                .dataSource(ExampleDataSource.class,dataSourceProperties)
                .modelSaver(modelSaver)
                .scoreFunction(scoreFunction)
                .terminationConditions(conditions)
                .build();

        IOptimizationRunner runner = new LocalOptimizationRunner(optimizationConfiguration,new MultiLayerNetworkTaskCreator());
        //Uncomment the next three lines to persist optimization stats and attach the Arbiter UI
        //StatsStorage ss = new FileStatsStorage(new File("HyperParamOptimizationStats.dl4j"));
        //runner.addListeners(new ArbiterStatusListener(ss));
        //UIServer.getInstance().attach(ss);
        runner.addListeners(new LoggingStatusListener()); //console logging; swap for ArbiterStatusListener(ss) when using the UI
        runner.execute();

        //Print the best hyperparameters

        double bestScore = runner.bestScore();
        int bestCandidateIndex = runner.bestScoreCandidateIndex();
        int numberOfConfigsEvaluated = runner.numCandidatesCompleted();

        String s = "Best score: " + bestScore + "\n" +
                "Index of model with best score: " + bestCandidateIndex + "\n" +
                "Number of configurations evaluated: " + numberOfConfigsEvaluated + "\n";

        System.out.println(s);

    }
 
Example #4
Source File: TestMultiLayerSpace.java    From deeplearning4j with Apache License 2.0
@Test
public void testMathOps() {

    ParameterSpace<Integer> firstLayerSize = new IntegerParameterSpace(10,30);
    ParameterSpace<Integer> secondLayerSize = new MathOp<>(firstLayerSize, Op.MUL, 3);
    ParameterSpace<Double> firstLayerLR = new ContinuousParameterSpace(0.01, 0.1);
    ParameterSpace<Double> secondLayerLR = new MathOp<>(firstLayerLR, Op.ADD, 0.2);

    MultiLayerSpace mls =
            new MultiLayerSpace.Builder().updater(new Sgd(0.005))
                    .seed(12345)
                    .layer(new DenseLayerSpace.Builder().nOut(firstLayerSize)
                            .updater(new AdamSpace(firstLayerLR))
                            .build())
                    .layer(new OutputLayerSpace.Builder().nOut(secondLayerSize)
                            .updater(new AdamSpace(secondLayerLR))
                            .activation(Activation.SOFTMAX)
                            .build())
                    .setInputType(InputType.feedForward(10))
                    .build();

    int nParams = mls.numParameters();
    assertEquals(2, nParams);
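    //Only firstLayerSize and firstLayerLR are free hyperparameters; the two MathOp
    //spaces are derived from them rather than searched independently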

    new RandomSearchGenerator(mls, null);    //Initializes the indices

    Random r = new Random(12345);
    for( int i=0; i<10; i++ ){
        double[] d = new double[nParams];
        for( int j=0; j<d.length; j++ ){
            d[j] = r.nextDouble();
        }

        MultiLayerConfiguration conf = mls.getValue(d).getMultiLayerConfiguration();
        long l0Size = ((FeedForwardLayer)conf.getConf(0).getLayer()).getNOut();
        long l1Size = ((FeedForwardLayer)conf.getConf(1).getLayer()).getNOut();
        assertEquals(3*l0Size, l1Size);

        double l0Lr = ((FeedForwardLayer)conf.getConf(0).getLayer()).getIUpdater().getLearningRate(0,0);
        double l1Lr = ((FeedForwardLayer)conf.getConf(1).getLayer()).getIUpdater().getLearningRate(0,0);
        assertEquals(l0Lr+0.2, l1Lr, 1e-6);
    }
}
 
Example #5
Source File: TestMultiLayerSpace.java    From deeplearning4j with Apache License 2.0
@Test
public void testIssue8082(){
    ParameterSpace<Double> learningRateHyperparam = new DiscreteParameterSpace<>(0.003, 0.005, 0.01, 0.05);
    ParameterSpace<Integer> layerSizeHyperparam1 = new DiscreteParameterSpace<>(32, 64, 96, 128);
    ParameterSpace<Integer> layerSizeHyperparam2 = new DiscreteParameterSpace<>(32, 64, 96, 128);
    ParameterSpace<Double> dropoutHyperparam = new DiscreteParameterSpace<>(0.8, 0.9);

    MultiLayerSpace mls = new MultiLayerSpace.Builder()
            .updater(new AdamSpace(learningRateHyperparam))
            .weightInit(WeightInit.XAVIER)
            .l2(0.0001)
            .addLayer(new DenseLayerSpace.Builder()
                    .nIn(10)
                    .nOut(layerSizeHyperparam1)
                    .build())
            .addLayer(new BatchNormalizationSpace.Builder()
                    .nOut(layerSizeHyperparam1)
                    .activation(Activation.RELU)
                    .build())
            .addLayer(new DropoutLayerSpace.Builder()
                    .dropOut(dropoutHyperparam)
                    .build())
            .addLayer(new DenseLayerSpace.Builder()
                    .nOut(layerSizeHyperparam2)
                    .build())
            .addLayer(new BatchNormalizationSpace.Builder()
                    .nOut(layerSizeHyperparam2)
                    .activation(Activation.RELU)
                    .build())
            .addLayer(new DropoutLayerSpace.Builder()
                    .dropOut(dropoutHyperparam)
                    .build())
            .addLayer(new OutputLayerSpace.Builder()
                    .nOut(10)
                    .activation(Activation.SOFTMAX)
                    .lossFunction(LossFunction.MCXENT)
                    .build())
            .build();

    assertEquals(4, mls.getNumParameters());

    for( int discreteCount : new int[]{1, 5}) {
        GridSearchCandidateGenerator generator = new GridSearchCandidateGenerator(mls, discreteCount, GridSearchCandidateGenerator.Mode.Sequential, null);

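        //Grid size: 4 learning rates x 4 sizes (first dense) x 4 sizes (second dense) x 2 dropout
        //values; the shared dropout space is counted once even though two layers reference it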
        int expCandidates = 4 * 4 * 4 * 2;
        assertEquals(expCandidates, generator.getTotalNumCandidates());

        int count = 0;
        while (generator.hasMoreCandidates()) {
            generator.getCandidate();
            count++;
        }


        assertEquals(expCandidates, count);
    }
}
 
Example #6
Source File: TestScoreFunctions.java    From deeplearning4j with Apache License 2.0
@Test
public void testROCScoreFunctions() throws Exception {


    for (boolean auc : new boolean[]{true, false}) {
        for (ROCScoreFunction.ROCType rocType : ROCScoreFunction.ROCType.values()) {
            String msg = (auc ? "AUC" : "AUPRC") + " - " + rocType;
            log.info("Starting: " + msg);

            ParameterSpace<Double> lr = new ContinuousParameterSpace(1e-5, 1e-3);

            int nOut = (rocType == ROCScoreFunction.ROCType.ROC ? 2 : 10);
            LossFunctions.LossFunction lf = (rocType == ROCScoreFunction.ROCType.BINARY ?
                    LossFunctions.LossFunction.XENT : LossFunctions.LossFunction.MCXENT);
            Activation a = (rocType == ROCScoreFunction.ROCType.BINARY ? Activation.SIGMOID : Activation.SOFTMAX);
            MultiLayerSpace mls = new MultiLayerSpace.Builder()
                    .trainingWorkspaceMode(WorkspaceMode.NONE)
                    .inferenceWorkspaceMode(WorkspaceMode.NONE)
                    .updater(new AdamSpace(lr))
                    .weightInit(WeightInit.XAVIER)
                    .layer(new OutputLayerSpace.Builder().nIn(784).nOut(nOut)
                            .activation(a)
                            .lossFunction(lf).build())
                    .build();

            CandidateGenerator cg = new RandomSearchGenerator(mls);
            ResultSaver rs = new InMemoryResultSaver();
            ScoreFunction sf = new ROCScoreFunction(rocType, (auc ? ROCScoreFunction.Metric.AUC : ROCScoreFunction.Metric.AUPRC));


            OptimizationConfiguration oc = new OptimizationConfiguration.Builder()
                    .candidateGenerator(cg)
                    .dataProvider(new DP(rocType))
                    .modelSaver(rs)
                    .scoreFunction(sf)
                    .terminationConditions(new MaxCandidatesCondition(3))
                    .rngSeed(12345)
                    .build();

            IOptimizationRunner runner = new LocalOptimizationRunner(oc, new MultiLayerNetworkTaskCreator());
            runner.execute();

            List<ResultReference> list = runner.getResults();

            for (ResultReference rr : list) {
                DataSetIterator testIter = new MnistDataSetIterator(4, 16, false, false, false, 12345);
                testIter.setPreProcessor(new PreProc(rocType));

                OptimizationResult or = rr.getResult();
                MultiLayerNetwork net = (MultiLayerNetwork) or.getResultReference().getResultModel();

                double expScore;
                switch (rocType){
                    case ROC:
                        if(auc){
                            expScore = net.doEvaluation(testIter, new ROC())[0].calculateAUC();
                        } else {
                            expScore = net.doEvaluation(testIter, new ROC())[0].calculateAUCPR();
                        }
                        break;
                    case BINARY:
                        if(auc){
                            expScore = net.doEvaluation(testIter, new ROCBinary())[0].calculateAverageAuc();
                        } else {
                            expScore = net.doEvaluation(testIter, new ROCBinary())[0].calculateAverageAUCPR();
                        }
                        break;
                    case MULTICLASS:
                        if(auc){
                            expScore = net.doEvaluation(testIter, new ROCMultiClass())[0].calculateAverageAUC();
                        } else {
                            expScore = net.doEvaluation(testIter, new ROCMultiClass())[0].calculateAverageAUCPR();
                        }
                        break;
                    default:
                        throw new RuntimeException();
                }


                DataSetIterator iter = new MnistDataSetIterator(4, 16, false, false, false, 12345);
                iter.setPreProcessor(new PreProc(rocType));

                assertEquals(msg, expScore, or.getScore(), 1e-4);
            }
        }
    }
}
 
Example #7
Source File: TestGraphLocalExecution.java    From deeplearning4j with Apache License 2.0
@Test
public void testLocalExecutionEarlyStopping() throws Exception {
    EarlyStoppingConfiguration<ComputationGraph> esConf = new EarlyStoppingConfiguration.Builder<ComputationGraph>()
            .epochTerminationConditions(new MaxEpochsTerminationCondition(2))
            .scoreCalculator(new ScoreProvider())
            .modelSaver(new InMemoryModelSaver()).build();
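    //With this configuration each candidate trains for at most 2 epochs, keeping the test fast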
    Map<String, Object> commands = new HashMap<>();
    commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY, TestDataFactoryProviderMnist.class.getCanonicalName());

    //Define: network config (hyperparameter space)
    ComputationGraphSpace cgs = new ComputationGraphSpace.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(new AdamSpace(new ContinuousParameterSpace(0.0001, 0.1)))
            .l2(new ContinuousParameterSpace(0.0001, 0.01)).addInputs("in")
            .setInputTypes(InputType.feedForward(784))
            .addLayer("first",
                    new DenseLayerSpace.Builder().nIn(784).nOut(new IntegerParameterSpace(2, 10))
                            .activation(new DiscreteParameterSpace<>(Activation.RELU,
                                    Activation.TANH))
                            .build(),
                    "in") //1-2 identical layers (except nIn)
            .addLayer("out", new OutputLayerSpace.Builder().nOut(10).activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "first")
            .setOutputs("out").earlyStoppingConfiguration(esConf).build();

    //Define configuration:

    CandidateGenerator candidateGenerator = new RandomSearchGenerator(cgs, commands);
    DataProvider dataProvider = new DataSetIteratorFactoryProvider();


    String modelSavePath = new File(System.getProperty("java.io.tmpdir"), "ArbiterDL4JTest2CG\\").getAbsolutePath();

    File f = new File(modelSavePath);
    if (f.exists())
        f.delete();
    f.mkdir();
    f.deleteOnExit();
    if (!f.exists())
        throw new RuntimeException();

    OptimizationConfiguration configuration = new OptimizationConfiguration.Builder()
            .candidateGenerator(candidateGenerator)
            .dataProvider(dataProvider)
            .scoreFunction(ScoreFunctions.testSetF1())
            .modelSaver(new FileModelSaver(modelSavePath))
            .terminationConditions(new MaxTimeCondition(15, TimeUnit.SECONDS),
                    new MaxCandidatesCondition(3))
            .build();


    IOptimizationRunner runner = new LocalOptimizationRunner(configuration, new ComputationGraphTaskCreator());
    runner.execute();

    assertEquals(0, runner.numCandidatesFailed());
    assertTrue(runner.numCandidatesCompleted() > 0);
}