org.deeplearning4j.arbiter.optimize.parameter.integer.IntegerParameterSpace Java Examples

The following examples show how to use org.deeplearning4j.arbiter.optimize.parameter.integer.IntegerParameterSpace. Each example is taken from an open source project; the source file, project, and license are noted above it.
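IntegerParameterSpace models an integer-valued hyperparameter, drawn either uniformly from an inclusive [min, max] range or from an explicit Apache Commons Math IntegerDistribution. Before the project examples, here is a minimal self-contained sketch of the API as it is exercised below; the class name and printed text are illustrative only, not part of Arbiter.

import org.apache.commons.math3.distribution.UniformIntegerDistribution;
import org.deeplearning4j.arbiter.optimize.api.ParameterSpace;
import org.deeplearning4j.arbiter.optimize.parameter.integer.IntegerParameterSpace;

public class IntegerParameterSpaceSketch {
    public static void main(String[] args) {
        //Uniform over 32..128 (inclusive)
        ParameterSpace<Integer> layerSize = new IntegerParameterSpace(32, 128);

        //Equivalent construction from an explicit distribution
        ParameterSpace<Integer> fromDistribution =
                new IntegerParameterSpace(new UniformIntegerDistribution(32, 128));

        //Each leaf space consumes one dimension of the candidate vector;
        //the candidate generator normally assigns these indices.
        layerSize.setIndices(0);

        //getValue maps a point in the unit hypercube to a concrete integer
        Integer value = layerSize.getValue(new double[]{0.5});
        System.out.println("Sampled layer size: " + value);
    }
}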
Example #1
Source File: TestBasic.java    From deeplearning4j with Apache License 2.0
private static MultiLayerSpace getMultiLayerSpaceMnist() {
    return new MultiLayerSpace.Builder()
            .updater(new SgdSpace(new ContinuousParameterSpace(0.0001, 0.2)))
            .l2(new ContinuousParameterSpace(0.0001, 0.05))
            .addLayer(
                    new ConvolutionLayerSpace.Builder().nIn(1)
                            .nOut(new IntegerParameterSpace(5, 30))
                            .kernelSize(new DiscreteParameterSpace<>(new int[]{3, 3},
                                    new int[]{4, 4}, new int[]{5, 5}))
                            .stride(new DiscreteParameterSpace<>(new int[]{1, 1},
                                    new int[]{2, 2}))
                            .activation(new DiscreteParameterSpace<>(Activation.RELU,
                                    Activation.SOFTPLUS, Activation.LEAKYRELU))
                            .build())
            .addLayer(new DenseLayerSpace.Builder().nOut(new IntegerParameterSpace(32, 128))
                    .activation(new DiscreteParameterSpace<>(Activation.RELU, Activation.TANH))
                    .build(), new IntegerParameterSpace(0, 1), true) //0 to 1 layers
            .addLayer(new OutputLayerSpace.Builder().nOut(10).activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
            .setInputType(InputType.convolutionalFlat(28, 28, 1))
            .build();
}
 
Example #2
Source File: TestJson.java    From deeplearning4j with Apache License 2.0
@Test
public void testComputationGraphSpaceJson() {
    ParameterSpace<Integer> p = new IntegerParameterSpace(10, 100);
    ComputationGraphSpace cgs =
                    new ComputationGraphSpace.Builder()
                                    .updater(new AdamSpace(new DiscreteParameterSpace<>(0.1, 0.5, 1.0)))
                                    .seed(12345).addInputs("in")
                                    .addLayer("0", new DenseLayerSpace.Builder()
                                                    .nIn(new IntegerParameterSpace(1, 100)).nOut(p).build(), "in")
                                    .addLayer("1", new DenseLayerSpace.Builder().nIn(p).nOut(10).build(), "0")
                                    .addLayer("2", new OutputLayerSpace.Builder().iLossFunction(
                                                    LossFunctions.LossFunction.MCXENT.getILossFunction()).nIn(10)
                                                    .nOut(5).build(), "1")
                                    .setOutputs("2").build();

    String asJson = cgs.toJson();
    ComputationGraphSpace fromJson = ComputationGraphSpace.fromJson(asJson);

    assertEquals(cgs, fromJson);
}
 
Example #3
Source File: TestJson.java    From deeplearning4j with Apache License 2.0
@Test
public void testMultiLayerSpaceJson() {
    MultiLayerSpace mls = new MultiLayerSpace.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .updater(new SgdSpace(new ContinuousParameterSpace(0.0001, 0.2)))
                    .l2(new ContinuousParameterSpace(0.0001, 0.05))
                    .addLayer(new DenseLayerSpace.Builder().nIn(1).nOut(new IntegerParameterSpace(5, 30))
                                    .activation(new DiscreteParameterSpace<>(Activation.RELU, Activation.SOFTPLUS,
                                                    Activation.LEAKYRELU))
                                    .build(), new IntegerParameterSpace(1, 2), true) //1-2 identical layers
                    .addLayer(new DenseLayerSpace.Builder().nIn(4).nOut(new IntegerParameterSpace(2, 10))
                                    .activation(new DiscreteParameterSpace<>(Activation.RELU, Activation.TANH))
                                    .build(), new IntegerParameterSpace(0, 1), true) //0 to 1 layers
                    .addLayer(new OutputLayerSpace.Builder().nOut(10).activation(Activation.SOFTMAX)
                                    .iLossFunction(LossFunctions.LossFunction.MCXENT.getILossFunction()).build())
                    .setInputType(InputType.convolutional(28, 28, 1)).build();

    String asJson = mls.toJson();
    //        System.out.println(asJson);

    MultiLayerSpace fromJson = MultiLayerSpace.fromJson(asJson);

    assertEquals(mls, fromJson);
}
 
Example #4
Source File: TestJson.java    From deeplearning4j with Apache License 2.0
@Test
public void testParameterSpaceJson() throws Exception {

    List<ParameterSpace<?>> l = new ArrayList<>();
    l.add(new FixedValue<>(1.0));
    l.add(new FixedValue<>(1));
    l.add(new FixedValue<>("string"));
    l.add(new ContinuousParameterSpace(-1, 1));
    l.add(new ContinuousParameterSpace(new LogNormalDistribution(1, 1)));
    l.add(new ContinuousParameterSpace(new NormalDistribution(2, 0.01)));
    l.add(new DiscreteParameterSpace<>(1, 5, 7));
    l.add(new DiscreteParameterSpace<>("first", "second", "third"));
    l.add(new IntegerParameterSpace(0, 10));
    l.add(new IntegerParameterSpace(new UniformIntegerDistribution(0, 50)));
    l.add(new BooleanSpace());

    for (ParameterSpace<?> ps : l) {
        String strJson = jsonMapper.writeValueAsString(ps);
        String strYaml = yamlMapper.writeValueAsString(ps);

        ParameterSpace<?> fromJson = jsonMapper.readValue(strJson, ParameterSpace.class);
        ParameterSpace<?> fromYaml = yamlMapper.readValue(strYaml, ParameterSpace.class);

        assertEquals(ps, fromJson);
        assertEquals(ps, fromYaml);
    }
}
 
Example #5
Source File: TestParameterSpaces.java    From deeplearning4j with Apache License 2.0
@Test
public void testIntegerParameterSpace() {
    ParameterSpace<Integer> ips = new IntegerParameterSpace(0, 4);
    ips.setIndices(0);

    for (int i = 0; i < 5; i++) {
        double d = i / 5.0 + 0.1; //Center
        double dEdgeLower = i / 5.0 + 1e-8; //Edge case: just above split threshold
        double dEdgeUpper = (i + 1) / 5.0 - 1e-8; //Edge case: just below split threshold
        assertEquals(i, (int) ips.getValue(new double[]{d}));
        assertEquals(i, (int) ips.getValue(new double[]{dEdgeLower}));
        assertEquals(i, (int) ips.getValue(new double[]{dEdgeUpper}));
    }
}
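The test above pins down how getValue maps the continuous [0, 1] search dimension onto integers: the interval is split into max - min + 1 equal-width buckets. The helper below is an illustrative re-implementation of that observed bucketing only; the actual Arbiter implementation delegates to the underlying integer distribution.

public class IntegerBucketingSketch {
    //Reproduces the bucket boundaries asserted by the test above for
    //new IntegerParameterSpace(0, 4); illustrative, not Arbiter's code.
    static int mapToInteger(double d, int min, int max) {
        int numValues = max - min + 1;                //5 buckets for (0, 4)
        int bucket = (int) (d * numValues);           //d in [i/5, (i+1)/5) -> i
        return min + Math.min(bucket, numValues - 1); //clamp d == 1.0 into the top bucket
    }

    public static void main(String[] args) {
        for (int i = 0; i < 5; i++) {
            System.out.println(mapToInteger(i / 5.0 + 0.1, 0, 4)); //prints 0, 1, 2, 3, 4
        }
    }
}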
 
Example #6
Source File: TestDL4JLocalExecution.java    From deeplearning4j with Apache License 2.0
@Test
@Ignore
public void testLocalExecutionEarlyStopping() throws Exception {
    EarlyStoppingConfiguration esConf = new EarlyStoppingConfiguration.Builder<MultiLayerNetwork>()
                    .epochTerminationConditions(new MaxEpochsTerminationCondition(100))
                    .scoreCalculator(new DataSetLossCalculator(new IrisDataSetIterator(150, 150), true))
                    .modelSaver(new InMemoryModelSaver()).build();
    Map<String, Object> commands = new HashMap<>();
    commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY, TestDataFactoryProviderMnist.class.getCanonicalName());


    //Define: network config (hyperparameter space)
    MultiLayerSpace mls = new MultiLayerSpace.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .updater(new SgdSpace(new ContinuousParameterSpace(0.0001, 0.1)))
                    .l2(new ContinuousParameterSpace(0.0001, 0.01))
                    .addLayer(new DenseLayerSpace.Builder().nIn(4).nOut(new IntegerParameterSpace(2, 10))
                                                    .activation(new DiscreteParameterSpace<>(Activation.RELU,
                                                                    Activation.TANH))
                                                    .build(),
                                    new IntegerParameterSpace(1, 2)) //1-2 identical layers (except nIn)
                    .addLayer(new OutputLayerSpace.Builder().nOut(3).activation(Activation.SOFTMAX)
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                    .earlyStoppingConfiguration(esConf).build();

    //Define configuration:

    CandidateGenerator candidateGenerator = new RandomSearchGenerator(mls, commands);
    DataProvider dataProvider = new DataSetIteratorFactoryProvider();


    String modelSavePath = new File(System.getProperty("java.io.tmpdir"), "ArbiterDL4JTest2\\").getAbsolutePath();

    File f = new File(modelSavePath);
    if (f.exists())
        f.delete();
    f.mkdir();
    f.deleteOnExit();
    if (!f.exists())
        throw new RuntimeException();

    OptimizationConfiguration configuration = new OptimizationConfiguration.Builder()
                    .candidateGenerator(candidateGenerator).dataProvider(dataProvider)
                    .modelSaver(new FileModelSaver(modelSavePath)).scoreFunction(new TestSetLossScoreFunction())
                    .terminationConditions(new MaxTimeCondition(2, TimeUnit.MINUTES),
                                    new MaxCandidatesCondition(100))
                    .build();

    IOptimizationRunner runner = new LocalOptimizationRunner(configuration,
                    new MultiLayerNetworkTaskCreator(new ClassificationEvaluator()));

    runner.execute();
    System.out.println("----- COMPLETE -----");
}
 
Example #7
Source File: GridSearchCandidateGenerator.java    From deeplearning4j with Apache License 2.0
@Override
protected void initialize() {
    super.initialize();

    List<ParameterSpace> leaves = LeafUtils.getUniqueObjects(parameterSpace.collectLeaves());
    int nParams = leaves.size();

    //Work out for each parameter: is it continuous or discrete?
    // for grid search: discrete values are grid-searchable as-is
    // continuous values: discretize using 'discretizationCount' bins
    // integer values: use min(max-min+1, discretizationCount) values. i.e., discretize if necessary
    numValuesPerParam = new int[nParams];
    long searchSize = 1;
    for (int i = 0; i < nParams; i++) {
        ParameterSpace ps = leaves.get(i);
        if (ps instanceof DiscreteParameterSpace) {
            DiscreteParameterSpace dps = (DiscreteParameterSpace) ps;
            numValuesPerParam[i] = dps.numValues();
        } else if (ps instanceof IntegerParameterSpace) {
            IntegerParameterSpace ips = (IntegerParameterSpace) ps;
            int min = ips.getMin();
            int max = ips.getMax();
            //Discretize, as some integer ranges are much too large to search (i.e., num. neural network units, between 100 and 1000)
            numValuesPerParam[i] = Math.min(max - min + 1, discretizationCount);
        } else if (ps instanceof FixedValue){
            numValuesPerParam[i] = 1;
        } else {
            numValuesPerParam[i] = discretizationCount;
        }
        searchSize *= numValuesPerParam[i];
    }

    if (searchSize >= Integer.MAX_VALUE)
        throw new IllegalStateException("Invalid search: cannot process search with " + searchSize
                        + " candidates > Integer.MAX_VALUE"); //TODO find a more reasonable upper bound?

    order = new ConcurrentLinkedQueue<>();

    totalNumCandidates = (int) searchSize;
    switch (mode) {
        case Sequential:
            for (int i = 0; i < totalNumCandidates; i++) {
                order.add(i);
            }
            break;
        case RandomOrder:
            List<Integer> tempList = new ArrayList<>(totalNumCandidates);
            for (int i = 0; i < totalNumCandidates; i++) {
                tempList.add(i);
            }

            Collections.shuffle(tempList, new RandomAdaptor(rng));
            order.addAll(tempList);
            break;
        default:
            throw new RuntimeException();
    }

}
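The IntegerParameterSpace branch above is where wide integer ranges interact with grid search: rather than enumerating every value, the range is capped at discretizationCount grid points. The sketch below is a hypothetical illustration of that candidate-count arithmetic using evenly spaced points; Arbiter itself derives the concrete values by mapping grid fractions in [0, 1] through each leaf space's getValue.

public class GridDiscretizationSketch {
    public static void main(String[] args) {
        //An IntegerParameterSpace(100, 1000) with discretizationCount = 5
        //contributes min(1000 - 100 + 1, 5) = 5 values to the grid.
        int min = 100, max = 1000, discretizationCount = 5;
        int numValues = Math.min(max - min + 1, discretizationCount);
        for (int j = 0; j < numValues; j++) {
            double fraction = (numValues == 1) ? 0.5 : j / (double) (numValues - 1);
            int gridPoint = min + (int) Math.round(fraction * (max - min));
            System.out.println(gridPoint); //100, 325, 550, 775, 1000
        }
    }
}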
 
Example #8
Source File: ArbiterCLIRunnerTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testCliRunner() throws Exception {
    ArbiterCliRunner cliRunner = new ArbiterCliRunner();

    //Define: network config (hyperparameter space)
    MultiLayerSpace mls = new MultiLayerSpace.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(new SgdSpace(new ContinuousParameterSpace(0.0001, 0.1)))
            .l2(new ContinuousParameterSpace(0.0001, 0.01))
            .addLayer(new DenseLayerSpace.Builder().nIn(784).nOut(new IntegerParameterSpace(2, 10))
                    .activation(new DiscreteParameterSpace<>(Activation.RELU, Activation.TANH))
                    .build())
            .addLayer(new OutputLayerSpace.Builder().nOut(10).activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
            .numEpochs(3).build();
    assertEquals(mls, MultiLayerSpace.fromJson(mls.toJson()));

    //Define configuration:
    Map<String, Object> commands = new HashMap<>();
    commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY, TestDataFactoryProviderMnist.class.getCanonicalName());

    CandidateGenerator candidateGenerator = new RandomSearchGenerator(mls, commands);
    DataProvider dataProvider = new DataSetIteratorFactoryProvider();

    //String modelSavePath = FilenameUtils.concat(System.getProperty("java.io.tmpdir"), "ArbiterDL4JTest/");
    String modelSavePath = new File(System.getProperty("java.io.tmpdir"), "ArbiterDL4JTest/").getAbsolutePath();
    File dir = new File(modelSavePath);
    if (!dir.exists())
        dir.mkdirs();
    String configPath = System.getProperty("java.io.tmpdir") + File.separator + UUID.randomUUID().toString() + ".json";
    OptimizationConfiguration configuration = new OptimizationConfiguration.Builder()
            .candidateGenerator(candidateGenerator)
            .dataProvider(dataProvider)
            .modelSaver(new FileModelSaver(modelSavePath))
            .scoreFunction(new TestSetLossScoreFunction())
            .terminationConditions(new MaxTimeCondition(30, TimeUnit.SECONDS),
                    new MaxCandidatesCondition(5))
            .build();
    assertEquals(configuration, OptimizationConfiguration.fromJson(configuration.toJson()));

    FileUtils.writeStringToFile(new File(configPath), configuration.toJson());
    //System.out.println(configuration.toJson());

    log.info("Starting test");
    cliRunner.runMain(
            "--dataSetIteratorClass",
            TestDataFactoryProviderMnist.class.getCanonicalName(),
            "--neuralNetType",
            ArbiterCliRunner.MULTI_LAYER_NETWORK,
            "--optimizationConfigPath",
            configPath
    );
}
 
Example #9
Source File: TestGraphLocalExecution.java    From deeplearning4j with Apache License 2.0
@Test
public void testLocalExecutionEarlyStopping() throws Exception {
    EarlyStoppingConfiguration<ComputationGraph> esConf = new EarlyStoppingConfiguration.Builder<ComputationGraph>()
            .epochTerminationConditions(new MaxEpochsTerminationCondition(2))
            .scoreCalculator(new ScoreProvider())
            .modelSaver(new InMemoryModelSaver()).build();
    Map<String, Object> commands = new HashMap<>();
    commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY, TestDataFactoryProviderMnist.class.getCanonicalName());

    //Define: network config (hyperparameter space)
    ComputationGraphSpace cgs = new ComputationGraphSpace.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(new AdamSpace(new ContinuousParameterSpace(0.0001, 0.1)))
            .l2(new ContinuousParameterSpace(0.0001, 0.01)).addInputs("in")
            .setInputTypes(InputType.feedForward(784))
            .addLayer("first",
                    new DenseLayerSpace.Builder().nIn(784).nOut(new IntegerParameterSpace(2, 10))
                            .activation(new DiscreteParameterSpace<>(Activation.RELU,
                                    Activation.TANH))
                            .build(),
                    "in") //1-2 identical layers (except nIn)
            .addLayer("out", new OutputLayerSpace.Builder().nOut(10).activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "first")
            .setOutputs("out").earlyStoppingConfiguration(esConf).build();

    //Define configuration:

    CandidateGenerator candidateGenerator = new RandomSearchGenerator(cgs, commands);
    DataProvider dataProvider = new DataSetIteratorFactoryProvider();


    String modelSavePath = new File(System.getProperty("java.io.tmpdir"), "ArbiterDL4JTest2CG\\").getAbsolutePath();

    File f = new File(modelSavePath);
    if (f.exists())
        f.delete();
    f.mkdir();
    f.deleteOnExit();
    if (!f.exists())
        throw new RuntimeException();

    OptimizationConfiguration configuration = new OptimizationConfiguration.Builder()
            .candidateGenerator(candidateGenerator)
            .dataProvider(dataProvider)
            .scoreFunction(ScoreFunctions.testSetF1())
            .modelSaver(new FileModelSaver(modelSavePath))
            .terminationConditions(new MaxTimeCondition(15, TimeUnit.SECONDS),
                    new MaxCandidatesCondition(3))
            .build();


    IOptimizationRunner runner = new LocalOptimizationRunner(configuration, new ComputationGraphTaskCreator());
    runner.execute();

    assertEquals(0, runner.numCandidatesFailed());
    assertTrue(runner.numCandidatesCompleted() > 0);
}
 
Example #10
Source File: TestGraphLocalExecution.java    From deeplearning4j with Apache License 2.0
@Test
public void testLocalExecutionMDS() throws Exception {
    //Define: network config (hyperparameter space)
    ComputationGraphSpace mls = new ComputationGraphSpace.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(new SgdSpace(new ContinuousParameterSpace(0.0001, 0.1)))
            .l2(new ContinuousParameterSpace(0.0001, 0.01)).addInputs("in")
            .setInputTypes(InputType.feedForward(784))
            .addLayer("layer0",
                    new DenseLayerSpace.Builder().nIn(784).nOut(new IntegerParameterSpace(2, 10))
                            .activation(new DiscreteParameterSpace<>(Activation.RELU, Activation.TANH))
                            .build(),
                    "in")
            .addLayer("out", new OutputLayerSpace.Builder().nOut(10).activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "layer0")
            .setOutputs("out").numEpochs(3).build();

    //Define configuration:
    CandidateGenerator candidateGenerator = new RandomSearchGenerator(mls, null);

    String modelSavePath = new File(System.getProperty("java.io.tmpdir"), "ArbiterDL4JTest\\").getAbsolutePath();

    File f = new File(modelSavePath);
    if (f.exists())
        f.delete();
    f.mkdir();
    f.deleteOnExit();
    if (!f.exists())
        throw new RuntimeException();

    OptimizationConfiguration configuration = new OptimizationConfiguration.Builder()
            .candidateGenerator(candidateGenerator)
            .dataProvider(new TestMdsDataProvider(1, 32))
            .modelSaver(new FileModelSaver(modelSavePath)).scoreFunction(ScoreFunctions.testSetLoss(true))
            .terminationConditions(new MaxTimeCondition(30, TimeUnit.SECONDS),
                    new MaxCandidatesCondition(3))
            .scoreFunction(ScoreFunctions.testSetAccuracy()) //Overrides the testSetLoss score function set above
            .build();

    IOptimizationRunner runner = new LocalOptimizationRunner(configuration, new ComputationGraphTaskCreator());

    runner.execute();

    assertEquals(0, runner.numCandidatesFailed());
    assertTrue(runner.numCandidatesCompleted() > 0);
}
 
Example #11
Source File: TestGraphLocalExecution.java    From deeplearning4j with Apache License 2.0
@Test
public void testLocalExecution() throws Exception {
    Map<String, Object> commands = new HashMap<>();
    commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY, TestDataFactoryProviderMnist.class.getCanonicalName());

    //Define: network config (hyperparameter space)
    ComputationGraphSpace mls = new ComputationGraphSpace.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(new SgdSpace(new ContinuousParameterSpace(0.0001, 0.1)))
            .l2(new ContinuousParameterSpace(0.0001, 0.01)).addInputs("in")
            .setInputTypes(InputType.feedForward(4))
            .addLayer("layer0",
                    new DenseLayerSpace.Builder().nIn(784).nOut(new IntegerParameterSpace(2, 10))
                            .activation(new DiscreteParameterSpace<>(Activation.RELU, Activation.TANH))
                            .build(),
                    "in")
            .addLayer("out", new OutputLayerSpace.Builder().nOut(10).activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "layer0")
            .setOutputs("out").numEpochs(3).build();

    //Define configuration:
    CandidateGenerator candidateGenerator = new RandomSearchGenerator(mls, commands);
    DataProvider dataProvider = new DataSetIteratorFactoryProvider();

    String modelSavePath = new File(System.getProperty("java.io.tmpdir"), "ArbiterDL4JTest\\").getAbsolutePath();

    File f = new File(modelSavePath);
    if (f.exists())
        f.delete();
    f.mkdir();
    f.deleteOnExit();
    if (!f.exists())
        throw new RuntimeException();

    OptimizationConfiguration configuration = new OptimizationConfiguration.Builder()
            .candidateGenerator(candidateGenerator).dataProvider(dataProvider)
            .modelSaver(new FileModelSaver(modelSavePath)).scoreFunction(ScoreFunctions.testSetLoss(true))
            .terminationConditions(new MaxTimeCondition(30, TimeUnit.SECONDS),
                    new MaxCandidatesCondition(3))
            .build();

    IOptimizationRunner runner = new LocalOptimizationRunner(configuration,
            new ComputationGraphTaskCreator(new ClassificationEvaluator()));

    runner.execute();

    assertEquals(0, runner.numCandidatesFailed());
    assertTrue(runner.numCandidatesCompleted() > 0);
}
 
Example #12
Source File: TestJson.java    From deeplearning4j with Apache License 2.0
@Test
public void testOptimizationFromJsonDataSource() {
    for(boolean withProperties : new boolean[]{false, true}) {
        //Define: network config (hyperparameter space)
        ComputationGraphSpace cgs = new ComputationGraphSpace.Builder()
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                .updater(new AdaMaxSpace(new ContinuousParameterSpace(0.0001, 0.1)))
                .l2(new ContinuousParameterSpace(0.0001, 0.01)).addInputs("in")
                .setInputTypes(InputType.feedForward(4))
                .addLayer("first",
                        new DenseLayerSpace.Builder().nIn(4).nOut(new IntegerParameterSpace(2, 10))
                                .activation(new DiscreteParameterSpace<>(Activation.RELU,
                                        Activation.TANH))
                                .build(),
                        "in") //1-2 identical layers (except nIn)
                .addLayer("out", new OutputLayerSpace.Builder().nOut(3).activation(Activation.SOFTMAX)
                        .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "first")
                .setOutputs("out").build();

        //Define configuration:
        Map<String, Object> commands = new HashMap<>();
        commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY, TestDataFactoryProviderMnist.class.getCanonicalName());

        CandidateGenerator candidateGenerator = new RandomSearchGenerator(cgs, commands);

        Properties p = new Properties();
        p.setProperty("minibatch", "16");

        OptimizationConfiguration configuration =
                new OptimizationConfiguration.Builder().candidateGenerator(candidateGenerator)
                        .dataSource(MnistDataSource.class, (withProperties ? p : null))
                        .scoreFunction(new TestSetLossScoreFunction())
                        .terminationConditions(new MaxTimeCondition(2, TimeUnit.MINUTES),
                                new MaxCandidatesCondition(100))
                        .build();

        String json = configuration.toJson();
        OptimizationConfiguration loadConf = OptimizationConfiguration.fromJson(json);
        assertEquals(configuration, loadConf);
        assertNotNull(loadConf.getDataSource());
        if(withProperties){
            assertNotNull(loadConf.getDataSourceProperties());
        }
    }
}
 
Example #13
Source File: TestJson.java    From deeplearning4j with Apache License 2.0
@Test
public void testOptimizationFromJson() {
    EarlyStoppingConfiguration<ComputationGraph> esConf =
                    new EarlyStoppingConfiguration.Builder<ComputationGraph>()
                                    .epochTerminationConditions(new MaxEpochsTerminationCondition(100))
                                    .scoreCalculator(new DataSetLossCalculatorCG(new IrisDataSetIterator(150, 150),
                                                    true))
                                    .modelSaver(new InMemoryModelSaver<ComputationGraph>()).build();

    //Define: network config (hyperparameter space)
    ComputationGraphSpace cgs = new ComputationGraphSpace.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .updater(new AdaMaxSpace(new ContinuousParameterSpace(0.0001, 0.1)))
                    .l2(new ContinuousParameterSpace(0.0001, 0.01)).addInputs("in")
                    .setInputTypes(InputType.feedForward(4))
                    .addLayer("first",
                                    new DenseLayerSpace.Builder().nIn(4).nOut(new IntegerParameterSpace(2, 10))
                                                    .activation(new DiscreteParameterSpace<>(Activation.RELU,
                                                                    Activation.TANH))
                                                    .build(),
                                    "in") //1-2 identical layers (except nIn)
                    .addLayer("out", new OutputLayerSpace.Builder().nOut(3).activation(Activation.SOFTMAX)
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "first")
                    .setOutputs("out").earlyStoppingConfiguration(esConf).build();

    //Define configuration:
    Map<String, Object> commands = new HashMap<>();
    commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY, TestDataFactoryProviderMnist.class.getCanonicalName());

    CandidateGenerator candidateGenerator = new RandomSearchGenerator(cgs, commands);
    DataProvider dataProvider = new DataSetIteratorFactoryProvider();


    OptimizationConfiguration configuration =
                    new OptimizationConfiguration.Builder().candidateGenerator(candidateGenerator)
                                    .dataProvider(dataProvider).scoreFunction(new TestSetLossScoreFunction())
                                    .terminationConditions(new MaxTimeCondition(2, TimeUnit.MINUTES),
                                                    new MaxCandidatesCondition(100))
                                    .build();

    String json = configuration.toJson();
    OptimizationConfiguration loadConf = OptimizationConfiguration.fromJson(json);
    assertEquals(configuration, loadConf);
}
 
Example #14
Source File: TestDL4JLocalExecution.java    From deeplearning4j with Apache License 2.0
@Test
public void testOcnn() {
    Map<String, Object> commands = new HashMap<>();
    commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY, TestDataFactoryProviderMnist.class.getCanonicalName());


    //Define: network config (hyperparameter space)
    MultiLayerSpace mls = new MultiLayerSpace.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(new SgdSpace(new ContinuousParameterSpace(0.0001, 0.1)))
            .l2(new ContinuousParameterSpace(0.0001, 0.01))
            .addLayer(
                    new DenseLayerSpace.Builder().nOut(new IntegerParameterSpace(250, 500))
                            .activation(new DiscreteParameterSpace<>(Activation.RELU,
                                    Activation.TANH))
                            .build(),
                    new IntegerParameterSpace(1, 2)) //1-2 identical layers (except nIn)
            .addLayer(new OCNNLayerSpace.Builder().nu(new ContinuousParameterSpace(0.0001, 0.1))
                    .numHidden(new DiscreteParameterSpace<Integer>(784 / 2,784 / 4))
                    .activation(Activation.HARDSIGMOID)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
            .setInputType(InputType.convolutionalFlat(28,28,1))
            .build();

    //Define configuration:

    CandidateGenerator candidateGenerator = new RandomSearchGenerator(mls, commands);
    DataProvider dataProvider = new DataSetIteratorFactoryProvider();


    String modelSavePath = new File(System.getProperty("java.io.tmpdir"), "ArbiterDL4JTest3\\").getAbsolutePath();

    File f = new File(modelSavePath);
    if (f.exists())
        f.delete();
    f.mkdir();
    f.deleteOnExit();
    if (!f.exists())
        throw new RuntimeException();

    OptimizationConfiguration configuration = new OptimizationConfiguration.Builder()
            .candidateGenerator(candidateGenerator).dataProvider(dataProvider)
            .modelSaver(new FileModelSaver(modelSavePath)).scoreFunction(new TestSetLossScoreFunction())
            .terminationConditions(new MaxTimeCondition(2, TimeUnit.MINUTES),
                    new MaxCandidatesCondition(100))
            .build();


    //candidate generation: uncomment execute if you want to run
    IOptimizationRunner runner = new LocalOptimizationRunner(configuration,
            new MultiLayerNetworkTaskCreator(new ClassificationEvaluator()));

    Candidate candidate = candidateGenerator.getCandidate();

    // runner.execute();
    System.out.println("----- COMPLETE -----");
}
 
Example #15
Source File: HyperParameterTuningArbiterUiExample.java    From Java-Deep-Learning-Cookbook with MIT License
public static void main(String[] args) {

        ParameterSpace<Double> learningRateParam = new ContinuousParameterSpace(0.0001,0.01);
        ParameterSpace<Integer> layerSizeParam = new IntegerParameterSpace(5,11);
        MultiLayerSpace hyperParamaterSpace = new MultiLayerSpace.Builder()
                .updater(new AdamSpace(learningRateParam))
                //  .weightInit(WeightInit.DISTRIBUTION).dist(new LogNormalDistribution())
                .addLayer(new DenseLayerSpace.Builder()
                        .activation(Activation.RELU)
                        .nIn(11)
                        .nOut(layerSizeParam)
                        .build())
                .addLayer(new DenseLayerSpace.Builder()
                        .activation(Activation.RELU)
                        .nIn(layerSizeParam)
                        .nOut(layerSizeParam)
                        .build())
                .addLayer(new OutputLayerSpace.Builder()
                        .activation(Activation.SIGMOID)
                        .lossFunction(LossFunctions.LossFunction.XENT)
                        .nOut(1)
                        .build())
                .build();

        Map<String,Object> dataParams = new HashMap<>();
        dataParams.put("batchSize",new Integer(10));

        Map<String,Object> commands = new HashMap<>();
        commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY, HyperParameterTuningArbiterUiExample.ExampleDataSource.class.getCanonicalName());

        CandidateGenerator candidateGenerator = new RandomSearchGenerator(hyperParamaterSpace,dataParams);

        Properties dataSourceProperties = new Properties();
        dataSourceProperties.setProperty("minibatchSize", "64");

        ResultSaver modelSaver = new FileModelSaver("resources/");
        ScoreFunction scoreFunction = new EvaluationScoreFunction(org.deeplearning4j.eval.Evaluation.Metric.ACCURACY);


        TerminationCondition[] conditions = {
                new MaxTimeCondition(120, TimeUnit.MINUTES),
                new MaxCandidatesCondition(30)

        };

        OptimizationConfiguration optimizationConfiguration = new OptimizationConfiguration.Builder()
                .candidateGenerator(candidateGenerator)
                .dataSource(HyperParameterTuningArbiterUiExample.ExampleDataSource.class,dataSourceProperties)
                .modelSaver(modelSaver)
                .scoreFunction(scoreFunction)
                .terminationConditions(conditions)
                .build();

        IOptimizationRunner runner = new LocalOptimizationRunner(optimizationConfiguration,new MultiLayerNetworkTaskCreator());
        //Persist tuning stats to file so the Arbiter UI can display them:
        StatsStorage ss = new FileStatsStorage(new File("HyperParamOptimizationStats.dl4j"));
        runner.addListeners(new ArbiterStatusListener(ss));
        UIServer.getInstance().attach(ss);
        //runner.addListeners(new LoggingStatusListener()); //new ArbiterStatusListener(ss)
        runner.execute();

        //Print the best hyper params

        double bestScore = runner.bestScore();
        int bestCandidateIndex = runner.bestScoreCandidateIndex();
        int numberOfConfigsEvaluated = runner.numCandidatesCompleted();

        String s = "Best score: " + bestScore + "\n" +
                "Index of model with best score: " + bestCandidateIndex + "\n" +
                "Number of configurations evaluated: " + numberOfConfigsEvaluated + "\n";

        System.out.println(s);

    }
 
Example #16
Source File: TestDL4JLocalExecution.java    From deeplearning4j with Apache License 2.0
@Test
@org.junit.Ignore
public void testLocalExecutionGridSearch() throws Exception {

    //Define: network config (hyperparameter space)
    MultiLayerSpace mls = new MultiLayerSpace.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .updater(new SgdSpace(new ContinuousParameterSpace(0.0001, 0.2)))
                    .l2(new ContinuousParameterSpace(0.0001, 0.01))
                    .addLayer(
                                    new DenseLayerSpace.Builder().nIn(4).nOut(new IntegerParameterSpace(2, 10))
                                                    .activation(new DiscreteParameterSpace<>(Activation.RELU,
                                                                    Activation.TANH))
                                                    .build(),
                                    new IntegerParameterSpace(1, 2)) //1-2 identical layers (except nIn)
                    .addLayer(new OutputLayerSpace.Builder().nOut(3).activation(Activation.SOFTMAX)
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                    .numEpochs(3).build();
    Map<String, Object> commands = new HashMap<>();
    commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY, TestDataFactoryProviderMnist.class.getCanonicalName());

    CandidateGenerator candidateGenerator = new GridSearchCandidateGenerator(mls, 5,
                    GridSearchCandidateGenerator.Mode.Sequential, commands);
    DataProvider dataProvider = new DataSetIteratorFactoryProvider();

    String modelSavePath = new File(System.getProperty("java.io.tmpdir"), "ArbiterDL4JTest/").getAbsolutePath();

    File f = new File(modelSavePath);
    if (f.exists())
        f.delete();
    f.mkdir();
    f.deleteOnExit();
    if (!f.exists())
        throw new RuntimeException();

    OptimizationConfiguration configuration = new OptimizationConfiguration.Builder()
                    .candidateGenerator(candidateGenerator).dataProvider(dataProvider)
                    .modelSaver(new FileModelSaver(modelSavePath)).scoreFunction(new TestSetLossScoreFunction())
                    .terminationConditions(new MaxTimeCondition(2, TimeUnit.MINUTES),
                                    new MaxCandidatesCondition(100))
                    .build();

    IOptimizationRunner runner = new LocalOptimizationRunner(configuration,
                    new MultiLayerNetworkTaskCreator(new ClassificationEvaluator()));

    runner.execute();

    System.out.println("----- COMPLETE -----");
}
 
Example #17
Source File: TestMultiLayerSpace.java    From deeplearning4j with Apache License 2.0
@Test
public void testMathOps() {

    ParameterSpace<Integer> firstLayerSize = new IntegerParameterSpace(10,30);
    ParameterSpace<Integer> secondLayerSize = new MathOp<>(firstLayerSize, Op.MUL, 3);
    ParameterSpace<Double> firstLayerLR = new ContinuousParameterSpace(0.01, 0.1);
    ParameterSpace<Double> secondLayerLR = new MathOp<>(firstLayerLR, Op.ADD, 0.2);

    MultiLayerSpace mls =
            new MultiLayerSpace.Builder().updater(new Sgd(0.005))
                    .seed(12345)
                    .layer(new DenseLayerSpace.Builder().nOut(firstLayerSize)
                            .updater(new AdamSpace(firstLayerLR))
                            .build())
                    .layer(new OutputLayerSpace.Builder().nOut(secondLayerSize)
                            .updater(new AdamSpace(secondLayerLR))
                            .activation(Activation.SOFTMAX)
                            .build())
                    .setInputType(InputType.feedForward(10))
                    .build();

    int nParams = mls.numParameters();
    assertEquals(2, nParams);

    new RandomSearchGenerator(mls, null);    //Initializes the indices

    Random r = new Random(12345);
    for( int i=0; i<10; i++ ){
        double[] d = new double[nParams];
        for( int j=0; j<d.length; j++ ){
            d[j] = r.nextDouble();
        }

        MultiLayerConfiguration conf = mls.getValue(d).getMultiLayerConfiguration();
        long l0Size = ((FeedForwardLayer)conf.getConf(0).getLayer()).getNOut();
        long l1Size = ((FeedForwardLayer)conf.getConf(1).getLayer()).getNOut();
        assertEquals(3*l0Size, l1Size);

        double l0Lr = ((FeedForwardLayer)conf.getConf(0).getLayer()).getIUpdater().getLearningRate(0,0);
        double l1Lr = ((FeedForwardLayer)conf.getConf(1).getLayer()).getIUpdater().getLearningRate(0,0);
        assertEquals(l0Lr+0.2, l1Lr, 1e-6);
    }
}
 
Example #18
Source File: TestMultiLayerSpace.java    From deeplearning4j with Apache License 2.0
@Test
public void testInputTypeBasic() throws Exception {

    ParameterSpace<Integer> layerSizeHyperparam = new IntegerParameterSpace(20, 60);

    MultiLayerSpace hyperparameterSpace = new MultiLayerSpace.Builder().l2(0.0001)
                    .weightInit(WeightInit.XAVIER).updater(new Nesterovs())
                    .addLayer(new ConvolutionLayerSpace.Builder().kernelSize(5, 5).nIn(1).stride(1, 1)
                                    .nOut(layerSizeHyperparam).activation(Activation.IDENTITY).build())
                    .addLayer(new SubsamplingLayerSpace.Builder().poolingType(SubsamplingLayer.PoolingType.MAX)
                                    .kernelSize(2, 2).stride(2, 2).build())
                    .addLayer(new ConvolutionLayerSpace.Builder().kernelSize(5, 5)
                                    //Note that nIn need not be specified in later layers
                                    .stride(1, 1).nOut(50).activation(Activation.IDENTITY).build())
                    .addLayer(new SubsamplingLayerSpace.Builder().poolingType(SubsamplingLayer.PoolingType.MAX)
                                    .kernelSize(2, 2).stride(2, 2).build())
                    .addLayer(new DenseLayerSpace.Builder().activation(Activation.RELU).nOut(500).build())
                    .addLayer(new OutputLayerSpace.Builder()
                                    .lossFunction(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nOut(10)
                                    .activation(Activation.SOFTMAX).build())
                    .setInputType(InputType.convolutionalFlat(28, 28, 1)).build();


    DataProvider dataProvider = new TestDataSetProvider();

    File f = testDir.newFolder();
    if (f.exists())
        f.delete();
    f.mkdir();
    ResultSaver modelSaver = new FileModelSaver(f.getAbsolutePath());

    ScoreFunction scoreFunction = new TestSetAccuracyScoreFunction();

    int maxCandidates = 4;
    TerminationCondition[] terminationConditions;
    terminationConditions = new TerminationCondition[] {new MaxCandidatesCondition(maxCandidates)};

    //Given these configuration options, let's put them all together:
    OptimizationConfiguration configuration = new OptimizationConfiguration.Builder()
                    .candidateGenerator(new RandomSearchGenerator(hyperparameterSpace, null))
                    .dataProvider(dataProvider).modelSaver(modelSaver).scoreFunction(scoreFunction)
                    .terminationConditions(terminationConditions).build();

    IOptimizationRunner runner = new LocalOptimizationRunner(configuration, new MultiLayerNetworkTaskCreator());
    runner.execute();

    assertEquals(maxCandidates, runner.getResults().size());
}
 
Example #19
Source File: TestMultiLayerSpace.java    From deeplearning4j with Apache License 2.0
@Test
public void testVariationalAutoencoderLayerSpaceBasic() {
    MultiLayerSpace mls =
                    new MultiLayerSpace.Builder()
                                    .updater(new Sgd(0.005)).seed(
                                                    12345)
                                    .addLayer(new VariationalAutoencoderLayerSpace.Builder()
                                                    .nIn(new IntegerParameterSpace(50, 75)).nOut(200)
                                                    .encoderLayerSizes(234, 567).decoderLayerSizes(123, 456)
                                                    .reconstructionDistribution(
                                                                    new DiscreteParameterSpace<ReconstructionDistribution>(
                                                                                    new GaussianReconstructionDistribution(),
                                                                                    new BernoulliReconstructionDistribution()))
                                                    .build())
                                    .build();

    int numParams = mls.numParameters();

    //Assign numbers to each leaf ParameterSpace object (normally done by candidate generator - manual here for testing)
    List<ParameterSpace> noDuplicatesList = LeafUtils.getUniqueObjects(mls.collectLeaves());

    //Second: assign each a number
    int c = 0;
    for (ParameterSpace ps : noDuplicatesList) {
        int np = ps.numParameters();
        if (np == 1) {
            ps.setIndices(c++);
        } else {
            int[] values = new int[np];
            for (int j = 0; j < np; j++)
                values[c++] = j;
            ps.setIndices(values);
        }
    }

    double[] zeros = new double[numParams];

    DL4JConfiguration configuration = mls.getValue(zeros);

    MultiLayerConfiguration conf = configuration.getMultiLayerConfiguration();
    assertEquals(1, conf.getConfs().size());

    NeuralNetConfiguration nnc = conf.getConf(0);
    VariationalAutoencoder vae = (VariationalAutoencoder) nnc.getLayer();

    assertEquals(50, vae.getNIn());
    assertEquals(200, vae.getNOut());

    assertArrayEquals(new int[] {234, 567}, vae.getEncoderLayerSizes());
    assertArrayEquals(new int[] {123, 456}, vae.getDecoderLayerSizes());

    assertTrue(vae.getOutputDistribution() instanceof GaussianReconstructionDistribution);



    double[] ones = new double[numParams];
    for (int i = 0; i < ones.length; i++)
        ones[i] = 1.0;

    configuration = mls.getValue(ones);

    conf = configuration.getMultiLayerConfiguration();
    assertEquals(1, conf.getConfs().size());

    nnc = conf.getConf(0);
    vae = (VariationalAutoencoder) nnc.getLayer();

    assertEquals(75, vae.getNIn());
    assertEquals(200, vae.getNOut());

    assertArrayEquals(new int[] {234, 567}, vae.getEncoderLayerSizes());
    assertArrayEquals(new int[] {123, 456}, vae.getDecoderLayerSizes());

    assertTrue(vae.getOutputDistribution() instanceof BernoulliReconstructionDistribution);
}
 
Example #20
Source File: MNISTOptimizationTest.java    From deeplearning4j with Apache License 2.0
public static void main(String[] args) throws Exception {
    EarlyStoppingConfiguration<MultiLayerNetwork> esConf =
                    new EarlyStoppingConfiguration.Builder<MultiLayerNetwork>()
                                    .epochTerminationConditions(new MaxEpochsTerminationCondition(3))
                                    .iterationTerminationConditions(
                                                    new MaxTimeIterationTerminationCondition(5, TimeUnit.MINUTES),
                                                    new MaxScoreIterationTerminationCondition(4.6) //Terminate if score exceeds ~2x the random-guess score: -log_e(0.1) ~= 2.3
                                    ).scoreCalculator(new DataSetLossCalculator(new MnistDataSetIterator(64, 2000, false, false, true, 123), true)).modelSaver(new InMemoryModelSaver()).build();

    //Define: network config (hyperparameter space)
    MultiLayerSpace mls = new MultiLayerSpace.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .updater(new SgdSpace(new ContinuousParameterSpace(0.0001, 0.2)))
                    .l2(new ContinuousParameterSpace(0.0001, 0.05))
                    .addLayer(
                                    new ConvolutionLayerSpace.Builder().nIn(1)
                                                    .nOut(new IntegerParameterSpace(5, 30))
                                                    .kernelSize(new DiscreteParameterSpace<>(new int[] {3, 3},
                                                                    new int[] {4, 4}, new int[] {5, 5}))
                                                    .stride(new DiscreteParameterSpace<>(new int[] {1, 1},
                                                                    new int[] {2, 2}))
                                                    .activation(new DiscreteParameterSpace<>(Activation.RELU,
                                                                    Activation.SOFTPLUS, Activation.LEAKYRELU))
                                                    .build(),
                                    new IntegerParameterSpace(1, 2)) //1-2 identical layers
                    .addLayer(new DenseLayerSpace.Builder().nIn(4).nOut(new IntegerParameterSpace(2, 10))
                                    .activation(new DiscreteParameterSpace<>(Activation.RELU, Activation.TANH))
                                    .build(), new IntegerParameterSpace(0, 1)) //0 to 1 layers
                    .addLayer(new OutputLayerSpace.Builder().nOut(10).activation(Activation.SOFTMAX)
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                    .earlyStoppingConfiguration(esConf).build();
    Map<String, Object> commands = new HashMap<>();
    commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY, TestDataFactoryProviderMnist.class.getCanonicalName());

    //Define configuration:
    CandidateGenerator candidateGenerator = new RandomSearchGenerator(mls, commands);
    DataProvider dataProvider = new MnistDataSetProvider();


    String modelSavePath = new File(System.getProperty("java.io.tmpdir"), "ArbiterMNISTSmall\\").getAbsolutePath();

    File f = new File(modelSavePath);
    if (f.exists())
        f.delete();
    f.mkdir();
    if (!f.exists())
        throw new RuntimeException();

    OptimizationConfiguration configuration = new OptimizationConfiguration.Builder()
                    .candidateGenerator(candidateGenerator)
                    .dataProvider(dataProvider)
                    .modelSaver(new FileModelSaver(modelSavePath)).scoreFunction(new TestSetLossScoreFunction(true))
                    .terminationConditions(new MaxTimeCondition(120, TimeUnit.MINUTES),
                                    new MaxCandidatesCondition(100))
                    .build();

    IOptimizationRunner runner = new LocalOptimizationRunner(configuration, new MultiLayerNetworkTaskCreator());

    //        ArbiterUIServer server = ArbiterUIServer.getInstance();
    //        runner.addListeners(new UIOptimizationRunnerStatusListener(server));

    runner.execute();


    System.out.println("----- COMPLETE -----");
}
 
Example #21
Source File: TestBasic.java    From deeplearning4j with Apache License 2.0
@Test
@Ignore
public void testBasicMnistCompGraph() throws Exception {

    ComputationGraphSpace cgs = new ComputationGraphSpace.Builder()
            .updater(new SgdSpace(new ContinuousParameterSpace(0.0001, 0.2)))
            .l2(new ContinuousParameterSpace(0.0001, 0.05))
            .addInputs("in")
            .addLayer("0",
                    new ConvolutionLayerSpace.Builder().nIn(1)
                            .nOut(new IntegerParameterSpace(5, 30))
                            .kernelSize(new DiscreteParameterSpace<>(new int[]{3, 3},
                                    new int[]{4, 4}, new int[]{5, 5}))
                            .stride(new DiscreteParameterSpace<>(new int[]{1, 1},
                                    new int[]{2, 2}))
                            .activation(new DiscreteParameterSpace<>(Activation.RELU,
                                    Activation.SOFTPLUS, Activation.LEAKYRELU))
                            .build(), "in")
            .addLayer("1", new DenseLayerSpace.Builder().nOut(new IntegerParameterSpace(32, 128))
                    .activation(new DiscreteParameterSpace<>(Activation.RELU, Activation.TANH))
                    .build(), "0")
            .addLayer("out", new OutputLayerSpace.Builder().nOut(10).activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "1")
            .setOutputs("out")
            .setInputTypes(InputType.convolutionalFlat(28, 28, 1))
            .build();

    //Define configuration:
    CandidateGenerator candidateGenerator = new RandomSearchGenerator(cgs);
    DataProvider dataProvider = new MnistDataSetProvider();


    String modelSavePath = new File(System.getProperty("java.io.tmpdir"), "ArbiterUiTestBasicMnistCG\\").getAbsolutePath();

    File f = new File(modelSavePath);
    if (f.exists())
        f.delete();
    f.mkdir();
    if (!f.exists())
        throw new RuntimeException();

    OptimizationConfiguration configuration =
            new OptimizationConfiguration.Builder()
                    .candidateGenerator(candidateGenerator).dataProvider(dataProvider)
                    .modelSaver(new FileModelSaver(modelSavePath))
                    .scoreFunction(new TestSetLossScoreFunction(true))
                    .terminationConditions(new MaxTimeCondition(120, TimeUnit.MINUTES),
                            new MaxCandidatesCondition(100))
                    .build();

    IOptimizationRunner runner =
            new LocalOptimizationRunner(configuration, new ComputationGraphTaskCreator());

    StatsStorage ss = new InMemoryStatsStorage();
    StatusListener sl = new ArbiterStatusListener(ss);
    runner.addListeners(sl);

    UIServer.getInstance().attach(ss);

    runner.execute();
    Thread.sleep(100000);
}
 
Example #22
Source File: TestBasic.java    From deeplearning4j with Apache License 2.0
@Test
@Ignore
public void testBasicMnistDataSource() throws InterruptedException {
    ParameterSpace<Double> learningRateHyperparam = new ContinuousParameterSpace(0.0001, 0.1);
    ParameterSpace<Integer> layerSizeHyperparam = new IntegerParameterSpace(16, 256);

    MultiLayerSpace hyperparameterSpace = new MultiLayerSpace.Builder()
            .weightInit(WeightInit.XAVIER)
            .l2(0.0001)
            .updater(new SgdSpace(learningRateHyperparam))
            .addLayer(new DenseLayerSpace.Builder()
                    .nIn(784)
                    .activation(Activation.LEAKYRELU)
                    .nOut(layerSizeHyperparam)
                    .build())
            .addLayer(new OutputLayerSpace.Builder()
                    .nOut(10)
                    .activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT)
                    .build())
            .build();
    CandidateGenerator candidateGenerator = new RandomSearchGenerator(hyperparameterSpace, null);
    ScoreFunction scoreFunction = new EvaluationScoreFunction(Evaluation.Metric.ACCURACY);
    TerminationCondition[] terminationConditions = {
            new MaxTimeCondition(5, TimeUnit.MINUTES),
            new MaxCandidatesCondition(2)};

    String modelSavePath = new File(System.getProperty("java.io.tmpdir"), "ArbiterUiTestBasicMnist\\").getAbsolutePath();

    File f = new File(modelSavePath);
    if (f.exists())
        f.delete();
    f.mkdir();
    if (!f.exists())
        throw new RuntimeException();
    Class<? extends DataSource> ds = MnistDataSource.class;
    Properties dsp = new Properties();
    dsp.setProperty("minibatch", "8");
    OptimizationConfiguration configuration = new OptimizationConfiguration.Builder()
            .candidateGenerator(candidateGenerator).dataSource(ds, dsp)
            .modelSaver(new FileModelSaver(modelSavePath))
            .scoreFunction(scoreFunction)
            .terminationConditions(terminationConditions)
            .build();

    IOptimizationRunner runner = new LocalOptimizationRunner(configuration, new MultiLayerNetworkTaskCreator());

    StatsStorage ss = new InMemoryStatsStorage();
    StatusListener sl = new ArbiterStatusListener(ss);
    runner.addListeners(sl);

    UIServer.getInstance().attach(ss);

    runner.execute();
    Thread.sleep(90000);
}
 
Example #23
Source File: HyperParameterTuning.java    From Java-Deep-Learning-Cookbook with MIT License
public static void main(String[] args) {

        ParameterSpace<Double> learningRateParam = new ContinuousParameterSpace(0.0001,0.01);
        ParameterSpace<Integer> layerSizeParam = new IntegerParameterSpace(5,11);
        MultiLayerSpace hyperParamaterSpace = new MultiLayerSpace.Builder()
                .updater(new AdamSpace(learningRateParam))
                //  .weightInit(WeightInit.DISTRIBUTION).dist(new LogNormalDistribution())
                .addLayer(new DenseLayerSpace.Builder()
                        .activation(Activation.RELU)
                        .nIn(11)
                        .nOut(layerSizeParam)
                        .build())
                .addLayer(new DenseLayerSpace.Builder()
                        .activation(Activation.RELU)
                        .nIn(layerSizeParam)
                        .nOut(layerSizeParam)
                        .build())
                .addLayer(new OutputLayerSpace.Builder()
                        .activation(Activation.SIGMOID)
                        .lossFunction(LossFunctions.LossFunction.XENT)
                        .nOut(1)
                        .build())
                .build();

        Map<String,Object> dataParams = new HashMap<>();
        dataParams.put("batchSize", 10);

        // Leftover from the legacy DataSetIteratorFactoryProvider approach; unused once a DataSource is supplied below
        Map<String,Object> commands = new HashMap<>();
        commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY, ExampleDataSource.class.getCanonicalName());

        CandidateGenerator candidateGenerator = new RandomSearchGenerator(hyperParameterSpace, dataParams);

        Properties dataSourceProperties = new Properties();
        dataSourceProperties.setProperty("minibatchSize", "64");

        ResultSaver modelSaver = new FileModelSaver("resources/");
        ScoreFunction scoreFunction = new EvaluationScoreFunction(org.deeplearning4j.eval.Evaluation.Metric.ACCURACY);


        TerminationCondition[] conditions = {
                new MaxTimeCondition(120, TimeUnit.MINUTES),
                new MaxCandidatesCondition(30)
        };

        OptimizationConfiguration optimizationConfiguration = new OptimizationConfiguration.Builder()
                .candidateGenerator(candidateGenerator)
                .dataSource(ExampleDataSource.class,dataSourceProperties)
                .modelSaver(modelSaver)
                .scoreFunction(scoreFunction)
                .terminationConditions(conditions)
                .build();

        IOptimizationRunner runner = new LocalOptimizationRunner(optimizationConfiguration,new MultiLayerNetworkTaskCreator());
        // To persist optimization stats and monitor progress in the Arbiter UI, uncomment the following:
        //StatsStorage ss = new FileStatsStorage(new File("HyperParamOptimizationStats.dl4j"));
        //runner.addListeners(new ArbiterStatusListener(ss));
        //UIServer.getInstance().attach(ss);
        runner.addListeners(new LoggingStatusListener()); // console logging only
        runner.execute();

        //Print a summary of the best candidate

        double bestScore = runner.bestScore();
        int bestCandidateIndex = runner.bestScoreCandidateIndex();
        int numberOfConfigsEvaluated = runner.numCandidatesCompleted();

        String s = "Best score: " + bestScore + "\n" +
                "Index of model with best score: " + bestCandidateIndex + "\n" +
                "Number of configurations evaluated: " + numberOfConfigsEvaluated + "\n";

        System.out.println(s);

    }
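The ExampleDataSource referenced in Example #23 is likewise not part of the snippet. One plausible shape is sketched below, assuming a CSV dataset with 11 feature columns followed by a single 0/1 label column (consistent with nIn(11) and the nOut(1) sigmoid/XENT output layer above); the file names are hypothetical placeholders.

import java.io.File;
import java.util.Properties;
import org.datavec.api.records.reader.impl.csv.CSVRecordReader;
import org.datavec.api.split.FileSplit;
import org.deeplearning4j.arbiter.optimize.api.data.DataSource;
import org.deeplearning4j.datasets.datavec.RecordReaderDataSetIterator;
import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;

// Sketch only: CSV-backed DataSource; "train.csv" and "test.csv" are placeholder paths.
public static class ExampleDataSource implements DataSource {
    private int minibatchSize;

    @Override
    public void configure(Properties properties) {
        // Reads the "minibatchSize" key set via dataSourceProperties above
        this.minibatchSize = Integer.parseInt(properties.getProperty("minibatchSize", "64"));
    }

    @Override
    public Object trainData() {
        return csvIterator("train.csv");
    }

    @Override
    public Object testData() {
        return csvIterator("test.csv");
    }

    private DataSetIterator csvIterator(String path) {
        try {
            CSVRecordReader reader = new CSVRecordReader();
            reader.initialize(new FileSplit(new File(path)));
            // Columns 0..10 are features; column 11 is the 0/1 label. The "regression"
            // constructor keeps the label as one column, matching the nOut(1) output layer.
            return new RecordReaderDataSetIterator(reader, minibatchSize, 11, 11, true);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    @Override
    public Class<?> getDataType() {
        return DataSetIterator.class;
    }
}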
 
Example #24
Source File: HyperParameterTuningArbiterUiExample.java    From Java-Deep-Learning-Cookbook with MIT License 4 votes vote down vote up
public static void main(String[] args) {

        ParameterSpace<Double> learningRateParam = new ContinuousParameterSpace(0.0001,0.01);
        ParameterSpace<Integer> layerSizeParam = new IntegerParameterSpace(5,11);
        MultiLayerSpace hyperParameterSpace = new MultiLayerSpace.Builder()
                .updater(new AdamSpace(learningRateParam))
                //  .weightInit(WeightInit.DISTRIBUTION).dist(new LogNormalDistribution())
                .addLayer(new DenseLayerSpace.Builder()
                        .activation(Activation.RELU)
                        .nIn(11)
                        .nOut(layerSizeParam)
                        .build())
                .addLayer(new DenseLayerSpace.Builder()
                        .activation(Activation.RELU)
                        .nIn(layerSizeParam)
                        .nOut(layerSizeParam)
                        .build())
                .addLayer(new OutputLayerSpace.Builder()
                        .activation(Activation.SIGMOID)
                        .lossFunction(LossFunctions.LossFunction.XENT)
                        .nOut(1)
                        .build())
                .build();

        Map<String,Object> dataParams = new HashMap<>();
        dataParams.put("batchSize", 10);

        // Leftover from the legacy DataSetIteratorFactoryProvider approach; unused once a DataSource is supplied below
        Map<String,Object> commands = new HashMap<>();
        commands.put(DataSetIteratorFactoryProvider.FACTORY_KEY, HyperParameterTuningArbiterUiExample.ExampleDataSource.class.getCanonicalName());

        CandidateGenerator candidateGenerator = new RandomSearchGenerator(hyperParameterSpace, dataParams);

        Properties dataSourceProperties = new Properties();
        dataSourceProperties.setProperty("minibatchSize", "64");

        ResultSaver modelSaver = new FileModelSaver("resources/");
        ScoreFunction scoreFunction = new EvaluationScoreFunction(org.deeplearning4j.eval.Evaluation.Metric.ACCURACY);


        TerminationCondition[] conditions = {
                new MaxTimeCondition(120, TimeUnit.MINUTES),
                new MaxCandidatesCondition(30)
        };

        OptimizationConfiguration optimizationConfiguration = new OptimizationConfiguration.Builder()
                .candidateGenerator(candidateGenerator)
                .dataSource(HyperParameterTuningArbiterUiExample.ExampleDataSource.class,dataSourceProperties)
                .modelSaver(modelSaver)
                .scoreFunction(scoreFunction)
                .terminationConditions(conditions)
                .build();

        IOptimizationRunner runner = new LocalOptimizationRunner(optimizationConfiguration,new MultiLayerNetworkTaskCreator());
        // Persist optimization stats to disk and attach the Arbiter UI so progress can be monitored in the browser
        StatsStorage ss = new FileStatsStorage(new File("HyperParamOptimizationStats.dl4j"));
        runner.addListeners(new ArbiterStatusListener(ss));
        UIServer.getInstance().attach(ss);
        //runner.addListeners(new LoggingStatusListener()); // console-only alternative
        runner.execute();

        //Print a summary of the best candidate

        double bestScore = runner.bestScore();
        int bestCandidateIndex = runner.bestScoreCandidateIndex();
        int numberOfConfigsEvaluated = runner.numCandidatesCompleted();

        String s = "Best score: " + bestScore + "\n" +
                "Index of model with best score: " + bestCandidateIndex + "\n" +
                "Number of configurations evaluated: " + numberOfConfigsEvaluated + "\n";

        System.out.println(s);

    }
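Neither of the two cookbook examples loads the winning model back after runner.execute() returns. A sketch of retrieving it through the runner's results list follows; the getResult()/getResultModel() call chain mirrors the pattern used in the official Arbiter examples, but treat the exact signatures as an assumption.

import java.io.IOException;
import java.util.List;
import org.deeplearning4j.arbiter.optimize.api.OptimizationResult;
import org.deeplearning4j.arbiter.optimize.api.saving.ResultReference;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;

// Sketch only: append after runner.execute() to inspect the best candidate's network.
try {
    int indexOfBestResult = runner.bestScoreCandidateIndex();
    List<ResultReference> allResults = runner.getResults();

    OptimizationResult bestResult = allResults.get(indexOfBestResult).getResult();
    MultiLayerNetwork bestModel = (MultiLayerNetwork) bestResult.getResultReference().getResultModel();

    System.out.println("Configuration of best model:");
    System.out.println(bestModel.getLayerWiseConfigurations().toJson());
} catch (IOException e) {
    throw new RuntimeException(e);
}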
 