org.deeplearning4j.arbiter.optimize.api.ParameterSpace Java Examples

The following examples show how to use org.deeplearning4j.arbiter.optimize.api.ParameterSpace. All examples are taken from the deeplearning4j project (Apache License 2.0); the source file each one comes from is noted above it.
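All of the examples below exercise the same small contract: collectLeaves() flattens a space into its leaf hyperparameters, setIndices(...) tells each leaf which entries of a flattened double[] candidate it owns, numParameters() reports how many entries it needs, and getValue(double[]) maps those [0, 1] coordinates onto concrete values. The minimal sketch below illustrates that contract on a single ContinuousParameterSpace; the import path and the linear [0, 1]-to-range mapping are inferred from the examples (see Example #1), so treat it as an illustrative sketch rather than reference code.

import org.deeplearning4j.arbiter.optimize.api.ParameterSpace;
import org.deeplearning4j.arbiter.optimize.parameter.continuous.ContinuousParameterSpace;

public class ParameterSpaceSketch {
    public static void main(String[] args) {
        //A single leaf space: one hyperparameter drawn from the continuous range [0.001, 0.1]
        ParameterSpace<Double> learningRate = new ContinuousParameterSpace(0.001, 0.1);

        //Each leaf owns a slice of the flattened candidate vector; here a single entry at index 0
        learningRate.setIndices(0);
        int n = learningRate.numParameters();   //1 for a continuous leaf

        //getValue maps the [0, 1] coordinate onto the configured range,
        //e.g. 0.5 -> 0.5 * (0.1 - 0.001) + 0.001
        double lr = learningRate.getValue(new double[]{0.5});
        System.out.println("numParameters = " + n + ", sampled learning rate = " + lr);
    }
}
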
Example #1
Source File: TestLayerSpace.java    From deeplearning4j with Apache License 2.0
@Test
public void testBatchNorm() {
    BatchNormalizationSpace sp = new BatchNormalizationSpace.Builder().gamma(1.5)
            .beta(new ContinuousParameterSpace(2, 3)).lockGammaBeta(true).build();

    //Assign an index to each leaf parameter space...
    List<ParameterSpace> list = sp.collectLeaves();
    int k = 0;
    for (int j = 0; j < list.size(); j++) {
        if (list.get(j).numParameters() > 0) {
            list.get(j).setIndices(k++);
        }
    }

    BatchNormalization bn = sp.getValue(new double[]{0.6});
    assertTrue(bn.isLockGammaBeta());
    assertEquals(1.5, bn.getGamma(), 0.0);
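    //0.6 on the unit interval maps linearly onto the beta range [2, 3]: 0.6 * (3 - 2) + 2 = 2.6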
    assertEquals(0.6 * (3 - 2) + 2, bn.getBeta(), 1e-4);
}
 
Example #2
Source File: ComputationGraphSpace.java    From deeplearning4j with Apache License 2.0
protected ComputationGraphSpace(Builder builder) {
    super(builder);

    this.earlyStoppingConfiguration = builder.earlyStoppingConfiguration;
    this.layerSpaces = builder.layerList;
    this.vertices = builder.vertexList;

    this.networkInputs = builder.networkInputs;
    this.networkOutputs = builder.networkOutputs;
    this.inputTypes = builder.inputTypes;
    this.trainingWorkspaceMode = builder.trainingWorkspaceMode;
    this.inferenceWorkspaceMode = builder.inferenceWorkspaceMode;
    this.validateOutputLayerConfig = builder.validateOutputLayerConfig;

    //Determine total number of parameters:
    List<ParameterSpace> list = LeafUtils.getUniqueObjects(collectLeaves());
    for (ParameterSpace ps : list)
        numParameters += ps.numParameters();
}
 
Example #3
Source File: BaseNetworkSpace.java    From deeplearning4j with Apache License 2.0
@Override
public String toString() {
    StringBuilder sb = new StringBuilder();

    for (Map.Entry<String, ParameterSpace> e : getNestedSpaces().entrySet()) {
        sb.append(e.getKey()).append(": ").append(e.getValue()).append("\n");
    }

    int i = 0;
    for (LayerConf conf : layerSpaces) {

        sb.append("Layer config ").append(i++).append(": (Number layers:").append(conf.numLayers)
                        .append(", duplicate: ").append(conf.duplicateConfig).append("), ")
                        .append(conf.layerSpace.toString()).append("\n");
    }


    return sb.toString();
}
 
Example #4
Source File: MultiLayerSpace.java    From deeplearning4j with Apache License 2.0
protected MultiLayerSpace(Builder builder) {
    super(builder);
    this.inputType = builder.inputType;
    this.inputPreProcessors = builder.inputPreProcessors;

    this.earlyStoppingConfiguration = builder.earlyStoppingConfiguration;

    this.layerSpaces = builder.layerSpaces;

    //Determine total number of parameters:
    //Collect the leaves, and make sure they are unique.
    //Note that the *object instances* must be unique - and consequently we don't want to use .equals(), as
    // this would incorrectly filter out equal range parameter spaces
    List<ParameterSpace> allLeaves = collectLeaves();
    List<ParameterSpace> list = LeafUtils.getUniqueObjects(allLeaves);

    for (ParameterSpace ps : list) {
        numParameters += ps.numParameters();
    }

    this.trainingWorkspaceMode = builder.trainingWorkspaceMode;
    this.inferenceWorkspaceMode = builder.inferenceWorkspaceMode;
    this.validateOutputLayerConfig = builder.validateOutputLayerConfig;
}
 
Example #5
Source File: LayerSpace.java    From deeplearning4j with Apache License 2.0
@Override
public List<ParameterSpace> collectLeaves() {
    //To avoid manually coding EVERY parameter, in every layer:
    // Do a depth-first search of nested spaces
    LinkedList<ParameterSpace> stack = new LinkedList<>();
    stack.add(this);

    List<ParameterSpace> out = new ArrayList<>();
    while (!stack.isEmpty()) {
        ParameterSpace next = stack.removeLast();
        if (next.isLeaf()) {
            out.add(next);
        } else {
            Map<String, ParameterSpace> m = next.getNestedSpaces();
            ParameterSpace[] arr = m.values().toArray(new ParameterSpace[m.size()]);
            for (int i = arr.length - 1; i >= 0; i--) {
                stack.add(arr[i]);
            }
        }
    }

    return out;
}
 
Example #6
Source File: BaseLayerSpace.java    From deeplearning4j with Apache License 2.0
protected String toString(String delim) {
    StringBuilder sb = new StringBuilder();

    for (Map.Entry<String, ParameterSpace> e : getNestedSpaces().entrySet()) {
        sb.append(e.getKey()).append(": ").append(e.getValue()).append("\n");
    }
    return sb.toString();
}
 
Example #7
Source File: BaseNetworkSpace.java    From deeplearning4j with Apache License 2.0
@Override
public List<ParameterSpace> collectLeaves() {
    Map<String, ParameterSpace> global = getNestedSpaces();
    //Note: the result of the previous line does NOT include the LayerSpaces, so these are added manually below...
    //This is because they are stored as a List, not as a ParameterSpace
    LinkedList<ParameterSpace> stack = new LinkedList<>();
    stack.add(this);

    for (LayerConf layerConf : layerSpaces) {
        LayerSpace ls = layerConf.getLayerSpace();
        stack.addAll(ls.collectLeaves());
    }

    List<ParameterSpace> out = new ArrayList<>();
    while (!stack.isEmpty()) {
        ParameterSpace next = stack.removeLast();
        if (next.isLeaf()) {
            out.add(next);
        } else {
            Map<String, ParameterSpace> m = next.getNestedSpaces();
            ParameterSpace[] arr = m.values().toArray(new ParameterSpace[m.size()]);
            for (int i = arr.length - 1; i >= 0; i--) {
                stack.add(arr[i]);
            }
        }
    }
    return out;
}
 
Example #8
Source File: NesterovsSpace.java    From deeplearning4j with Apache License 2.0
public NesterovsSpace(@JsonProperty("learningRate") ParameterSpace<Double> learningRate,
                      @JsonProperty("learningRateSchedule") ParameterSpace<ISchedule> learningRateSchedule,
                      @JsonProperty("momentum") ParameterSpace<Double> momentum,
                      @JsonProperty("momentumSchedule") ParameterSpace<ISchedule> momentumSchedule) {
    this.learningRate = learningRate;
    this.learningRateSchedule = learningRateSchedule;
    this.momentum = momentum;
    this.momentumSchedule = momentumSchedule;
}
 
Example #9
Source File: SigmoidScheduleSpace.java    From deeplearning4j with Apache License 2.0
public SigmoidScheduleSpace(@NonNull @JsonProperty("scheduleType") ScheduleType scheduleType,
                            @NonNull @JsonProperty("initialValue") ParameterSpace<Double> initialValue,
                            @NonNull @JsonProperty("gamma") ParameterSpace<Double> gamma,
                            @NonNull @JsonProperty("stepSize") ParameterSpace<Integer> stepSize){
    this.scheduleType = scheduleType;
    this.initialValue = initialValue;
    this.gamma = gamma;
    this.stepSize = stepSize;
}
 
Example #10
Source File: TestMultiLayerSpace.java    From deeplearning4j with Apache License 2.0
@Test
public void testDropout2() {

    MultiLayerSpace mls = new MultiLayerSpace.Builder().updater(new Sgd(0.005)).seed(12345)
            .addLayer(new ConvolutionLayerSpace.Builder().nOut(2)
                    .dropOut(new ContinuousParameterSpace(0.4,0.6))
                    .build())
            .addLayer(new DropoutLayerSpace.Builder().dropOut(new ContinuousParameterSpace(0.4,0.6)).build())
            .addLayer(new OutputLayerSpace.Builder().activation(Activation.SOFTMAX).nIn(10).nOut(5).build())
            .setInputType(InputType.convolutional(28, 28, 1))
            .build();

    int nParams = mls.numParameters();
    List<ParameterSpace> l = LeafUtils.getUniqueObjects(mls.collectLeaves());
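    //Assign each leaf space a contiguous block of indices into the flattened parameter vector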
    int x = 0;
    for (ParameterSpace p : l) {
        int n = p.numParameters();
        int[] arr = new int[n];
        for (int i = 0; i < arr.length; i++) {
            arr[i] = x++;
        }
        p.setIndices(arr);
    }

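    //Building the configuration should succeed for any in-range candidate values; the result is not inspected further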
    MultiLayerConfiguration conf = mls.getValue(new double[nParams]).getMultiLayerConfiguration();
}
 
Example #11
Source File: LeafUtils.java    From deeplearning4j with Apache License 2.0
/**
 * Count the number of unique parameters in the specified leaf nodes
 *
 * @param allLeaves Leaf values to count the parameters for
 * @return Number of parameters for all unique objects
 */
public static int countUniqueParameters(List<ParameterSpace> allLeaves) {
    List<ParameterSpace> unique = getUniqueObjects(allLeaves);
    int count = 0;
    for (ParameterSpace ps : unique) {
        if (!ps.isLeaf()) {
            throw new IllegalStateException("Method should only be used with leaf nodes");
        }
        count += ps.numParameters();
    }
    return count;
}
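
For context, a hedged usage sketch (it assumes a MultiLayerSpace named mls, built as in Examples #10 and #15): the count returned here is the length of the flattened double[] candidate that getValue(double[]) later consumes, and it equals mls.numParameters() as used in Example #10.

//Collect every leaf hyperparameter of the space; duplicate object instances are counted once
List<ParameterSpace> leaves = mls.collectLeaves();
int vectorLength = LeafUtils.countUniqueParameters(leaves);
double[] candidate = new double[vectorLength];   //one [0, 1] coordinate per hyperparameter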
 
Example #12
Source File: NadamSpace.java    From deeplearning4j with Apache License 2.0
public NadamSpace(@JsonProperty("learningRate") ParameterSpace<Double> learningRate,
                  @JsonProperty("learningRateSchedule") ParameterSpace<ISchedule> learningRateSchedule,
                  @JsonProperty("beta1") ParameterSpace<Double> beta1,
                  @JsonProperty("beta2") ParameterSpace<Double> beta2,
                  @JsonProperty("epsilon") ParameterSpace<Double> epsilon){
    this.learningRate = learningRate;
    this.learningRateSchedule = learningRateSchedule;
    this.beta1 = beta1;
    this.beta2 = beta2;
    this.epsilon = epsilon;
}
 
Example #13
Source File: AdaMaxSpace.java    From deeplearning4j with Apache License 2.0
public AdaMaxSpace(@JsonProperty("learningRate") ParameterSpace<Double> learningRate,
                   @JsonProperty("learningRateSchedule") ParameterSpace<ISchedule> learningRateSchedule,
                   @JsonProperty("beta1") ParameterSpace<Double> beta1,
                   @JsonProperty("beta2") ParameterSpace<Double> beta2,
                   @JsonProperty("epsilon") ParameterSpace<Double> epsilon){
    this.learningRate = learningRate;
    this.learningRateSchedule = learningRateSchedule;
    this.beta1 = beta1;
    this.beta2 = beta2;
    this.epsilon = epsilon;
}
 
Example #14
Source File: InverseScheduleSpace.java    From deeplearning4j with Apache License 2.0
@Override
public Map<String, ParameterSpace> getNestedSpaces() {
    Map<String,ParameterSpace> out = new LinkedHashMap<>();
    out.put("initialValue", initialValue);
    out.put("gamma", gamma);
    out.put("power", power);
    return out;
}
 
Example #15
Source File: TestMultiLayerSpace.java    From deeplearning4j with Apache License 2.0
@Test
public void testDropout() {

    MultiLayerSpace mls = new MultiLayerSpace.Builder().updater(new Sgd(0.005)).seed(12345)
            .addLayer(new ConvolutionLayerSpace.Builder().nOut(2)
                    .dropOut(new ContinuousParameterSpace(0.4,0.6))
                    .build())
            .addLayer(new GlobalPoolingLayerSpace.Builder().dropOut(new ContinuousParameterSpace(0.4,0.6)).build())
            .addLayer(new OutputLayerSpace.Builder().activation(Activation.SOFTMAX).nIn(10).nOut(5).build())
            .setInputType(InputType.convolutional(28, 28, 1))
            .build();

    int nParams = mls.numParameters();
    List<ParameterSpace> l = LeafUtils.getUniqueObjects(mls.collectLeaves());
    int x = 0;
    for (ParameterSpace p : l) {
        int n = p.numParameters();
        int[] arr = new int[n];
        for (int i = 0; i < arr.length; i++) {
            arr[i] = x++;
        }
        p.setIndices(arr);
    }

    MultiLayerConfiguration conf = mls.getValue(new double[nParams]).getMultiLayerConfiguration();
}
 
Example #16
Source File: AdamSpace.java    From deeplearning4j with Apache License 2.0
protected AdamSpace(@JsonProperty("learningRate") ParameterSpace<Double> learningRate,
                 @JsonProperty("learningRateSchedule") ParameterSpace<ISchedule> learningRateSchedule,
                 @JsonProperty("beta1") ParameterSpace<Double> beta1,
                 @JsonProperty("beta2") ParameterSpace<Double> beta2,
                 @JsonProperty("epsilon") ParameterSpace<Double> epsilon){
    this.learningRate = learningRate;
    this.learningRateSchedule = learningRateSchedule;
    this.beta1 = beta1;
    this.beta2 = beta2;
    this.epsilon = epsilon;
}
 
Example #17
Source File: ExponentialScheduleSpace.java    From deeplearning4j with Apache License 2.0
public ExponentialScheduleSpace(@NonNull @JsonProperty("scheduleType") ScheduleType scheduleType,
                                @NonNull @JsonProperty("initialValue") ParameterSpace<Double> initialValue,
                                @NonNull @JsonProperty("gamma") ParameterSpace<Double> gamma){
    this.scheduleType = scheduleType;
    this.initialValue = initialValue;
    this.gamma = gamma;
}
 
Example #18
Source File: ExponentialScheduleSpace.java    From deeplearning4j with Apache License 2.0
@Override
public Map<String, ParameterSpace> getNestedSpaces() {
    Map<String,ParameterSpace> out = new LinkedHashMap<>();
    out.put("initialValue", initialValue);
    out.put("gamma", gamma);
    return out;
}
 
Example #19
Source File: TestMultiLayerSpace.java    From deeplearning4j with Apache License 2.0
@Test
public void testSameRanges() {

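    //Two independent hyperparameters with identical ranges - the two instances must remain distinct leaves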
    ParameterSpace<Double> l1Hyperparam = new ContinuousParameterSpace(0.001, 0.1);
    ParameterSpace<Double> l2Hyperparam = new ContinuousParameterSpace(0.001, 0.1);

    MultiLayerSpace hyperparameterSpace =
                    new MultiLayerSpace.Builder().addLayer(new DenseLayerSpace.Builder().nIn(10).nOut(10).build())
                                    .l1(l1Hyperparam).l2(l2Hyperparam).build();

    CandidateGenerator c = new RandomSearchGenerator(hyperparameterSpace, null);

    Candidate candidate = c.getCandidate();
}
 
Example #20
Source File: ComputationGraphSpace.java    From deeplearning4j with Apache License 2.0
@Override
public List<ParameterSpace> collectLeaves() {
    List<ParameterSpace> list = super.collectLeaves();
    for (LayerConf lc : layerSpaces) {
        list.addAll(lc.layerSpace.collectLeaves());
    }
    if (inputTypes != null)
        list.add(inputTypes);
    return list;
}
 
Example #21
Source File: SgdSpace.java    From deeplearning4j with Apache License 2.0
public SgdSpace(ParameterSpace<Double> learningRate) {
    this(learningRate, null);
}
 
Example #22
Source File: LossLayerSpace.java    From deeplearning4j with Apache License 2.0
public Builder iLossFunction(ParameterSpace<ILossFunction> lossFunction) {
    this.lossFunction = lossFunction;
    return this;
}
 
Example #23
Source File: BaseLayerSpace.java    From deeplearning4j with Apache License 2.0
public T activation(ParameterSpace<Activation> activationFunction) {
    return activationFn(new ActivationParameterSpaceAdapter(activationFunction));
}
 
Example #24
Source File: AdaMaxSpace.java    From deeplearning4j with Apache License 2.0
public AdaMaxSpace(ParameterSpace<Double> learningRate) {
    this(learningRate, null, null, null);
}
 
Example #25
Source File: BaseLayerSpace.java    From deeplearning4j with Apache License 2.0
public T activationFn(ParameterSpace<IActivation> activationFunction) {
    this.activationFunction = activationFunction;
    return (T) this;
}
 
Example #26
Source File: AdaGradSpace.java    From deeplearning4j with Apache License 2.0
public static AdaGradSpace withLR(ParameterSpace<Double> lr){
    return new AdaGradSpace(lr, null);
}
 
Example #27
Source File: NesterovsSpace.java    From deeplearning4j with Apache License 2.0
public static NesterovsSpace withLRSchedule(ParameterSpace<ISchedule> lrSchedule, ParameterSpace<Double> momentum){
    return new NesterovsSpace(null, lrSchedule, momentum, null);
}
 
Example #28
Source File: NesterovsSpace.java    From deeplearning4j with Apache License 2.0
public static NesterovsSpace withLRSchedule(ParameterSpace<ISchedule> lrSchedule, double momentum){
    return new NesterovsSpace(null, lrSchedule, new FixedValue<>(momentum), null);
}
 
Example #29
Source File: NesterovsSpace.java    From deeplearning4j with Apache License 2.0
public static NesterovsSpace withLR(ParameterSpace<Double> lr, ParameterSpace<Double> momentum){
    return new NesterovsSpace(lr, null, momentum, null);
}
 
Example #30
Source File: NesterovsSpace.java    From deeplearning4j with Apache License 2.0
public static NesterovsSpace withLRSchedule(ParameterSpace<ISchedule> lrSchedule){
    return new NesterovsSpace(null, lrSchedule, null, null);
}