Java Code Examples for org.deeplearning4j.nn.multilayer.MultiLayerNetwork#numParams()

The following examples show how to use org.deeplearning4j.nn.multilayer.MultiLayerNetwork#numParams(). All of them come from the deeplearning4j project (Apache License 2.0); the source file for each is noted above the example.
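For reference, MultiLayerNetwork#numParams() returns the total number of trainable parameters in the network as a long. Several examples below call the overload numParams(boolean); as used there, numParams(false) returns the full parameter count (with true, as I read the API, only the parameters used during backprop are counted). Here is a minimal, self-contained sketch of direct usage; the two-layer configuration and layer sizes are illustrative, not taken from the examples below.

import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class NumParamsExample {
    public static void main(String[] args) {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .list()
                .layer(0, new DenseLayer.Builder().nIn(3).nOut(4)
                        .activation(Activation.RELU).build())
                .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                        .activation(Activation.SOFTMAX).nIn(4).nOut(5).build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init(); // parameters are only allocated once init() is called

        // Dense layer: 3*4 weights + 4 biases = 16
        // Output layer: 4*5 weights + 5 biases = 25
        long numParams = net.numParams(); // 41 for this configuration
        System.out.println("Total parameters: " + numParams);
    }
}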
Example 1
Source File: VaeReconstructionProbWithKeyFunction.java    From deeplearning4j with Apache License 2.0
@Override
public VariationalAutoencoder getVaeLayer() {
    MultiLayerNetwork network =
                    new MultiLayerNetwork(MultiLayerConfiguration.fromJson((String) jsonConfig.getValue()));
    network.init();
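    // unsafeDuplication(): fast copy of the broadcast parameter vector (no thread-safety guarantees);
    // the length check below guards against a mismatch between the config and the broadcast parameters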
    INDArray val = ((INDArray) params.value()).unsafeDuplication();
    if (val.length() != network.numParams(false))
        throw new IllegalStateException(
                        "Network did not have same number of parameters as the broadcast set parameters");
    network.setParameters(val);

    Layer l = network.getLayer(0);
    if (!(l instanceof VariationalAutoencoder)) {
        throw new RuntimeException(
                        "Cannot use VaeReconstructionProbWithKeyFunction on network that doesn't have a VAE "
                                        + "layer as layer 0. Layer type: " + l.getClass());
    }
    return (VariationalAutoencoder) l;
}
 
Example 2
Source File: VaeReconstructionErrorWithKeyFunction.java    From deeplearning4j with Apache License 2.0
@Override
public VariationalAutoencoder getVaeLayer() {
    MultiLayerNetwork network =
                    new MultiLayerNetwork(MultiLayerConfiguration.fromJson((String) jsonConfig.getValue()));
    network.init();
    INDArray val = ((INDArray) params.value()).unsafeDuplication();
    if (val.length() != network.numParams(false))
        throw new IllegalStateException(
                        "Network did not have same number of parameters as the broadcast set parameters");
    network.setParameters(val);

    Layer l = network.getLayer(0);
    if (!(l instanceof VariationalAutoencoder)) {
        throw new RuntimeException(
                        "Cannot use VaeReconstructionErrorWithKeyFunction on network that doesn't have a VAE "
                                        + "layer as layer 0. Layer type: " + l.getClass());
    }
    return (VariationalAutoencoder) l;
}
 
Example 3
Source File: RegressionTest050.java    From deeplearning4j with Apache License 2.0
@Test
public void regressionTestMLP1() throws Exception {

    File f = Resources.asFile("regression_testing/050/050_ModelSerializer_Regression_MLP_1.zip");

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());

    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertEquals("relu", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(new WeightInitXavier(), l0.getWeightInitFn());
    assertEquals(new Nesterovs(0.15, 0.9), l0.getIUpdater());
    assertEquals(0.15, ((Nesterovs)l0.getIUpdater()).getLearningRate(), 1e-6);

    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertEquals("softmax", l1.getActivationFn().toString());
    assertTrue(l1.getLossFn() instanceof LossMCXENT);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    assertEquals(new WeightInitXavier(), l1.getWeightInitFn());
    assertEquals(new Nesterovs(0.15, 0.9), l1.getIUpdater());
    assertEquals(0.9, ((Nesterovs)l1.getIUpdater()).getMomentum(), 1e-6);
    assertEquals(0.15, ((Nesterovs)l1.getIUpdater()).getLearningRate(), 1e-6);

    int numParams = (int)net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams, Nd4j.dataType()).reshape(1,numParams), net.params());
    int updaterSize = (int) new Nesterovs().stateSize(net.numParams());
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize, Nd4j.dataType()).reshape(1,numParams), net.getUpdater().getStateViewArray());
}
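
Note: these regression-test models were saved with their parameters (and updater state) set to the sequence 1, 2, ..., n, which is why the linspace assertions at the end can verify that deserialization restored every value exactly. The same closing pattern recurs in the regression tests below.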
 
Example 4
Source File: RegressionTest050.java    From deeplearning4j with Apache License 2.0
@Test
public void regressionTestMLP2() throws Exception {

    File f = Resources.asFile("regression_testing/050/050_ModelSerializer_Regression_MLP_2.zip");

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());

    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertTrue(l0.getActivationFn() instanceof ActivationLReLU);
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(new WeightInitDistribution(new NormalDistribution(0.1, 1.2)), l0.getWeightInitFn());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l0.getIUpdater());
    assertEquals(0.15, ((RmsProp)l0.getIUpdater()).getLearningRate(), 1e-6);
    assertEquals(new Dropout(0.6), l0.getIDropout());
    assertEquals(0.1, TestUtils.getL1(l0), 1e-6);
    assertEquals(new WeightDecay(0.2, false), TestUtils.getWeightDecayReg(l0));

    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertEquals("identity", l1.getActivationFn().toString());
    assertTrue(l1.getLossFn() instanceof LossMSE);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    assertEquals(new WeightInitDistribution(new NormalDistribution(0.1, 1.2)), l1.getWeightInitFn());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l1.getIUpdater());
    assertEquals(0.15, ((RmsProp)l1.getIUpdater()).getLearningRate(), 1e-6);
    assertEquals(new Dropout(0.6), l1.getIDropout());
    assertEquals(0.1, TestUtils.getL1(l1), 1e-6);
    assertEquals(new WeightDecay(0.2, false), TestUtils.getWeightDecayReg(l1));

    int numParams = (int)net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams, Nd4j.dataType()).reshape(1,numParams), net.params());
    int updaterSize = (int) new RmsProp().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize, Nd4j.dataType()).reshape(1,numParams), net.getUpdater().getStateViewArray());
}
 
Example 5
Source File: RegressionTest080.java    From deeplearning4j with Apache License 2.0
@Test
public void regressionTestMLP1() throws Exception {

    File f = Resources.asFile("regression_testing/080/080_ModelSerializer_Regression_MLP_1.zip");

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());

    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertTrue(l0.getActivationFn() instanceof ActivationReLU);
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(new WeightInitXavier(), l0.getWeightInitFn());
    assertTrue(l0.getIUpdater() instanceof Nesterovs);
    Nesterovs n = (Nesterovs) l0.getIUpdater();
    assertEquals(0.9, n.getMomentum(), 1e-6);
    assertEquals(0.15, n.getLearningRate(), 1e-6);


    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertTrue(l1.getActivationFn() instanceof ActivationSoftmax);
    assertTrue(l1.getLossFn() instanceof LossMCXENT);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    assertEquals(new WeightInitXavier(), l1.getWeightInitFn());
    assertTrue(l1.getIUpdater() instanceof Nesterovs);
    assertEquals(0.9, ((Nesterovs)l1.getIUpdater()).getMomentum(), 1e-6);
    assertEquals(0.15, ((Nesterovs)l1.getIUpdater()).getLearningRate(), 1e-6);

    int numParams = (int)net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams, Nd4j.dataType()).reshape(1,numParams), net.params());
    int updaterSize = (int) new Nesterovs().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize, Nd4j.dataType()).reshape(1,numParams), net.getUpdater().getStateViewArray());
}
 
Example 6
Source File: RegressionTest071.java    From deeplearning4j with Apache License 2.0
@Test
public void regressionTestMLP1() throws Exception {

    File f = Resources.asFile("regression_testing/071/071_ModelSerializer_Regression_MLP_1.zip");

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());

    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertEquals("relu", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(new WeightInitXavier(), l0.getWeightInitFn());
    assertEquals(new Nesterovs(0.15, 0.9), l0.getIUpdater());
    assertEquals(0.15, ((Nesterovs)l0.getIUpdater()).getLearningRate(), 1e-6);

    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertEquals("softmax", l1.getActivationFn().toString());
    assertTrue(l1.getLossFn() instanceof LossMCXENT);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    assertEquals(new WeightInitXavier(), l1.getWeightInitFn());
    assertEquals(0.9, ((Nesterovs)l1.getIUpdater()).getMomentum(), 1e-6);
    assertEquals(0.15, ((Nesterovs)l1.getIUpdater()).getLearningRate(), 1e-6);

    int numParams = (int) net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams).reshape(1,numParams), net.params());
    int updaterSize = (int) new Nesterovs().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize).reshape(1,numParams), net.getUpdater().getStateViewArray());
}
 
Example 7
Source File: RegressionTest060.java    From deeplearning4j with Apache License 2.0
@Test
public void regressionTestMLP1() throws Exception {

    File f = Resources.asFile("regression_testing/060/060_ModelSerializer_Regression_MLP_1.zip");

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());

    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertEquals("relu", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(new WeightInitXavier(), l0.getWeightInitFn());
    assertEquals(new Nesterovs(0.15, 0.9), l0.getIUpdater());
    assertEquals(0.15, ((Nesterovs)l0.getIUpdater()).getLearningRate(), 1e-6);

    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertEquals("softmax", l1.getActivationFn().toString());
    assertTrue(l1.getLossFn() instanceof LossMCXENT);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    assertEquals(new WeightInitXavier(), l1.getWeightInitFn());
    assertEquals(new Nesterovs(0.15, 0.9), l1.getIUpdater());
    assertEquals(0.9, ((Nesterovs)l1.getIUpdater()).getMomentum(), 1e-6);
    assertEquals(0.15, ((Nesterovs)l1.getIUpdater()).getLearningRate(), 1e-6);

    int numParams = (int)net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams, Nd4j.dataType()).reshape(1,numParams), net.params());
    int updaterSize = (int) new Nesterovs().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize, Nd4j.dataType()).reshape(1,numParams), net.getUpdater().getStateViewArray());
}
 
Example 8
Source File: ScoreFlatMapFunction.java    From deeplearning4j with Apache License 2.0
@Override
public Iterator<Tuple2<Integer, Double>> call(Iterator<DataSet> dataSetIterator) throws Exception {
    if (!dataSetIterator.hasNext()) {
        return Collections.singletonList(new Tuple2<>(0, 0.0)).iterator();
    }

    DataSetIterator iter = new IteratorDataSetIterator(dataSetIterator, minibatchSize); //Does batching where appropriate

    MultiLayerNetwork network = new MultiLayerNetwork(MultiLayerConfiguration.fromJson(json));
    network.init();
    INDArray val = params.value().unsafeDuplication(); //.value() object will be shared by all executors on each machine -> OK, as params are not modified by score function
    if (val.length() != network.numParams(false))
        throw new IllegalStateException(
                        "Network did not have same number of parameters as the broadcast set parameters");
    network.setParameters(val);

    List<Tuple2<Integer, Double>> out = new ArrayList<>();
    while (iter.hasNext()) {
        DataSet ds = iter.next();
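        // Compute the loss for this batch at test time (the boolean flag is 'training')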
        double score = network.score(ds, false);

        val numExamples = (int) ds.getFeatures().size(0);
        out.add(new Tuple2<>(numExamples, score * numExamples));
    }

    Nd4j.getExecutioner().commit();

    return out.iterator();
}
 
Example 9
Source File: RegressionTest050.java    From deeplearning4j with Apache License 2.0
@Test
public void regressionTestCNN1() throws Exception {

    File f = Resources.asFile("regression_testing/050/050_ModelSerializer_Regression_CNN_1.zip");

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(3, conf.getConfs().size());

    ConvolutionLayer l0 = (ConvolutionLayer) conf.getConf(0).getLayer();
    assertEquals("tanh", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(3, l0.getNOut());
    assertEquals(new WeightInitRelu(), l0.getWeightInitFn());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l0.getIUpdater());
    assertEquals(0.15, ((RmsProp)l0.getIUpdater()).getLearningRate(), 1e-6);
    assertArrayEquals(new int[] {2, 2}, l0.getKernelSize());
    assertArrayEquals(new int[] {1, 1}, l0.getStride());
    assertArrayEquals(new int[] {0, 0}, l0.getPadding());
    assertEquals(ConvolutionMode.Truncate, l0.getConvolutionMode()); //Pre-0.7.0: no ConvolutionMode. Want to default to truncate here if not set

    SubsamplingLayer l1 = (SubsamplingLayer) conf.getConf(1).getLayer();
    assertArrayEquals(new int[] {2, 2}, l1.getKernelSize());
    assertArrayEquals(new int[] {1, 1}, l1.getStride());
    assertArrayEquals(new int[] {0, 0}, l1.getPadding());
    assertEquals(PoolingType.MAX, l1.getPoolingType());
    assertEquals(ConvolutionMode.Truncate, l1.getConvolutionMode()); //Pre-0.7.0: no ConvolutionMode. Want to default to truncate here if not set

    OutputLayer l2 = (OutputLayer) conf.getConf(2).getLayer();
    assertEquals("sigmoid", l2.getActivationFn().toString());
    assertTrue(l2.getLossFn() instanceof LossNegativeLogLikelihood);
    assertEquals(26 * 26 * 3, l2.getNIn());
    assertEquals(5, l2.getNOut());
    assertEquals(new WeightInitRelu(), l2.getWeightInitFn());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l2.getIUpdater());
    assertEquals(0.15, ((RmsProp)l2.getIUpdater()).getLearningRate(), 1e-6);

    int numParams = (int)net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams, Nd4j.dataType()).reshape(1,numParams), net.params());
    int updaterSize = (int) new RmsProp().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize, Nd4j.dataType()).reshape(1,numParams), net.getUpdater().getStateViewArray());
}
 
Example 10
Source File: RegressionTest071.java    From deeplearning4j with Apache License 2.0
@Test
public void regressionTestMLP2() throws Exception {

    File f = Resources.asFile("regression_testing/071/071_ModelSerializer_Regression_MLP_2.zip");

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());

    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertTrue(l0.getActivationFn() instanceof ActivationLReLU);
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(new WeightInitDistribution(new NormalDistribution(0.1, 1.2)), l0.getWeightInitFn());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l0.getIUpdater());
    assertEquals(0.15, ((RmsProp)l0.getIUpdater()).getLearningRate(), 1e-6);
    assertEquals(new Dropout(0.6), l0.getIDropout());
    assertEquals(0.1, TestUtils.getL1(l0), 1e-6);
    assertEquals(new WeightDecay(0.2,false), TestUtils.getWeightDecayReg(l0));
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l0.getGradientNormalization());
    assertEquals(1.5, l0.getGradientNormalizationThreshold(), 1e-5);

    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertTrue(l1.getActivationFn() instanceof ActivationIdentity);
    assertTrue(l1.getLossFn() instanceof LossMSE);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    assertEquals(new WeightInitDistribution(new NormalDistribution(0.1, 1.2)), l1.getWeightInitFn());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l1.getIUpdater());
    assertEquals(0.15, ((RmsProp)l1.getIUpdater()).getLearningRate(), 1e-6);
    assertEquals(new Dropout(0.6), l1.getIDropout());
    assertEquals(0.1, TestUtils.getL1(l1), 1e-6);
    assertEquals(new WeightDecay(0.2,false), TestUtils.getWeightDecayReg(l1));
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l1.getGradientNormalization());
    assertEquals(1.5, l1.getGradientNormalizationThreshold(), 1e-5);

    long numParams = net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams).reshape(1,numParams), net.params());
    int updaterSize = (int) new RmsProp().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize).reshape(1,numParams), net.getUpdater().getStateViewArray());
}
 
Example 11
Source File: RegressionTest071.java    From deeplearning4j with Apache License 2.0
@Test
public void regressionTestCNN1() throws Exception {

    File f = Resources.asFile("regression_testing/071/071_ModelSerializer_Regression_CNN_1.zip");

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(3, conf.getConfs().size());

    ConvolutionLayer l0 = (ConvolutionLayer) conf.getConf(0).getLayer();
    assertEquals("tanh", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(3, l0.getNOut());
    assertEquals(new WeightInitRelu(), l0.getWeightInitFn());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l0.getIUpdater());
    assertEquals(0.15, ((RmsProp)l0.getIUpdater()).getLearningRate(), 1e-6);
    assertArrayEquals(new int[] {2, 2}, l0.getKernelSize());
    assertArrayEquals(new int[] {1, 1}, l0.getStride());
    assertArrayEquals(new int[] {0, 0}, l0.getPadding());
    assertEquals(ConvolutionMode.Same, l0.getConvolutionMode());

    SubsamplingLayer l1 = (SubsamplingLayer) conf.getConf(1).getLayer();
    assertArrayEquals(new int[] {2, 2}, l1.getKernelSize());
    assertArrayEquals(new int[] {1, 1}, l1.getStride());
    assertArrayEquals(new int[] {0, 0}, l1.getPadding());
    assertEquals(PoolingType.MAX, l1.getPoolingType());
    assertEquals(ConvolutionMode.Same, l1.getConvolutionMode());

    OutputLayer l2 = (OutputLayer) conf.getConf(2).getLayer();
    assertEquals("sigmoid", l2.getActivationFn().toString());
    assertTrue(l2.getLossFn() instanceof LossNegativeLogLikelihood);
    assertEquals(26 * 26 * 3, l2.getNIn());
    assertEquals(5, l2.getNOut());
    assertEquals(new WeightInitRelu(), l2.getWeightInitFn());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l2.getIUpdater());
    assertEquals(0.15, ((RmsProp)l2.getIUpdater()).getLearningRate(), 1e-6);

    assertTrue(conf.getInputPreProcess(2) instanceof CnnToFeedForwardPreProcessor);

    long numParams = net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams).reshape(1,numParams), net.params());
    int updaterSize = (int) new RmsProp().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize).reshape(1,numParams), net.getUpdater().getStateViewArray());
}
 
Example 12
Source File: RegressionTest060.java    From deeplearning4j with Apache License 2.0
@Test
public void regressionTestMLP2() throws Exception {

    File f = Resources.asFile("regression_testing/060/060_ModelSerializer_Regression_MLP_2.zip");

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());

    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertTrue(l0.getActivationFn() instanceof ActivationLReLU);
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(new WeightInitDistribution(new NormalDistribution(0.1, 1.2)), l0.getWeightInitFn());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l0.getIUpdater());
    assertEquals(0.15, ((RmsProp)l0.getIUpdater()).getLearningRate(), 1e-6);
    assertEquals(new Dropout(0.6), l0.getIDropout());
    assertEquals(0.1, TestUtils.getL1(l0), 1e-6);
    assertEquals(new WeightDecay(0.2, false), TestUtils.getWeightDecayReg(l0));
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l0.getGradientNormalization());
    assertEquals(1.5, l0.getGradientNormalizationThreshold(), 1e-5);

    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertEquals("identity", l1.getActivationFn().toString());
    assertTrue(l1.getLossFn() instanceof LossMSE);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    assertEquals(new WeightInitDistribution(new NormalDistribution(0.1, 1.2)), l1.getWeightInitFn());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l1.getIUpdater());
    assertEquals(0.15, ((RmsProp)l1.getIUpdater()).getLearningRate(), 1e-6);
    assertEquals(new Dropout(0.6), l1.getIDropout());
    assertEquals(0.1, TestUtils.getL1(l1), 1e-6);
    assertEquals(new WeightDecay(0.2,false), TestUtils.getWeightDecayReg(l1));
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l1.getGradientNormalization());
    assertEquals(1.5, l1.getGradientNormalizationThreshold(), 1e-5);

    int numParams = (int)net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams, Nd4j.dataType()).reshape(1,numParams), net.params());
    int updaterSize = (int) new RmsProp().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize, Nd4j.dataType()).reshape(1,numParams), net.getUpdater().getStateViewArray());
}
 
Example 13
Source File: RegressionTest060.java    From deeplearning4j with Apache License 2.0
@Test
public void regressionTestCNN1() throws Exception {

    File f = Resources.asFile("regression_testing/060/060_ModelSerializer_Regression_CNN_1.zip");

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(3, conf.getConfs().size());

    ConvolutionLayer l0 = (ConvolutionLayer) conf.getConf(0).getLayer();
    assertEquals("tanh", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(3, l0.getNOut());
    assertEquals(new WeightInitRelu(), l0.getWeightInitFn());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l0.getIUpdater());
    assertEquals(0.15, ((RmsProp)l0.getIUpdater()).getLearningRate(), 1e-6);
    assertArrayEquals(new int[] {2, 2}, l0.getKernelSize());
    assertArrayEquals(new int[] {1, 1}, l0.getStride());
    assertArrayEquals(new int[] {0, 0}, l0.getPadding());
    assertEquals(ConvolutionMode.Truncate, l0.getConvolutionMode()); //Pre-0.7.0: no ConvolutionMode. Want to default to truncate here if not set

    SubsamplingLayer l1 = (SubsamplingLayer) conf.getConf(1).getLayer();
    assertArrayEquals(new int[] {2, 2}, l1.getKernelSize());
    assertArrayEquals(new int[] {1, 1}, l1.getStride());
    assertArrayEquals(new int[] {0, 0}, l1.getPadding());
    assertEquals(PoolingType.MAX, l1.getPoolingType());
    assertEquals(ConvolutionMode.Truncate, l1.getConvolutionMode()); //Pre-0.7.0: no ConvolutionMode. Want to default to truncate here if not set

    OutputLayer l2 = (OutputLayer) conf.getConf(2).getLayer();
    assertEquals("sigmoid", l2.getActivationFn().toString());
    assertTrue(l2.getLossFn() instanceof LossNegativeLogLikelihood);
    assertEquals(26 * 26 * 3, l2.getNIn());
    assertEquals(5, l2.getNOut());
    assertEquals(new WeightInitRelu(), l2.getWeightInitFn());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l2.getIUpdater());
    assertEquals(0.15, ((RmsProp)l2.getIUpdater()).getLearningRate(), 1e-6);

    assertTrue(conf.getInputPreProcess(2) instanceof CnnToFeedForwardPreProcessor);

    int numParams = (int)net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams, Nd4j.dataType()).reshape(1,numParams), net.params());
    int updaterSize = (int) new RmsProp().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize, Nd4j.dataType()).reshape(1,numParams), net.getUpdater().getStateViewArray());
}
 
Example 14
Source File: ScoreExamplesWithKeyFunction.java    From deeplearning4j with Apache License 2.0
@Override
public Iterator<Tuple2<K, Double>> call(Iterator<Tuple2<K, DataSet>> iterator) throws Exception {
    if (!iterator.hasNext()) {
        return Collections.emptyIterator();
    }

    MultiLayerNetwork network = new MultiLayerNetwork(MultiLayerConfiguration.fromJson(jsonConfig.getValue()));
    network.init();
    INDArray val = params.value().unsafeDuplication();
    if (val.length() != network.numParams(false))
        throw new IllegalStateException(
                        "Network did not have same number of parameters as the broadcast set parameters");
    network.setParameters(val);

    List<Tuple2<K, Double>> ret = new ArrayList<>();

    List<DataSet> collect = new ArrayList<>(batchSize);
    List<K> collectKey = new ArrayList<>(batchSize);
    int totalCount = 0;
    while (iterator.hasNext()) {
        collect.clear();
        collectKey.clear();
        int nExamples = 0;
        while (iterator.hasNext() && nExamples < batchSize) {
            Tuple2<K, DataSet> t2 = iterator.next();
            DataSet ds = t2._2();
            int n = ds.numExamples();
            if (n != 1)
                throw new IllegalStateException("Cannot score examples with one key per data set if "
                                + "data set contains more than 1 example (numExamples: " + n + ")");
            collect.add(ds);
            collectKey.add(t2._1());
            nExamples += n;
        }
        totalCount += nExamples;

        DataSet data = DataSet.merge(collect);


        INDArray scores = network.scoreExamples(data, addRegularization);
        double[] doubleScores = scores.data().asDouble();

        for (int i = 0; i < doubleScores.length; i++) {
            ret.add(new Tuple2<>(collectKey.get(i), doubleScores[i]));
        }
    }

    Nd4j.getExecutioner().commit();

    if (log.isDebugEnabled()) {
        log.debug("Scored {} examples ", totalCount);
    }

    return ret.iterator();
}
 
Example 15
Source File: ScoreExamplesFunction.java    From deeplearning4j with Apache License 2.0
@Override
public Iterator<Double> call(Iterator<DataSet> iterator) throws Exception {
    if (!iterator.hasNext()) {
        return Collections.emptyIterator();
    }

    MultiLayerNetwork network = new MultiLayerNetwork(MultiLayerConfiguration.fromJson(jsonConfig.getValue()));
    network.init();
    INDArray val = params.value().unsafeDuplication();
    if (val.length() != network.numParams(false))
        throw new IllegalStateException(
                        "Network did not have same number of parameters as the broadcast set parameters");
    network.setParameters(val);

    List<Double> ret = new ArrayList<>();

    List<DataSet> collect = new ArrayList<>(batchSize);
    int totalCount = 0;
    while (iterator.hasNext()) {
        collect.clear();
        int nExamples = 0;
        while (iterator.hasNext() && nExamples < batchSize) {
            DataSet ds = iterator.next();
            int n = ds.numExamples();
            collect.add(ds);
            nExamples += n;
        }
        totalCount += nExamples;

        DataSet data = DataSet.merge(collect);


        INDArray scores = network.scoreExamples(data, addRegularization);
        double[] doubleScores = scores.data().asDouble();

        for (double doubleScore : doubleScores) {
            ret.add(doubleScore);
        }
    }

    Nd4j.getExecutioner().commit();

    if (log.isDebugEnabled()) {
        log.debug("Scored {} examples ", totalCount);
    }

    return ret.iterator();
}
 
Example 16
Source File: ArbiterStatusListener.java    From deeplearning4j with Apache License 2.0
@Override
public void onCandidateIteration(CandidateInfo candidateInfo, Object candidate, int iteration) {
    double score;
    long numParams;
    int numLayers;
    String modelConfigJson;
    int totalNumUpdates;
    if(candidate instanceof MultiLayerNetwork){
        MultiLayerNetwork m = (MultiLayerNetwork)candidate;
        score = m.score();
        numParams = m.numParams();
        numLayers = m.getnLayers();
        modelConfigJson = m.getLayerWiseConfigurations().toJson();
        totalNumUpdates = m.getLayerWiseConfigurations().getIterationCount();
    } else if(candidate instanceof ComputationGraph) {
        ComputationGraph cg = (ComputationGraph)candidate;
        score = cg.score();
        numParams = cg.numParams();
        numLayers = cg.getNumLayers();
        modelConfigJson = cg.getConfiguration().toJson();
        totalNumUpdates = cg.getConfiguration().getIterationCount();
    } else {
        score = 0;
        numParams = 0;
        numLayers = 0;
        totalNumUpdates = 0;
        modelConfigJson = "";
    }

    int idx = candidateInfo.getIndex();

    Pair<IntArrayList, FloatArrayList> pair = candidateScoreVsIter.computeIfAbsent(idx, k -> new Pair<>(new IntArrayList(), new FloatArrayList()));

    IntArrayList iter = pair.getFirst();
    FloatArrayList scores = pair.getSecond();

    //Do we need subsampling to avoid having too many data points?
    int subsamplingFreq = candidateScoreVsIterSubsampleFreq.computeIfAbsent(idx, k -> 1);
    if(iteration / subsamplingFreq > MAX_SCORE_VS_ITER_PTS){
        //Double subsampling frequency and re-parse data
        subsamplingFreq *= 2;
        candidateScoreVsIterSubsampleFreq.put(idx, subsamplingFreq);

        IntArrayList newIter = new IntArrayList();
        FloatArrayList newScores = new FloatArrayList();
        for( int i=0; i<iter.size(); i++ ){
            int it = iter.get(i);
            if(it % subsamplingFreq == 0){
                newIter.add(it);
                newScores.add(scores.get(i));
            }
        }

        iter = newIter;
        scores = newScores;
        candidateScoreVsIter.put(idx, new Pair<>(iter, scores));
    }

    if(iteration % subsamplingFreq == 0) {
        iter.add(iteration);
        scores.add((float) score);
    }


    int[] iters = iter.toIntArray();
    float[] fScores = new float[iters.length];
    for( int i=0; i<iters.length; i++ ){
        fScores[i] = scores.get(i);
    }

    ModelInfoPersistable p = new ModelInfoPersistable.Builder()
            .timestamp(candidateInfo.getCreatedTime())
            .sessionId(sessionId)
            .workerId(String.valueOf(candidateInfo.getIndex()))
            .modelIdx(candidateInfo.getIndex())
            .score(candidateInfo.getScore())
            .status(candidateInfo.getCandidateStatus())
            .scoreVsIter(iters, fScores)
            .lastUpdateTime(System.currentTimeMillis())
            .numParameters(numParams)
            .numLayers(numLayers)
            .totalNumUpdates(totalNumUpdates)
            .paramSpaceValues(candidateInfo.getFlatParams())
            .modelConfigJson(modelConfigJson)
            .exceptionStackTrace(candidateInfo.getExceptionStackTrace())
            .build();


    lastModelInfoPersistable.put(candidateInfo.getIndex(), p);
    statsStorage.putUpdate(p);
}