org.deeplearning4j.nn.conf.ComputationGraphConfiguration Java Examples

The following examples show how to use org.deeplearning4j.nn.conf.ComputationGraphConfiguration. Each example notes its source file, the project it comes from, and that project's license.
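Most of the snippets below follow the same lifecycle: build a ComputationGraphConfiguration via NeuralNetConfiguration.Builder().graphBuilder(), wrap it in a ComputationGraph, and call init() before training or inference. A minimal, self-contained sketch of that pattern (the layer names and sizes are illustrative, not taken from any example below):

import org.deeplearning4j.nn.conf.ComputationGraphConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.graph.ComputationGraph;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
        .seed(12345)
        .graphBuilder()
        .addInputs("in") // declare named inputs first
        .addLayer("dense", new DenseLayer.Builder().nIn(4).nOut(8)
                .activation(Activation.RELU).build(), "in")
        .addLayer("out", new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                .activation(Activation.SOFTMAX).nIn(8).nOut(3).build(), "dense")
        .setOutputs("out") // declare which vertices produce network outputs
        .build();

ComputationGraph net = new ComputationGraph(conf);
net.init(); // allocates parameters; required before fit()/output()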
Example #1
Source File: TestCompGraphCNN.java    From deeplearning4j with Apache License 2.0
protected static ComputationGraphConfiguration getMultiInputGraphConfig() {
    ComputationGraphConfiguration conf =
                    new NeuralNetConfiguration.Builder()
                                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                                    .graphBuilder().addInputs("input")
                                    .setInputTypes(InputType.convolutional(32, 32, 3))
                                    .addLayer("cnn1",
                                                    new ConvolutionLayer.Builder(4, 4).stride(2, 2).nIn(3).nOut(3)
                                                                    .build(),
                                                    "input")
                                    .addLayer("cnn2",
                                                    new ConvolutionLayer.Builder(4, 4).stride(2, 2).nIn(3).nOut(3)
                                                                    .build(),
                                                    "input")
                                    .addLayer("max1",
                                                    new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX)
                                                                    .stride(1, 1).kernelSize(2, 2).build(),
                                                    "cnn1", "cnn2")
                                    .addLayer("dnn1", new DenseLayer.Builder().nOut(7).build(), "max1")
                                    .addLayer("output", new OutputLayer.Builder().nIn(7).nOut(10).activation(Activation.SOFTMAX).build(), "dnn1")
                                    .setOutputs("output").build();

    return conf;
}
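A note on this example: passing both "cnn1" and "cnn2" as inputs to "max1" makes the graph builder insert an implicit merge vertex, so the two convolutional activation volumes are concatenated along the channel dimension before pooling. Turning the returned configuration into a usable network is then the usual two steps (a sketch, assuming the helper above is in scope):

ComputationGraph graph = new ComputationGraph(getMultiInputGraphConfig());
graph.init();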
 
Example #2
Source File: ActorCriticCompGraph.java    From deeplearning4j with Apache License 2.0
public void applyGradient(Gradient[] gradient, int batchSize) {
    if (recurrent) {
        // assume batch sizes of 1 for recurrent networks,
        // since we are learning each episode as a time series
        batchSize = 1;
    }
    ComputationGraphConfiguration cgConf = cg.getConfiguration();
    int iterationCount = cgConf.getIterationCount();
    int epochCount = cgConf.getEpochCount();
    cg.getUpdater().update(gradient[0], iterationCount, epochCount, batchSize, LayerWorkspaceMgr.noWorkspaces());
    cg.params().subi(gradient[0].gradient());
    Collection<TrainingListener> iterationListeners = cg.getListeners();
    if (iterationListeners != null && iterationListeners.size() > 0) {
        for (TrainingListener listener : iterationListeners) {
            listener.iterationDone(cg, iterationCount, epochCount);
        }
    }
    cgConf.setIterationCount(iterationCount + 1);
}
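The Gradient array passed into this method is typically computed by the graph itself. A hedged sketch of that step (assuming cg is an initialized ComputationGraph, and features and labels are illustrative INDArrays already in scope):

cg.setInputs(features);
cg.setLabels(labels);
cg.computeGradientAndScore(); // populates cg.gradient()
applyGradient(new Gradient[] {cg.gradient()}, batchSize);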
 
Example #3
Source File: TestUtils.java    From deeplearning4j with Apache License 2.0
public static ComputationGraph testModelSerialization(ComputationGraph net){

    ComputationGraph restored;
    try {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        ModelSerializer.writeModel(net, baos, true);
        byte[] bytes = baos.toByteArray();

        ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
        restored = ModelSerializer.restoreComputationGraph(bais, true);

        assertEquals(net.getConfiguration(), restored.getConfiguration());
        assertEquals(net.params(), restored.params());
    } catch (IOException e){
        //Should never happen
        throw new RuntimeException(e);
    }

    //Also check the ComputationGraphConfiguration is serializable (required by Spark etc)
    ComputationGraphConfiguration conf = net.getConfiguration();
    serializeDeserializeJava(conf);

    return restored;
}
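The serializeDeserializeJava helper called above is not shown on this page. A plausible sketch of it (an assumption about its behavior, not the actual TestUtils implementation) round-trips the object through plain Java serialization and checks equality:

private static <T> T serializeDeserializeJava(T object) {
    try {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (ObjectOutputStream oos = new ObjectOutputStream(baos)) {
            oos.writeObject(object); // throws NotSerializableException if T is not Serializable
        }
        try (ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray()))) {
            @SuppressWarnings("unchecked")
            T deserialized = (T) ois.readObject();
            assertEquals(object, deserialized);
            return deserialized;
        }
    } catch (IOException | ClassNotFoundException e) {
        throw new RuntimeException(e);
    }
}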
 
Example #4
Source File: Dl4jMlpClassifier.java    From wekaDeeplearning4j with GNU General Public License v3.0
/**
 * Build the multilayer network defined by the network configuration and the list of layers.
 */
protected void createModel() throws Exception {
  final INDArray features = getFirstBatchFeatures(trainData);
  ComputationGraphConfiguration.GraphBuilder gb =
      netConfig.builder().seed(getSeed()).graphBuilder();

  // Set output size
  final Layer lastLayer = layers[layers.length - 1];
  final int nOut = trainData.numClasses();
  if (lastLayer instanceof FeedForwardLayer) {
    ((FeedForwardLayer) lastLayer).setNOut(nOut);
  }

  if (getInstanceIterator() instanceof CnnTextEmbeddingInstanceIterator) {
    makeCnnTextLayerSetup(gb);
  } else {
    makeDefaultLayerSetup(gb);
  }

  gb.setInputTypes(InputType.inferInputType(features));
  ComputationGraphConfiguration conf = gb.build();
  ComputationGraph model = new ComputationGraph(conf);
  model.init();
  this.model = model;
}
 
Example #5
Source File: MiscRegressionTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testFrozen() throws Exception {
    File f = new ClassPathResource("regression_testing/misc/legacy_frozen/configuration.json").getFile();
    String json = FileUtils.readFileToString(f, StandardCharsets.UTF_8.name());
    ComputationGraphConfiguration conf = ComputationGraphConfiguration.fromJson(json);

    int countFrozen = 0;
    for(Map.Entry<String,GraphVertex> e : conf.getVertices().entrySet()){
        GraphVertex gv = e.getValue();
        assertNotNull(gv);
        if(gv instanceof LayerVertex){
            LayerVertex lv = (LayerVertex)gv;
            Layer layer = lv.getLayerConf().getLayer();
            if(layer instanceof FrozenLayer)
                countFrozen++;
        }
    }

    assertTrue(countFrozen > 0);
}
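Configurations containing FrozenLayer wrappers, like the legacy JSON loaded here, are usually produced by DL4J's transfer-learning API rather than written by hand. A hedged sketch of how such a configuration might arise (assuming pretrained is an initialized ComputationGraph with a vertex named "dense"):

ComputationGraph withFrozen = new TransferLearning.GraphBuilder(pretrained)
        .setFeatureExtractor("dense") // freezes "dense" and every vertex feeding into it
        .build();
String json = withFrozen.getConfiguration().toJson(); // FrozenLayer wrappers survive the JSON round trip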
 
Example #6
Source File: TestVertxUI.java    From deeplearning4j with Apache License 2.0
@Test
public void testUICompGraph() {

    StatsStorage ss = new InMemoryStatsStorage();

    UIServer uiServer = UIServer.getInstance();
    uiServer.attach(ss);

    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().graphBuilder().addInputs("in")
                    .addLayer("L0", new DenseLayer.Builder().activation(Activation.TANH).nIn(4).nOut(4).build(),
                                    "in")
                    .addLayer("L1", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
                                    .activation(Activation.SOFTMAX).nIn(4).nOut(3).build(), "L0")
                    .setOutputs("L1").build();

    ComputationGraph net = new ComputationGraph(conf);
    net.init();

    net.setListeners(new StatsListener(ss), new ScoreIterationListener(1));

    DataSetIterator iter = new IrisDataSetIterator(150, 150);

    for (int i = 0; i < 100; i++) {
        net.fit(iter);
    }
}
 
Example #7
Source File: CenterLossOutputLayerTest.java    From deeplearning4j with Apache License 2.0
private ComputationGraph getGraph(int numLabels, double lambda) {
    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .dist(new NormalDistribution(0, 1)).updater(new NoOp())
                    .graphBuilder().addInputs("input1")
                    .addLayer("l1", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.RELU).build(),
                                    "input1")
                    .addLayer("lossLayer", new CenterLossOutputLayer.Builder()
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).nIn(5).nOut(numLabels)
                                    .lambda(lambda).activation(Activation.SOFTMAX).build(), "l1")
                    .setOutputs("lossLayer").build();

    ComputationGraph graph = new ComputationGraph(conf);
    graph.init();

    return graph;
}
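The lambda argument weights the center-loss penalty relative to the layer's main loss function (MCXENT here); with lambda = 0 the layer behaves essentially like a standard softmax output layer.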
 
Example #8
Source File: CGVaeReconstructionErrorWithKeyFunction.java    From deeplearning4j with Apache License 2.0
@Override
public VariationalAutoencoder getVaeLayer() {
    ComputationGraph network =
                    new ComputationGraph(ComputationGraphConfiguration.fromJson((String) jsonConfig.getValue()));
    network.init();
    INDArray val = ((INDArray) params.value()).unsafeDuplication();
    if (val.length() != network.numParams(false))
        throw new IllegalStateException(
                        "Network did not have same number of parameters as the broadcasted set parameters");
    network.setParams(val);

    Layer l = network.getLayer(0);
    if (!(l instanceof VariationalAutoencoder)) {
        throw new RuntimeException(
                        "Cannot use CGVaeReconstructionErrorWithKeyFunction on network that doesn't have a VAE "
                                        + "layer as layer 0. Layer type: " + l.getClass());
    }
    return (VariationalAutoencoder) l;
}
 
Example #9
Source File: ModelSerializerTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testJavaSerde_1() throws Exception {
    int nIn = 5;
    int nOut = 6;

    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345).l1(0.01)
            .graphBuilder()
            .addInputs("in")
            .layer("0", new OutputLayer.Builder().nIn(nIn).nOut(nOut).build(), "in")
            .setOutputs("0")
            .validateOutputLayerConfig(false)
            .build();

    ComputationGraph net = new ComputationGraph(conf);
    net.init();

    DataSet dataSet = trivialDataSet();
    NormalizerStandardize norm = new NormalizerStandardize();
    norm.fit(dataSet);

    val b = SerializationUtils.serialize(net);

    ComputationGraph restored = SerializationUtils.deserialize(b);

    assertEquals(net, restored);
}
 
Example #10
Source File: ModelSerializerTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testInvalidLoading1() throws Exception {
    ComputationGraphConfiguration config = new NeuralNetConfiguration.Builder()
            .graphBuilder().addInputs("in")
            .addLayer("dense", new DenseLayer.Builder().nIn(4).nOut(2).build(), "in")
            .addLayer("out",new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                            .activation(Activation.SOFTMAX).nIn(2).nOut(3).build(),
                    "dense")
            .setOutputs("out").build();

    ComputationGraph cg = new ComputationGraph(config);
    cg.init();

    File tempFile = tempDir.newFile();

    ModelSerializer.writeModel(cg, tempFile, true);

    try {
        ModelSerializer.restoreMultiLayerNetwork(tempFile);
        fail();
    } catch (Exception e){
        String msg = e.getMessage();
        assertTrue(msg, msg.contains("JSON") && msg.contains("restoreComputationGraph"));
    }
}
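The fix that the exception message points to is to use the graph-specific restore method instead:

ComputationGraph restored = ModelSerializer.restoreComputationGraph(tempFile, true);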
 
Example #11
Source File: ComputationGraphTestRNN.java    From deeplearning4j with Apache License 2.0
@Test
public void testTbpttMasking() {
    //Simple "does it throw an exception" type test...
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                    .graphBuilder().addInputs("in")
                    .addLayer("out", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE)
                                    .activation(Activation.IDENTITY).nIn(1).nOut(1).build(), "in")
                    .setOutputs("out").backpropType(BackpropType.TruncatedBPTT).tBPTTForwardLength(8)
                    .tBPTTBackwardLength(8).build();

    ComputationGraph net = new ComputationGraph(conf);
    net.init();

    MultiDataSet data = new MultiDataSet(new INDArray[] {Nd4j.linspace(1, 10, 10, Nd4j.dataType()).reshape(1, 1, 10)},
                    new INDArray[] {Nd4j.linspace(2, 20, 10, Nd4j.dataType()).reshape(1, 1, 10)}, null,
                    new INDArray[] {Nd4j.ones(1, 10)});

    net.fit(data);
}
 
Example #12
Source File: ModelSerializerTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testWriteCGModelInputStream() throws Exception {
    ComputationGraphConfiguration config = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).updater(new Sgd(0.1))
                    .graphBuilder().addInputs("in")
                    .addLayer("dense", new DenseLayer.Builder().nIn(4).nOut(2).build(), "in").addLayer("out",
                                    new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(2).nOut(3)
                                            .activation(Activation.SOFTMAX).build(),
                                    "dense")
                    .setOutputs("out").build();

    ComputationGraph cg = new ComputationGraph(config);
    cg.init();

    File tempFile = tempDir.newFile();

    ModelSerializer.writeModel(cg, tempFile, true);
    FileInputStream fis = new FileInputStream(tempFile);

    ComputationGraph network = ModelSerializer.restoreComputationGraph(fis);

    assertEquals(network.getConfiguration().toJson(), cg.getConfiguration().toJson());
    assertEquals(cg.params(), network.params());
    assertEquals(cg.getUpdater().getStateViewArray(), network.getUpdater().getStateViewArray());
}
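The boolean passed to writeModel controls whether updater state is saved alongside the parameters; because it is true here, the final assertion can expect the restored updater's state view array to match the original exactly.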
 
Example #13
Source File: TestListeners.java    From deeplearning4j with Apache License 2.0
@Test
public void testListenersViaModelGraph() {
    TestListener.clearCounts();

    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().graphBuilder()
                    .addInputs("in").addLayer("0",
                                    new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(10).nOut(10)
                                                    .activation(Activation.TANH).build(),
                                    "in")
                    .setOutputs("0").build();

    ComputationGraph model = new ComputationGraph(conf);
    model.init();

    StatsStorage ss = new InMemoryStatsStorage();
    model.setListeners(new TestListener(), new StatsListener(ss));

    testListenersForModel(model, null);

    assertEquals(1, ss.listSessionIDs().size());
    assertEquals(2, ss.listWorkerIDsForSession(ss.listSessionIDs().get(0)).size());
}
 
Example #14
Source File: TrainModule.java    From deeplearning4j with Apache License 2.0
private TrainModuleUtils.GraphInfo getGraphInfo(Triple<MultiLayerConfiguration,
        ComputationGraphConfiguration, NeuralNetConfiguration> conf) {
    if (conf == null) {
        return null;
    }

    if (conf.getFirst() != null) {
        return TrainModuleUtils.buildGraphInfo(conf.getFirst());
    } else if (conf.getSecond() != null) {
        return TrainModuleUtils.buildGraphInfo(conf.getSecond());
    } else if (conf.getThird() != null) {
        return TrainModuleUtils.buildGraphInfo(conf.getThird());
    } else {
        return null;
    }
}
 
Example #15
Source File: GraphTestCase.java    From jstarcraft-ai with Apache License 2.0
private ComputationGraph getOldFunction() {
    NeuralNetConfiguration.Builder netBuilder = new NeuralNetConfiguration.Builder();
    // 设置随机种子
    netBuilder.seed(6);
    netBuilder.setL1(l1Regularization);
    netBuilder.setL1Bias(l1Regularization);
    netBuilder.setL2(l2Regularization);
    netBuilder.setL2Bias(l2Regularization);
    netBuilder.weightInit(WeightInit.XAVIER_UNIFORM);
    netBuilder.updater(new Sgd(learnRatio)).optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT);

    GraphBuilder graphBuilder = netBuilder.graphBuilder();
    graphBuilder.addInputs("leftInput", "rightInput");
    graphBuilder.addLayer("leftEmbed", new EmbeddingLayer.Builder().nIn(5).nOut(5).hasBias(true).activation(Activation.IDENTITY).build(), "leftInput");
    graphBuilder.addLayer("rightEmbed", new EmbeddingLayer.Builder().nIn(5).nOut(5).hasBias(true).activation(Activation.IDENTITY).build(), "rightInput");
    graphBuilder.addVertex("embed", new MergeVertex(), "leftEmbed", "rightEmbed");
    graphBuilder.addLayer("output", new OutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).nIn(10).nOut(1).build(), "embed");
    graphBuilder.setOutputs("output");

    ComputationGraphConfiguration configuration = graphBuilder.build();
    ComputationGraph graph = new ComputationGraph(configuration);
    graph.init();
    return graph;
}
 
Example #16
Source File: CGVaeReconstructionProbWithKeyFunction.java    From deeplearning4j with Apache License 2.0
@Override
public VariationalAutoencoder getVaeLayer() {
    ComputationGraph network =
                    new ComputationGraph(ComputationGraphConfiguration.fromJson((String) jsonConfig.getValue()));
    network.init();
    INDArray val = ((INDArray) params.value()).unsafeDuplication();
    if (val.length() != network.numParams(false))
        throw new IllegalStateException(
                        "Network did not have same number of parameters as the broadcasted set parameters");
    network.setParams(val);

    Layer l = network.getLayer(0);
    if (!(l instanceof VariationalAutoencoder)) {
        throw new RuntimeException(
                        "Cannot use CGVaeReconstructionProbWithKeyFunction on network that doesn't have a VAE "
                                        + "layer as layer 0. Layer type: " + l.getClass());
    }
    return (VariationalAutoencoder) l;
}
 
Example #17
Source File: Keras2ModelConfigurationTest.java    From deeplearning4j with Apache License 2.0
private void runModelConfigTest(String path) throws Exception {
    try(InputStream is = Resources.asStream(path)) {
        ComputationGraphConfiguration config =
                new KerasModel().modelBuilder().modelJsonInputStream(is)
                        .enforceTrainingConfig(false).buildModel().getComputationGraphConfiguration();
        ComputationGraph model = new ComputationGraph(config);
        model.init();
    }
}
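Here enforceTrainingConfig(false) tells the importer to tolerate Keras training-time settings that have no DL4J equivalent rather than fail, which is appropriate when only the resulting architecture is under test. A usage sketch (the resource path is illustrative, not taken from the actual test suite):

runModelConfigTest("modelimport/keras/configs/keras2/some_model_config.json");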
 
Example #18
Source File: RnnSequenceClassifier.java    From wekaDeeplearning4j with GNU General Public License v3.0
@Override
protected void createModel() throws Exception {
  final INDArray features = getFirstBatchFeatures(trainData);
  log.info("Feature shape: {}", features.shape());
  ComputationGraphConfiguration.GraphBuilder gb =
      netConfig
          .builder()
          .seed(getSeed())
          .graphBuilder()
          .backpropType(BackpropType.TruncatedBPTT)
          .tBPTTBackwardLength(tBPTTbackwardLength)
          .tBPTTForwardLength(tBPTTforwardLength);

  // Set output size
  final Layer lastLayer = layers[layers.length - 1];
  final int nOut = trainData.numClasses();
  if (lastLayer.getBackend() instanceof RnnOutputLayer) {
    ((weka.dl4j.layers.RnnOutputLayer) lastLayer).setNOut(nOut);
  }

  String currentInput = "input";
  gb.addInputs(currentInput);
  // Collect layers
  for (Layer layer : layers) {
    String lName = layer.getLayerName();
    gb.addLayer(lName, layer.getBackend().clone(), currentInput);
    currentInput = lName;
  }
  gb.setOutputs(currentInput);
  gb.setInputTypes(InputType.inferInputType(features));

  ComputationGraphConfiguration conf = gb.build();
  ComputationGraph model = new ComputationGraph(conf);
  model.init();
  this.model = model;
}
 
Example #19
Source File: RegressionTest080.java    From deeplearning4j with Apache License 2.0
@Test
public void regressionTestCGLSTM1() throws Exception {

    File f = Resources.asFile("regression_testing/080/080_ModelSerializer_Regression_CG_LSTM_1.zip");

    ComputationGraph net = ModelSerializer.restoreComputationGraph(f, true);

    ComputationGraphConfiguration conf = net.getConfiguration();
    assertEquals(3, conf.getVertices().size());

    GravesLSTM l0 = (GravesLSTM) ((LayerVertex) conf.getVertices().get("0")).getLayerConf().getLayer();
    assertTrue(l0.getActivationFn() instanceof ActivationTanH);
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l0.getGradientNormalization());
    assertEquals(1.5, l0.getGradientNormalizationThreshold(), 1e-5);

    GravesBidirectionalLSTM l1 =
                    (GravesBidirectionalLSTM) ((LayerVertex) conf.getVertices().get("1")).getLayerConf().getLayer();
    assertTrue(l1.getActivationFn() instanceof ActivationSoftSign);
    assertEquals(4, l1.getNIn());
    assertEquals(4, l1.getNOut());
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l1.getGradientNormalization());
    assertEquals(1.5, l1.getGradientNormalizationThreshold(), 1e-5);

    RnnOutputLayer l2 = (RnnOutputLayer) ((LayerVertex) conf.getVertices().get("2")).getLayerConf().getLayer();
    assertEquals(4, l2.getNIn());
    assertEquals(5, l2.getNOut());
    assertTrue(l2.getActivationFn() instanceof ActivationSoftmax);
    assertTrue(l2.getLossFn() instanceof LossMCXENT);
}
 
Example #20
Source File: RegressionTest060.java    From deeplearning4j with Apache License 2.0
@Test
public void regressionTestCGLSTM1() throws Exception {

    File f = Resources.asFile("regression_testing/060/060_ModelSerializer_Regression_CG_LSTM_1.zip");

    ComputationGraph net = ModelSerializer.restoreComputationGraph(f, true);

    ComputationGraphConfiguration conf = net.getConfiguration();
    assertEquals(3, conf.getVertices().size());

    GravesLSTM l0 = (GravesLSTM) ((LayerVertex) conf.getVertices().get("0")).getLayerConf().getLayer();
    assertEquals("tanh", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l0.getGradientNormalization());
    assertEquals(1.5, l0.getGradientNormalizationThreshold(), 1e-5);

    GravesBidirectionalLSTM l1 =
                    (GravesBidirectionalLSTM) ((LayerVertex) conf.getVertices().get("1")).getLayerConf().getLayer();
    assertEquals("softsign", l1.getActivationFn().toString());
    assertEquals(4, l1.getNIn());
    assertEquals(4, l1.getNOut());
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l1.getGradientNormalization());
    assertEquals(1.5, l1.getGradientNormalizationThreshold(), 1e-5);

    RnnOutputLayer l2 = (RnnOutputLayer) ((LayerVertex) conf.getVertices().get("2")).getLayerConf().getLayer();
    assertEquals(4, l2.getNIn());
    assertEquals(5, l2.getNOut());
    assertEquals("softmax", l2.getActivationFn().toString());
    assertTrue(l2.getLossFn() instanceof LossMCXENT);
}
 
Example #21
Source File: TestEarlyStoppingSparkCompGraph.java    From deeplearning4j with Apache License 2.0
@Test
public void testBadTuning() {
    //Test poor tuning (high LR): should terminate on MaxScoreIterationTerminationCondition

    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .updater(new Sgd(2.0)) //Intentionally huge LR
                    .weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
                    .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.IDENTITY)
                                    .lossFunction(LossFunctions.LossFunction.MSE).build(), "in")
                    .setOutputs("0").build();
    ComputationGraph net = new ComputationGraph(conf);
    net.setListeners(new ScoreIterationListener(5));

    JavaRDD<DataSet> irisData = getIris();
    EarlyStoppingModelSaver<ComputationGraph> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<ComputationGraph> esConf = new EarlyStoppingConfiguration.Builder<ComputationGraph>()
                    .epochTerminationConditions(new MaxEpochsTerminationCondition(5000))
                    .iterationTerminationConditions(new MaxTimeIterationTerminationCondition(2, TimeUnit.MINUTES),
                                    new MaxScoreIterationTerminationCondition(7.5)) //Initial score is ~2.5
                    .scoreCalculator(new SparkLossCalculatorComputationGraph(
                                    irisData.map(new DataSetToMultiDataSetFn()), true, sc.sc()))
                    .modelSaver(saver).build();

    TrainingMaster tm = new ParameterAveragingTrainingMaster(true, numExecutors(), 1, 10, 1, 0);

    IEarlyStoppingTrainer<ComputationGraph> trainer = new SparkEarlyStoppingGraphTrainer(getContext().sc(), tm,
                    esConf, net, irisData.map(new DataSetToMultiDataSetFn()));
    EarlyStoppingResult result = trainer.fit();

    assertTrue(result.getTotalEpochs() < 5);
    assertEquals(EarlyStoppingResult.TerminationReason.IterationTerminationCondition,
                    result.getTerminationReason());
    String expDetails = new MaxScoreIterationTerminationCondition(7.5).toString();
    assertEquals(expDetails, result.getTerminationDetails());
}
 
Example #22
Source File: NetBroadcastTuple.java    From deeplearning4j with Apache License 2.0
public NetBroadcastTuple(MultiLayerConfiguration configuration, ComputationGraphConfiguration graphConfiguration,
                INDArray parameters, INDArray updaterState, AtomicInteger counter) {
    this.configuration = configuration;
    this.graphConfiguration = graphConfiguration;
    this.parameters = parameters;
    this.updaterState = updaterState;
    this.counter = counter;
}
 
Example #23
Source File: TestEarlyStoppingCompGraph.java    From deeplearning4j with Apache License 2.0
@Test
public void testBadTuning() {
    //Test poor tuning (high LR): should terminate on MaxScoreIterationTerminationCondition

    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .updater(new Sgd(5.0)) //Intentionally huge LR
                    .weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
                    .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX)
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
                    .setOutputs("0").build();
    ComputationGraph net = new ComputationGraph(conf);
    net.setListeners(new ScoreIterationListener(1));

    DataSetIterator irisIter = new IrisDataSetIterator(150, 150);
    EarlyStoppingModelSaver<ComputationGraph> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<ComputationGraph> esConf = new EarlyStoppingConfiguration.Builder<ComputationGraph>()
                    .epochTerminationConditions(new MaxEpochsTerminationCondition(5000))
                    .iterationTerminationConditions(new MaxTimeIterationTerminationCondition(1, TimeUnit.MINUTES),
                                    new MaxScoreIterationTerminationCondition(10)) //Initial score is ~2.5
                    .scoreCalculator(new DataSetLossCalculatorCG(irisIter, true)).modelSaver(saver).build();

    IEarlyStoppingTrainer trainer = new EarlyStoppingGraphTrainer(esConf, net, irisIter);
    EarlyStoppingResult result = trainer.fit();

    assertTrue(result.getTotalEpochs() < 5);
    assertEquals(EarlyStoppingResult.TerminationReason.IterationTerminationCondition,
                    result.getTerminationReason());
    String expDetails = new MaxScoreIterationTerminationCondition(10).toString();
    assertEquals(expDetails, result.getTerminationDetails());

    assertEquals(0, result.getBestModelEpoch());
    assertNotNull(result.getBestModel());
}
 
Example #24
Source File: TestEarlyStoppingCompGraph.java    From deeplearning4j with Apache License 2.0
@Test
public void testTimeTermination() {
    //test termination after max time

    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .updater(new Sgd(1e-6)).weightInit(WeightInit.XAVIER).graphBuilder()
                    .addInputs("in")
                    .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
                            .activation(Activation.SOFTMAX)
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
                    .setOutputs("0").build();
    ComputationGraph net = new ComputationGraph(conf);
    net.setListeners(new ScoreIterationListener(1));

    DataSetIterator irisIter = new IrisDataSetIterator(150, 150);

    EarlyStoppingModelSaver<ComputationGraph> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<ComputationGraph> esConf = new EarlyStoppingConfiguration.Builder<ComputationGraph>()
                    .epochTerminationConditions(new MaxEpochsTerminationCondition(10000))
                    .iterationTerminationConditions(new MaxTimeIterationTerminationCondition(5, TimeUnit.SECONDS),
                                    new MaxScoreIterationTerminationCondition(50)) //Initial score is ~8
                    .scoreCalculator(new DataSetLossCalculator(irisIter, true))
                    .modelSaver(saver).build();

    IEarlyStoppingTrainer trainer = new EarlyStoppingGraphTrainer(esConf, net, irisIter);
    long startTime = System.currentTimeMillis();
    EarlyStoppingResult result = trainer.fit();
    long endTime = System.currentTimeMillis();
    int durationSeconds = (int) (endTime - startTime) / 1000;

    assertTrue(durationSeconds >= 3);
    assertTrue(durationSeconds <= 20);

    assertEquals(EarlyStoppingResult.TerminationReason.IterationTerminationCondition,
                    result.getTerminationReason());
    String expDetails = new MaxTimeIterationTerminationCondition(5, TimeUnit.SECONDS).toString();
    assertEquals(expDetails, result.getTerminationDetails());
}
 
Example #25
Source File: TestEarlyStoppingCompGraph.java    From deeplearning4j with Apache License 2.0
@Test
public void testListeners() {
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .updater(new Sgd(0.001)).weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
                    .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
                            .activation(Activation.SOFTMAX)
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
                    .setOutputs("0").build();
    ComputationGraph net = new ComputationGraph(conf);
    net.setListeners(new ScoreIterationListener(1));

    DataSetIterator irisIter = new IrisDataSetIterator(150, 150);
    EarlyStoppingModelSaver<ComputationGraph> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<ComputationGraph> esConf = new EarlyStoppingConfiguration.Builder<ComputationGraph>()
                    .epochTerminationConditions(new MaxEpochsTerminationCondition(5))
                    .iterationTerminationConditions(new MaxTimeIterationTerminationCondition(1, TimeUnit.MINUTES))
                    .scoreCalculator(new DataSetLossCalculatorCG(irisIter, true)).modelSaver(saver).build();

    LoggingEarlyStoppingListener listener = new LoggingEarlyStoppingListener();

    IEarlyStoppingTrainer trainer = new EarlyStoppingGraphTrainer(esConf, net, irisIter, listener);

    trainer.fit();

    assertEquals(1, listener.onStartCallCount);
    assertEquals(5, listener.onEpochCallCount);
    assertEquals(1, listener.onCompletionCallCount);
}
 
Example #26
Source File: TestDropout.java    From deeplearning4j with Apache License 2.0
@Test
public void testBasicConfig(){

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .dropOut(0.6)
            .list()
            .layer(new DenseLayer.Builder().nIn(10).nOut(10).build())
            .layer(new DenseLayer.Builder().nIn(10).nOut(10).dropOut(0.7).build())
            .layer(new DenseLayer.Builder().nIn(10).nOut(10).dropOut(new AlphaDropout(0.5)).build())
            .build();

    assertEquals(new Dropout(0.6), conf.getConf(0).getLayer().getIDropout());
    assertEquals(new Dropout(0.7), conf.getConf(1).getLayer().getIDropout());
    assertEquals(new AlphaDropout(0.5), conf.getConf(2).getLayer().getIDropout());


    ComputationGraphConfiguration conf2 = new NeuralNetConfiguration.Builder()
            .dropOut(0.6)
            .graphBuilder()
            .addInputs("in")
            .addLayer("0", new DenseLayer.Builder().nIn(10).nOut(10).build(), "in")
            .addLayer("1", new DenseLayer.Builder().nIn(10).nOut(10).dropOut(0.7).build(), "0")
            .addLayer("2", new DenseLayer.Builder().nIn(10).nOut(10).dropOut(new AlphaDropout(0.5)).build(), "1")
            .setOutputs("2")
            .build();

    assertEquals(new Dropout(0.6), ((LayerVertex)conf2.getVertices().get("0")).getLayerConf().getLayer().getIDropout());
    assertEquals(new Dropout(0.7), ((LayerVertex)conf2.getVertices().get("1")).getLayerConf().getLayer().getIDropout());
    assertEquals(new AlphaDropout(0.5), ((LayerVertex)conf2.getVertices().get("2")).getLayerConf().getLayer().getIDropout());
}
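The builder-level dropOut(0.6) acts as a network-wide default: layers that set their own value ("1" and "2" in the graph version) override it, exactly as the assertions confirm.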
 
Example #27
Source File: TestGraphNodes.java    From deeplearning4j with Apache License 2.0
@Test
public void testDuplicateToTimeSeriesVertex() {

    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().graphBuilder()
                    .addInputs("in2d", "in3d")
                    .addVertex("duplicateTS", new DuplicateToTimeSeriesVertex("in3d"), "in2d")
                    .addLayer("out", new OutputLayer.Builder().nIn(1).nOut(1).activation(Activation.TANH).lossFunction(LossFunctions.LossFunction.MSE).build(), "duplicateTS")
                    .addLayer("out3d", new RnnOutputLayer.Builder().nIn(1).nOut(1).activation(Activation.TANH).lossFunction(LossFunctions.LossFunction.MSE).build(), "in3d")
                    .setOutputs("out", "out3d").build();

    ComputationGraph graph = new ComputationGraph(conf);
    graph.init();

    INDArray in2d = Nd4j.rand(3, 5);
    INDArray in3d = Nd4j.rand(new int[] {3, 2, 7});

    graph.setInputs(in2d, in3d);

    INDArray expOut = Nd4j.zeros(3, 5, 7);
    for (int i = 0; i < 7; i++) {
        expOut.put(new INDArrayIndex[] {NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.point(i)}, in2d);
    }

    GraphVertex gv = graph.getVertex("duplicateTS");
    gv.setInputs(in2d);
    INDArray outFwd = gv.doForward(true, LayerWorkspaceMgr.noWorkspaces());
    assertEquals(expOut, outFwd);

    INDArray expOutBackward = expOut.sum(2);
    gv.setEpsilon(expOut);
    INDArray outBwd = gv.doBackward(false, LayerWorkspaceMgr.noWorkspaces()).getSecond()[0];
    assertEquals(expOutBackward, outBwd);

    String json = conf.toJson();
    ComputationGraphConfiguration conf2 = ComputationGraphConfiguration.fromJson(json);
    assertEquals(conf, conf2);
}
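DuplicateToTimeSeriesVertex repeats a 2d [minibatch, size] activation along a third (time) dimension, using the named 3d input ("in3d") only to determine how many time steps to produce; the expected-output loop above builds exactly that array for comparison.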
 
Example #28
Source File: TestMemoryReports.java    From deeplearning4j with Apache License 2.0
@Test
public void testMemoryReportsVerticesCG() {
    List<Pair<? extends GraphVertex, InputType[]>> l = getTestVertices();

    for (Pair<? extends GraphVertex, InputType[]> p : l) {
        List<String> inputs = new ArrayList<>();
        for (int i = 0; i < p.getSecond().length; i++) {
            inputs.add(String.valueOf(i));
        }

        String[] layerInputs = inputs.toArray(new String[inputs.size()]);
        if (p.getFirst() instanceof DuplicateToTimeSeriesVertex) {
            layerInputs = new String[] {"1"};
        }

        ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().graphBuilder().addInputs(inputs)
                        .allowDisconnected(true)
                        .addVertex("gv", p.getFirst(), layerInputs).setOutputs("gv").build();

        MemoryReport mr = conf.getMemoryReport(p.getSecond());
        //            System.out.println(mr.toString());
        //            System.out.println("\n\n");

        //Test to/from JSON + YAML
        String json = mr.toJson();
        String yaml = mr.toYaml();

        MemoryReport fromJson = MemoryReport.fromJson(json);
        MemoryReport fromYaml = MemoryReport.fromYaml(yaml);

        assertEquals(mr, fromJson);
        assertEquals(mr, fromYaml);
    }
}
 
Example #29
Source File: AutoEncoderTest.java    From deeplearning4j with Apache License 2.0
@Test
public void sanityCheckIssue5662(){
    int mergeSize = 50;
    int encdecSize = 25;
    int in1Size = 20;
    int in2Size = 15;
    int hiddenSize = 10;

    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
            .weightInit(WeightInit.XAVIER)
            .graphBuilder()
            .addInputs("in1", "in2")
            .addLayer("1", new DenseLayer.Builder().nOut(mergeSize).build(), "in1")
            .addLayer("2", new DenseLayer.Builder().nOut(mergeSize).build(), "in2")
            .addVertex("merge", new MergeVertex(), "1", "2")
            .addLayer("e",new AutoEncoder.Builder().nOut(encdecSize).corruptionLevel(0.2).build(),"merge")
            .addLayer("hidden",new AutoEncoder.Builder().nOut(hiddenSize).build(),"e")
            .addLayer("decoder",new AutoEncoder.Builder().nOut(encdecSize).corruptionLevel(0.2).build(),"hidden")
            .addLayer("L4", new DenseLayer.Builder().nOut(mergeSize).build(), "decoder")
            .addLayer("out1", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nOut(in1Size).build(),"L4")
            .addLayer("out2",new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MSE).nOut(in2Size).build(),"L4")
            .setOutputs("out1","out2")
            .setInputTypes(InputType.feedForward(in1Size), InputType.feedForward(in2Size))

            .build();

    ComputationGraph net = new ComputationGraph(conf);
    net.init();

    MultiDataSet mds = new org.nd4j.linalg.dataset.MultiDataSet(
            new INDArray[]{Nd4j.create(1, in1Size), Nd4j.create(1, in2Size)},
            new INDArray[]{Nd4j.create(1, in1Size), Nd4j.create(1, in2Size)});

    net.summary(InputType.feedForward(in1Size), InputType.feedForward(in2Size));
    net.fit(new SingletonMultiDataSetIterator(mds));
}
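Note that none of the layers above specify nIn: the setInputTypes(...) call lets DL4J infer each layer's input size (and add any required input preprocessors) automatically, which is what makes this compact builder style possible.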