Java Code Examples for org.nd4j.linalg.activations.Activation#RELU

The following examples show how to use org.nd4j.linalg.activations.Activation#RELU. Each example is taken from an open source project; the source file and license are listed above each example, and you can go to the original project or source file by following those references.
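
Before diving into the examples, note that Activation.RELU is simply an enum constant: its getActivationFunction() returns the underlying IActivation object that computes max(0, x). The following snippet is a minimal, hypothetical sketch (not taken from any of the projects below) that applies it directly to an array, assuming only the nd4j calls that also appear in the examples:

import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class ReluQuickSketch {
    public static void main(String[] args) {
        // Seven evenly spaced values from -3 to 3: [-3, -2, -1, 0, 1, 2, 3]
        INDArray x = Nd4j.linspace(-3, 3, 7);
        // Apply ReLU; dup() keeps x unchanged, since getActivation may operate in place.
        // The boolean flag indicates training mode (irrelevant for ReLU).
        INDArray y = Activation.RELU.getActivationFunction().getActivation(x.dup(), false);
        System.out.println(y);   // negative entries become 0: [0, 0, 0, 0, 1, 2, 3]
    }
}
 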
Example 1
Source File: KerasActivationUtils.java    From deeplearning4j with Apache License 2.0
/**
 * Map Keras to DL4J activation functions.
 *
 * @param kerasActivation String containing Keras activation function name
 * @param conf            Keras layer configuration
 * @return Activation enum value containing DL4J activation function name
 */
public static Activation mapToActivation(String kerasActivation, KerasLayerConfiguration conf)
        throws UnsupportedKerasConfigurationException {
    Activation dl4jActivation;
    if (kerasActivation.equals(conf.getKERAS_ACTIVATION_SOFTMAX())) {
        dl4jActivation = Activation.SOFTMAX;
    } else if (kerasActivation.equals(conf.getKERAS_ACTIVATION_SOFTPLUS())) {
        dl4jActivation = Activation.SOFTPLUS;
    } else if (kerasActivation.equals(conf.getKERAS_ACTIVATION_SOFTSIGN())) {
        dl4jActivation = Activation.SOFTSIGN;
    } else if (kerasActivation.equals(conf.getKERAS_ACTIVATION_RELU())) {
        dl4jActivation = Activation.RELU;
    } else if (kerasActivation.equals(conf.getKERAS_ACTIVATION_RELU6())) {
        dl4jActivation = Activation.RELU6;
    } else if (kerasActivation.equals(conf.getKERAS_ACTIVATION_ELU())) {
        dl4jActivation = Activation.ELU;
    } else if (kerasActivation.equals(conf.getKERAS_ACTIVATION_SELU())) {
        dl4jActivation = Activation.SELU;
    } else if (kerasActivation.equals(conf.getKERAS_ACTIVATION_TANH())) {
        dl4jActivation = Activation.TANH;
    } else if (kerasActivation.equals(conf.getKERAS_ACTIVATION_SIGMOID())) {
        dl4jActivation = Activation.SIGMOID;
    } else if (kerasActivation.equals(conf.getKERAS_ACTIVATION_HARD_SIGMOID())) {
        dl4jActivation = Activation.HARDSIGMOID;
    } else if (kerasActivation.equals(conf.getKERAS_ACTIVATION_LINEAR())) {
        dl4jActivation = Activation.IDENTITY;
    } else if (kerasActivation.equals(conf.getKERAS_ACTIVATION_SWISH())) {
        dl4jActivation = Activation.SWISH;
    } else {
        throw new UnsupportedKerasConfigurationException(
                "Unknown Keras activation function " + kerasActivation);
    }
    return dl4jActivation;
}
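
A brief, hypothetical usage sketch of the method above: passing the Keras name "relu" together with a Keras 2 layer configuration yields Activation.RELU. Class and package names are those of the deeplearning4j Keras model-import module and may differ between versions:

import org.deeplearning4j.nn.modelimport.keras.config.Keras2LayerConfiguration;
import org.deeplearning4j.nn.modelimport.keras.config.KerasLayerConfiguration;
import org.deeplearning4j.nn.modelimport.keras.utils.KerasActivationUtils;
import org.nd4j.linalg.activations.Activation;

public class MapActivationSketch {
    public static void main(String[] args) throws Exception {
        // Keras 2 naming conventions: getKERAS_ACTIVATION_RELU() is "relu"
        KerasLayerConfiguration conf = new Keras2LayerConfiguration();
        Activation a = KerasActivationUtils.mapToActivation("relu", conf);
        System.out.println(a);   // RELU
        // An unrecognized name would throw UnsupportedKerasConfigurationException
    }
}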
 
Example 2
Source File: SameDiffTests.java    From nd4j with Apache License 2.0
@Test
    public void testActivationBackprop() {

        Activation[] afns = new Activation[]{
                Activation.TANH,
                Activation.SIGMOID,
                Activation.ELU,
                Activation.SOFTPLUS,
                Activation.SOFTSIGN,
                Activation.HARDTANH,
                Activation.CUBE,            //WRONG output - see issue https://github.com/deeplearning4j/nd4j/issues/2426
                Activation.RELU,            //JVM crash
                Activation.LEAKYRELU        //JVM crash
        };

        for (Activation a : afns) {

            SameDiff sd = SameDiff.create();
            INDArray inArr = Nd4j.linspace(-3, 3, 7);
            INDArray labelArr = Nd4j.linspace(-3, 3, 7).muli(0.5);
            SDVariable in = sd.var("in", inArr.dup());

//            System.out.println("inArr: " + inArr);

            INDArray outExp;
            SDVariable out;
            switch (a) {
                case ELU:
                    out = sd.elu("out", in);
                    outExp = Transforms.elu(inArr, true);
                    break;
                case HARDTANH:
                    out = sd.hardTanh("out", in);
                    outExp = Transforms.hardTanh(inArr, true);
                    break;
                case LEAKYRELU:
                    out = sd.leakyRelu("out", in, 0.01);
                    outExp = Transforms.leakyRelu(inArr, true);
                    break;
                case RELU:
                    out = sd.relu("out", in, 0.0);
                    outExp = Transforms.relu(inArr, true);
                    break;
                case SIGMOID:
                    out = sd.sigmoid("out", in);
                    outExp = Transforms.sigmoid(inArr, true);
                    break;
                case SOFTPLUS:
                    out = sd.softplus("out", in);
                    outExp = Transforms.softPlus(inArr, true);
                    break;
                case SOFTSIGN:
                    out = sd.softsign("out", in);
                    outExp = Transforms.softsign(inArr, true);
                    break;
                case TANH:
                    out = sd.tanh("out", in);
                    outExp = Transforms.tanh(inArr, true);
                    break;
                case CUBE:
                    out = sd.cube("out", in);
                    outExp = Transforms.pow(inArr, 3, true);
                    break;
                default:
                    throw new RuntimeException(a.toString());
            }

            //Sum squared error loss:
            SDVariable label = sd.var("label", labelArr.dup());
            SDVariable diff = label.sub("diff", out);
            SDVariable sqDiff = diff.mul("sqDiff", diff);
            SDVariable totSum = sd.sum("totSum", sqDiff, Integer.MAX_VALUE);    //Loss function...

            sd.exec();
            INDArray outAct = sd.getVariable("out").getArr();
            assertEquals(a.toString(), outExp, outAct);

            // L = sum_i (label - out)^2
            //dL/dOut = 2(out - label)
            INDArray dLdOutExp = outExp.sub(labelArr).mul(2);
            INDArray dLdInExp = a.getActivationFunction().backprop(inArr.dup(), dLdOutExp.dup()).getFirst();

            sd.execBackwards();
            SameDiff gradFn = sd.getFunction("grad");
            INDArray dLdOutAct = gradFn.getVariable("out-grad").getArr();
            INDArray dLdInAct = gradFn.getVariable("in-grad").getArr();

            assertEquals(a.toString(), dLdOutExp, dLdOutAct);
            assertEquals(a.toString(), dLdInExp, dLdInAct);
        }
    }
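
For the RELU branch in the test above, the expected values come from Transforms.relu (forward) and from the activation object's backprop (backward): the incoming gradient is passed through where the input is positive and zeroed elsewhere. A minimal standalone sketch of that reference computation, using only the nd4j calls already shown in the test:

import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.ops.transforms.Transforms;

public class ReluBackpropSketch {
    public static void main(String[] args) {
        INDArray in = Nd4j.linspace(-3, 3, 7);          // [-3, -2, -1, 0, 1, 2, 3]
        INDArray out = Transforms.relu(in, true);       // forward: max(0, x); 'true' requests a copy
        INDArray dLdOut = Nd4j.onesLike(in);            // pretend upstream gradient of all ones
        // Backward: gradient flows only where the input was positive
        INDArray dLdIn = Activation.RELU.getActivationFunction()
                .backprop(in.dup(), dLdOut.dup()).getFirst();
        System.out.println(out);    // [0, 0, 0, 0, 1, 2, 3]
        System.out.println(dLdIn);  // [0, 0, 0, 0, 1, 1, 1]
    }
}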
 
Example 3
Source File: SameDiffTests.java    From deeplearning4j with Apache License 2.0
@Test
    public void testActivationBackprop() {

        Activation[] afns = new Activation[]{
                Activation.TANH,
                Activation.SIGMOID,
                Activation.ELU,
                Activation.SOFTPLUS,
                Activation.SOFTSIGN,
                Activation.HARDTANH,
                Activation.CUBE,            //WRONG output - see issue https://github.com/deeplearning4j/nd4j/issues/2426
                Activation.RELU,            //JVM crash
                Activation.LEAKYRELU        //JVM crash
        };

        for (Activation a : afns) {

            SameDiff sd = SameDiff.create();
            INDArray inArr = Nd4j.linspace(-3, 3, 7);
            INDArray labelArr = Nd4j.linspace(-3, 3, 7).muli(0.5);
            SDVariable in = sd.var("in", inArr.dup());

//            System.out.println("inArr: " + inArr);

            INDArray outExp;
            SDVariable out;
            switch (a) {
                case ELU:
                    out = sd.nn().elu("out", in);
                    outExp = Transforms.elu(inArr, true);
                    break;
                case HARDTANH:
                    out = sd.nn().hardTanh("out", in);
                    outExp = Transforms.hardTanh(inArr, true);
                    break;
                case LEAKYRELU:
                    out = sd.nn().leakyRelu("out", in, 0.01);
                    outExp = Transforms.leakyRelu(inArr, true);
                    break;
                case RELU:
                    out = sd.nn().relu("out", in, 0.0);
                    outExp = Transforms.relu(inArr, true);
                    break;
                case SIGMOID:
                    out = sd.nn().sigmoid("out", in);
                    outExp = Transforms.sigmoid(inArr, true);
                    break;
                case SOFTPLUS:
                    out = sd.nn().softplus("out", in);
                    outExp = Transforms.softPlus(inArr, true);
                    break;
                case SOFTSIGN:
                    out = sd.nn().softsign("out", in);
                    outExp = Transforms.softsign(inArr, true);
                    break;
                case TANH:
                    out = sd.math().tanh("out", in);
                    outExp = Transforms.tanh(inArr, true);
                    break;
                case CUBE:
                    out = sd.math().cube("out", in);
                    outExp = Transforms.pow(inArr, 3, true);
                    break;
                default:
                    throw new RuntimeException(a.toString());
            }

            //Sum squared error loss:
            SDVariable label = sd.var("label", labelArr.dup());
            SDVariable diff = label.sub("diff", out);
            SDVariable sqDiff = diff.mul("sqDiff", diff);
            SDVariable totSum = sd.sum("totSum", sqDiff, Integer.MAX_VALUE);    //Loss function...

            Map<String,INDArray> m = sd.output(Collections.emptyMap(), "out");
            INDArray outAct = m.get("out");
            assertEquals(a.toString(), outExp, outAct);

            // L = sum_i (label - out)^2
            //dL/dOut = 2(out - label)
            INDArray dLdOutExp = outExp.sub(labelArr).mul(2);
            INDArray dLdInExp = a.getActivationFunction().backprop(inArr.dup(), dLdOutExp.dup()).getFirst();

            Map<String,INDArray> grads = sd.calculateGradients(null, "out", "in");
//            sd.execBackwards(Collections.emptyMap());
//            SameDiff gradFn = sd.getFunction("grad");
            INDArray dLdOutAct = grads.get("out");
            INDArray dLdInAct = grads.get("in");

            assertEquals(a.toString(), dLdOutExp, dLdOutAct);
            assertEquals(a.toString(), dLdInExp, dLdInAct);
        }
    }
 
Example 4
Source File: TestSameDiffDense.java    From deeplearning4j with Apache License 2.0
@Test
public void testSameDiffDenseForward() {
    for(WorkspaceMode wsm : new WorkspaceMode[]{WorkspaceMode.ENABLED, WorkspaceMode.NONE}) {
        for (int minibatch : new int[]{5, 1}) {
            int nIn = 3;
            int nOut = 4;

            Activation[] afns = new Activation[]{
                    Activation.TANH,
                    Activation.SIGMOID,
                    Activation.ELU,
                    Activation.IDENTITY,
                    Activation.SOFTPLUS,
                    Activation.SOFTSIGN,
                    Activation.CUBE,
                    Activation.HARDTANH,
                    Activation.RELU
            };

            for (Activation a : afns) {
                log.info("Starting test - " + a + ", workspace = " + wsm);
                MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                        .inferenceWorkspaceMode(wsm)
                        .trainingWorkspaceMode(wsm)
                        .list()
                        .layer(new SameDiffDense.Builder().nIn(nIn).nOut(nOut)
                                .activation(a)
                                .build())
                        .build();

                MultiLayerNetwork net = new MultiLayerNetwork(conf);
                net.init();

                assertNotNull(net.paramTable());

                MultiLayerConfiguration conf2 = new NeuralNetConfiguration.Builder()
                        .list()
                        .layer(new DenseLayer.Builder().activation(a).nIn(nIn).nOut(nOut).build())
                        .build();

                MultiLayerNetwork net2 = new MultiLayerNetwork(conf2);
                net2.init();

                net.params().assign(net2.params());

                //Check params:
                assertEquals(net2.params(), net.params());
                Map<String, INDArray> params1 = net.paramTable();
                Map<String, INDArray> params2 = net2.paramTable();
                assertEquals(params2, params1);

                INDArray in = Nd4j.rand(minibatch, nIn);
                INDArray out = net.output(in);
                INDArray outExp = net2.output(in);

                assertEquals(outExp, out);

                //Also check serialization:
                MultiLayerNetwork netLoaded = TestUtils.testModelSerialization(net);
                INDArray outLoaded = netLoaded.output(in);

                assertEquals(outExp, outLoaded);

                //Sanity check on different minibatch sizes:
                INDArray newIn = Nd4j.vstack(in, in);
                INDArray outMbsd = net.output(newIn);
                INDArray outMb = net2.output(newIn);
                assertEquals(outMb, outMbsd);
            }
        }
    }
}
 
Example 5
Source File: TestSameDiffDense.java    From deeplearning4j with Apache License 2.0
@Test
public void testSameDiffDenseForwardMultiLayer() {
    for(WorkspaceMode wsm : new WorkspaceMode[]{WorkspaceMode.ENABLED, WorkspaceMode.NONE}) {
        for (int minibatch : new int[]{5, 1}) {
            int nIn = 3;
            int nOut = 4;

            Activation[] afns = new Activation[]{
                    Activation.TANH,
                    Activation.SIGMOID,
                    Activation.ELU,
                    Activation.IDENTITY,
                    Activation.SOFTPLUS,
                    Activation.SOFTSIGN,
                    Activation.CUBE,    //https://github.com/deeplearning4j/nd4j/issues/2426
                    Activation.HARDTANH,
                    Activation.RELU      //JVM crash
            };

            for (Activation a : afns) {
                log.info("Starting test - " + a + " - workspace=" + wsm);
                MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                        .seed(12345)
                        .list()
                        .layer(new SameDiffDense.Builder().nIn(nIn).nOut(nOut)
                                .weightInit(WeightInit.XAVIER)
                                .activation(a).build())
                        .layer(new SameDiffDense.Builder().nIn(nOut).nOut(nOut)
                                .weightInit(WeightInit.XAVIER)
                                .activation(a).build())
                        .layer(new OutputLayer.Builder().nIn(nOut).nOut(nOut)
                                .weightInit(WeightInit.XAVIER)
                                .activation(a).build())
                        .validateOutputLayerConfig(false)
                        .build();

                MultiLayerNetwork net = new MultiLayerNetwork(conf);
                net.init();

                assertNotNull(net.paramTable());

                MultiLayerConfiguration conf2 = new NeuralNetConfiguration.Builder()
                        .seed(12345)
                        .weightInit(WeightInit.XAVIER)
                        .list()
                        .layer(new DenseLayer.Builder().activation(a).nIn(nIn).nOut(nOut).build())
                        .layer(new DenseLayer.Builder().activation(a).nIn(nOut).nOut(nOut).build())
                        .layer(new OutputLayer.Builder().nIn(nOut).nOut(nOut)
                                .activation(a).build())
                        .validateOutputLayerConfig(false)
                        .build();

                MultiLayerNetwork net2 = new MultiLayerNetwork(conf2);
                net2.init();

                assertEquals(net2.params(), net.params());

                //Check params:
                assertEquals(net2.params(), net.params());
                Map<String, INDArray> params1 = net.paramTable();
                Map<String, INDArray> params2 = net2.paramTable();
                assertEquals(params2, params1);

                INDArray in = Nd4j.rand(minibatch, nIn);
                INDArray out = net.output(in);
                INDArray outExp = net2.output(in);

                assertEquals(outExp, out);

                //Also check serialization:
                MultiLayerNetwork netLoaded = TestUtils.testModelSerialization(net);
                INDArray outLoaded = netLoaded.output(in);

                assertEquals(outExp, outLoaded);


                //Sanity check different minibatch sizes
                in = Nd4j.rand(2 * minibatch, nIn);
                out = net.output(in);
                outExp = net2.output(in);
                assertEquals(outExp, out);
            }
        }
    }
}
 
Example 6
Source File: TestSameDiffDense.java    From deeplearning4j with Apache License 2.0
@Test
    public void testSameDiffDenseBackward() {
        int nIn = 3;
        int nOut = 4;

        for (boolean workspaces : new boolean[]{false, true}) {

            for (int minibatch : new int[]{5, 1}) {

                Activation[] afns = new Activation[]{
                        Activation.TANH,
                        Activation.SIGMOID,
                        Activation.ELU,
                        Activation.IDENTITY,
                        Activation.SOFTPLUS,
                        Activation.SOFTSIGN,
                        Activation.HARDTANH,
                        Activation.CUBE,
                        Activation.RELU
                };

                for (Activation a : afns) {
                    log.info("Starting test - " + a + " - minibatch " + minibatch + ", workspaces: " + workspaces);
                    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                            .trainingWorkspaceMode(workspaces ? WorkspaceMode.ENABLED : WorkspaceMode.NONE)
                            .inferenceWorkspaceMode(workspaces ? WorkspaceMode.ENABLED : WorkspaceMode.NONE)
                            .list()
                            .layer(new SameDiffDense.Builder().nIn(nIn).nOut(nOut)
                                    .activation(a)
                                    .build())
                            .layer(new OutputLayer.Builder().nIn(nOut).nOut(nOut).activation(Activation.SOFTMAX)
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                            .build();

                    MultiLayerNetwork netSD = new MultiLayerNetwork(conf);
                    netSD.init();

                    MultiLayerConfiguration conf2 = new NeuralNetConfiguration.Builder()
                            .list()
                            .layer(new DenseLayer.Builder().activation(a).nIn(nIn).nOut(nOut).build())
                            .layer(new OutputLayer.Builder().nIn(nOut).nOut(nOut).activation(Activation.SOFTMAX)
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                            .build();

                    MultiLayerNetwork netStandard = new MultiLayerNetwork(conf2);
                    netStandard.init();

                    netSD.params().assign(netStandard.params());

                    //Check params:
                    assertEquals(netStandard.params(), netSD.params());
                    assertEquals(netStandard.paramTable(), netSD.paramTable());

                    INDArray in = Nd4j.rand(minibatch, nIn);
                    INDArray l = TestUtils.randomOneHot(minibatch, nOut, 12345);
                    netSD.setInput(in);
                    netStandard.setInput(in);
                    netSD.setLabels(l);
                    netStandard.setLabels(l);

                    netSD.computeGradientAndScore();
                    netStandard.computeGradientAndScore();

                    Gradient gSD = netSD.gradient();
                    Gradient gStd = netStandard.gradient();

                    Map<String, INDArray> m1 = gSD.gradientForVariable();
                    Map<String, INDArray> m2 = gStd.gradientForVariable();

                    assertEquals(m2.keySet(), m1.keySet());

                    for (String s : m1.keySet()) {
                        INDArray i1 = m1.get(s);
                        INDArray i2 = m2.get(s);

                        assertEquals(s, i2, i1);
                    }

                    assertEquals(gStd.gradient(), gSD.gradient());

                    //Sanity check: different minibatch size
                    in = Nd4j.rand(2 * minibatch, nIn);
                    l = TestUtils.randomOneHot(2 * minibatch, nOut, 12345);
                    netSD.setInput(in);
                    netStandard.setInput(in);
                    netSD.setLabels(l);
                    netStandard.setLabels(l);

                    netSD.computeGradientAndScore();
//                    netStandard.computeGradientAndScore();
//                    assertEquals(netStandard.gradient().gradient(), netSD.gradient().gradient());

                    //Sanity check on different minibatch sizes:
                    INDArray newIn = Nd4j.vstack(in, in);
                    INDArray outMbsd = netSD.output(newIn);
                    INDArray outMb = netStandard.output(newIn);
                    assertEquals(outMb, outMbsd);
                }
            }
        }
    }
 
Example 7
Source File: MultiLayerNeuralNetConfigurationTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testInvalidOutputLayer(){
    /*
    Test case (invalid configs)
    1. nOut=1 + softmax
    2. mcxent + tanh
    3. xent + softmax
    4. xent + relu
    5. mcxent + sigmoid
     */

    LossFunctions.LossFunction[] lf = new LossFunctions.LossFunction[]{
            LossFunctions.LossFunction.MCXENT, LossFunctions.LossFunction.MCXENT, LossFunctions.LossFunction.XENT,
            LossFunctions.LossFunction.XENT, LossFunctions.LossFunction.MCXENT};
    int[] nOut = new int[]{1, 3, 3, 3, 3};
    Activation[] activations = new Activation[]{Activation.SOFTMAX, Activation.TANH, Activation.SOFTMAX, Activation.RELU, Activation.SIGMOID};
    for( int i=0; i<lf.length; i++ ){
        for(boolean lossLayer : new boolean[]{false, true}) {
            for (boolean validate : new boolean[]{true, false}) {
                String s = "nOut=" + nOut[i] + ",lossFn=" + lf[i] + ",lossLayer=" + lossLayer + ",validate=" + validate;
                if(nOut[i] == 1 && lossLayer)
                    continue;   //nOut is not available in a loss layer, can't expect it to detect this case
                try {
                    new NeuralNetConfiguration.Builder()
                            .list()
                            .layer(new DenseLayer.Builder().nIn(10).nOut(10).build())
                            .layer(!lossLayer ? new OutputLayer.Builder().nIn(10).nOut(nOut[i]).activation(activations[i]).lossFunction(lf[i]).build()
                                            : new LossLayer.Builder().activation(activations[i]).lossFunction(lf[i]).build())
                            .validateOutputLayerConfig(validate)
                            .build();
                    if (validate) {
                        fail("Expected exception: " + s);
                    }
                } catch (DL4JInvalidConfigException e) {
                    if (validate) {
                        assertTrue(s, e.getMessage().toLowerCase().contains("invalid output"));
                    } else {
                        fail("Validation should not be enabled");
                    }
                }
            }
        }
    }
}
 
Example 8
Source File: ComputationGraphConfigurationTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testInvalidOutputLayer(){
    /*
    Test case (invalid configs)
    1. nOut=1 + softmax
    2. mcxent + tanh
    3. xent + softmax
    4. xent + relu
    5. mcxent + sigmoid
     */

    LossFunctions.LossFunction[] lf = new LossFunctions.LossFunction[]{
            LossFunctions.LossFunction.MCXENT, LossFunctions.LossFunction.MCXENT, LossFunctions.LossFunction.XENT,
            LossFunctions.LossFunction.XENT, LossFunctions.LossFunction.MCXENT};
    int[] nOut = new int[]{1, 3, 3, 3, 3};
    Activation[] activations = new Activation[]{Activation.SOFTMAX, Activation.TANH, Activation.SOFTMAX, Activation.RELU, Activation.SIGMOID};
    for( int i=0; i<lf.length; i++ ){
        for(boolean lossLayer : new boolean[]{false, true}) {
            for (boolean validate : new boolean[]{true, false}) {
                String s = "nOut=" + nOut[i] + ",lossFn=" + lf[i] + ",lossLayer=" + lossLayer + ",validate=" + validate;
                if(nOut[i] == 1 && lossLayer)
                    continue;   //nOut is not available in a loss layer, can't expect it to detect this case
                try {
                    new NeuralNetConfiguration.Builder()
                            .graphBuilder()
                            .addInputs("in")
                            .layer("0", new DenseLayer.Builder().nIn(10).nOut(10).build(), "in")
                            .layer("1",
                                    !lossLayer ? new OutputLayer.Builder().nIn(10).nOut(nOut[i]).activation(activations[i]).lossFunction(lf[i]).build()
                                            : new LossLayer.Builder().activation(activations[i]).lossFunction(lf[i]).build(), "0")
                            .setOutputs("1")
                            .validateOutputLayerConfig(validate)
                            .build();
                    if (validate) {
                        fail("Expected exception: " + s);
                    }
                } catch (DL4JInvalidConfigException e) {
                    if (validate) {
                        assertTrue(s, e.getMessage().toLowerCase().contains("invalid output"));
                    } else {
                        fail("Validation should not be enabled");
                    }
                }
            }
        }
    }
}