Java Code Examples for org.nd4j.linalg.ops.transforms.Transforms#log()

The following examples show how to use org.nd4j.linalg.ops.transforms.Transforms#log(). They are drawn from the deeplearning4j and nd4j projects; the source file and license are noted above each example.
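
Transforms.log() takes the element-wise natural logarithm of an INDArray. The single-argument overload returns a new array, while the two-argument overload takes a boolean dup flag: true operates on a copy, false applies the operation in place, which is why several of the examples below discard the return value. The sketch below is a minimal, hypothetical usage example (the class and method names are illustrative; it only assumes ND4J is on the classpath):

import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.ops.transforms.Transforms;

public class TransformsLogDemo {
    public static void main(String[] args) {
        INDArray x = Nd4j.linspace(1, 4, 4, DataType.DOUBLE);

        //Single-argument form: returns a new array, x is left unchanged
        INDArray logCopy = Transforms.log(x);

        //Two-argument form with dup = false: the log is written into x itself
        Transforms.log(x, false);

        System.out.println(logCopy);   //approximately [0, 0.6931, 1.0986, 1.3863]
        System.out.println(x);         //same values, now stored in x
    }
}
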
Example 1
Source File: LossMSLE.java    From deeplearning4j with Apache License 2.0
public INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
    if(!labels.equalShapes(preOutput)){
        Preconditions.throwEx("Labels and preOutput must have equal shapes: got shapes %s vs %s", labels.shape(), preOutput.shape());
    }
    labels = labels.castTo(preOutput.dataType());   //No-op if already correct dtype
    INDArray scoreArr;
    INDArray output = activationFn.getActivation(preOutput.dup(), true);
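    //Per-element squared log error: (log((output+1)/(labels+1)))^2, divided by the number of outputs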
    scoreArr = Transforms.log(output.addi(1.0).divi(labels.add(1.0)), false);
    scoreArr = scoreArr.muli(scoreArr).divi(labels.size(1));

    //Weighted loss function
    if (weights != null) {
        if (weights.length() != output.size(1)) {
            throw new IllegalStateException("Weights vector (length " + weights.length()
                            + ") does not match output.size(1)=" + output.size(1));
        }
        scoreArr.muliRowVector(weights.castTo(scoreArr.dataType()));
    }

    if (mask != null) {
        LossUtil.applyMask(scoreArr, mask);
    }
    return scoreArr;
}
 
Example 2
Source File: LossKLD.java    From deeplearning4j with Apache License 2.0
private INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
    if(!labels.equalShapes(preOutput)){
        Preconditions.throwEx("Labels and preOutput must have equal shapes: got shapes %s vs %s", labels.shape(), preOutput.shape());
    }
    labels = labels.castTo(preOutput.dataType());   //No-op if already correct dtype
    INDArray output = activationFn.getActivation(preOutput.dup(), true);

    // Clip output and labels to be between Nd4j.EPS_THRESHOLD and 1, i.e. a valid non-zero probability
    output = Transforms.min(Transforms.max(output, Nd4j.EPS_THRESHOLD, false), 1, false);
    labels = Transforms.min(Transforms.max(labels, Nd4j.EPS_THRESHOLD, true), 1, false);

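    //log(labels / output); multiplying by labels below gives the per-element KL divergence contribution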
    INDArray logRatio = Transforms.log(output.rdivi(labels), false);

    INDArray scoreArr = logRatio.muli(labels);
    if (mask != null) {
        LossUtil.applyMask(scoreArr, mask);
    }
    return scoreArr;
}
 
Example 3
Source File: LossPoisson.java    From deeplearning4j with Apache License 2.0
public INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
    if(!labels.equalShapes(preOutput)){
        Preconditions.throwEx("Labels and preOutput must have equal shapes: got shapes %s vs %s", labels.shape(), preOutput.shape());
    }
    labels = labels.castTo(preOutput.dataType());   //No-op if already correct dtype
    /*
     mean of (yhat - y * log(yhat))
     */
    INDArray postOutput = activationFn.getActivation(preOutput.dup(), true);

    INDArray scoreArr = Transforms.log(postOutput);
    scoreArr.muli(labels);
    scoreArr = postOutput.sub(scoreArr);

    if (mask != null) {
        LossUtil.applyMask(scoreArr, mask);
    }
    return scoreArr;
}
 
Example 4
Source File: LossPoisson.java    From nd4j with Apache License 2.0
public INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
    if (labels.size(1) != preOutput.size(1)) {
        throw new IllegalArgumentException(
                        "Labels array numColumns (size(1) = " + labels.size(1) + ") does not match output layer"
                                        + " number of outputs (nOut = " + preOutput.size(1) + ") ");

    }
    /*
     mean of (yhat - y * log(yhat))
     */
    //INDArray postOutput = Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform(activationFn, preOutput.dup()));
    INDArray postOutput = activationFn.getActivation(preOutput.dup(), true);

    INDArray scoreArr = Transforms.log(postOutput);
    scoreArr.muli(labels);
    scoreArr = postOutput.sub(scoreArr);

    if (mask != null) {
        LossUtil.applyMask(scoreArr, mask);
    }
    return scoreArr;
}
 
Example 5
Source File: LossKLD.java    From nd4j with Apache License 2.0
private INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
    if (labels.size(1) != preOutput.size(1)) {
        throw new IllegalArgumentException(
                        "Labels array numColumns (size(1) = " + labels.size(1) + ") does not match output layer"
                                        + " number of outputs (nOut = " + preOutput.size(1) + ") ");

    }
    INDArray output = activationFn.getActivation(preOutput.dup(), true);

    // Clip output and labels to be between Nd4j.EPS_THRESHOLD and 1, i.e. a valid non-zero probability
    output = Transforms.min(Transforms.max(output, Nd4j.EPS_THRESHOLD, false), 1, false);
    labels = Transforms.min(Transforms.max(labels, Nd4j.EPS_THRESHOLD, true), 1, false);

    INDArray logRatio = Transforms.log(output.rdivi(labels), false);

    INDArray scoreArr = logRatio.muli(labels);
    if (mask != null) {
        LossUtil.applyMask(scoreArr, mask);
    }
    return scoreArr;
}
 
Example 6
Source File: TransformOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testLogSumExp2() {

    for (int dim = 0; dim <= 2; dim++) {
        Nd4j.getRandom().setSeed(12345);
        INDArray inputArr = Nd4j.rand(DataType.DOUBLE, 3, 4, 5);
        SameDiff sd = SameDiff.create();
        SDVariable in = sd.var(inputArr);
        SDVariable lse = sd.math().logSumExp(in, dim);

        INDArray exp = Transforms.exp(inputArr, true);
        INDArray sum = exp.sum(dim);
        INDArray log = Transforms.log(sum);

        OpValidation.validate(new TestCase(sd)
                .expectedOutput(lse.name(), log)
                .gradientCheck(true));
    }
}
 
Example 7
Source File: ActorCriticLoss.java    From deeplearning4j with Apache License 2.0
@Override
public INDArray computeGradient(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
    INDArray output = activationFn.getActivation(preOutput.dup(), true).addi(1e-5);
    INDArray logOutput = Transforms.log(output, true);
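    //Derivative of the entropy term a * log(a) with respect to a is log(a) + 1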
    INDArray entropyDev = logOutput.addi(1);
    INDArray dLda = output.rdivi(labels).subi(entropyDev.muli(BETA)).negi();
    INDArray grad = activationFn.backprop(preOutput, dLda).getFirst();

    if (mask != null) {
        LossUtil.applyMask(grad, mask);
    }
    return grad;
}
 
Example 8
Source File: NDArrayTestsFortran.java    From deeplearning4j with Apache License 2.0
@Test
public void testLogDouble() {
    INDArray linspace = Nd4j.linspace(1, 6, 6, DataType.DOUBLE).castTo(DataType.DOUBLE);
    INDArray log = Transforms.log(linspace);
    INDArray assertion = Nd4j.create(new double[] {0, 0.6931471805599453, 1.0986122886681098, 1.3862943611198906, 1.6094379124341005, 1.791759469228055});
    assertEquals(assertion, log);
}
 
Example 9
Source File: OpExecutionerTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testLog() {
    INDArray arr = Nd4j.linspace(1, 4, 4, DataType.DOUBLE).reshape(2, 2);
    INDArray assertion = Nd4j.create(new double[][] {{0., 1.09861229}, {0.69314718, 1.38629436}});

    INDArray logTest = Transforms.log(arr);
    assertEquals(assertion, logTest);
    arr = Nd4j.linspace(1, 6, 6, DataType.DOUBLE).reshape(2, 3);
    assertion = Nd4j.create(new double[][] {{0., 1.09861229, 1.60943791}, {0.69314718, 1.38629436, 1.79175947}});

    logTest = Transforms.log(arr);
    assertEquals(assertion, logTest);
}
 
Example 10
Source File: OpExecutionerTestsC.java    From deeplearning4j with Apache License 2.0
@Test
public void testLog() {
    INDArray log = Nd4j.linspace(1, 6, 6, DataType.DOUBLE);
    INDArray transformed = Transforms.log(log);
    INDArray assertion = Nd4j.create(new double[] {0., 0.69314718, 1.09861229, 1.38629436, 1.60943791, 1.79175947});
    assertEquals(assertion, transformed);
}
 
Example 11
Source File: TransformOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testLogSumExp() {
    Nd4j.getRandom().setSeed(12345);
    INDArray inputArr = Nd4j.rand(DataType.FLOAT, 1, 4);
    SameDiff sd = SameDiff.create();
    SDVariable in = sd.var(inputArr);
    SDVariable lse = sd.math().logSumExp(in);
    INDArray out = lse.eval();

    INDArray exp = Transforms.exp(inputArr, true);
    INDArray sum = exp.sum();
    INDArray log = Transforms.log(sum);
    assertEquals(log, out);
}
 
Example 12
Source File: OpExecutionerTestsC.java    From nd4j with Apache License 2.0
@Test
public void testLog() {
    INDArray log = Nd4j.linspace(1, 6, 6);
    INDArray transformed = Transforms.log(log);
    INDArray assertion = Nd4j.create(new double[] {0., 0.69314718, 1.09861229, 1.38629436, 1.60943791, 1.79175947});
    assertEquals(assertion, transformed);
}
 
Example 13
Source File: ActorCriticLoss.java    From deeplearning4j with Apache License 2.0
private INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
    INDArray output = activationFn.getActivation(preOutput.dup(), true).addi(1e-5);
    INDArray logOutput = Transforms.log(output, true);
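    //output * log(output) is the negative entropy per element; subtracting BETA times it below adds an entropy bonus to the score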
    INDArray entropy = output.muli(logOutput);
    INDArray scoreArr = logOutput.muli(labels).subi(entropy.muli(BETA));

    if (mask != null) {
        LossUtil.applyMask(scoreArr, mask);
    }
    return scoreArr;
}
 
Example 14
Source File: LossMSLE.java    From deeplearning4j with Apache License 2.0
@Override
public INDArray computeGradient(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
    if(!labels.equalShapes(preOutput)){
        Preconditions.throwEx("Labels and preOutput must have equal shapes: got shapes %s vs %s", labels.shape(), preOutput.shape());
    }
    labels = labels.castTo(preOutput.dataType());   //No-op if already correct dtype
    INDArray output = activationFn.getActivation(preOutput.dup(), true);

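    //dL/da = (2/n) * log((output+1)/(labels+1)) / (output+1), with n = labels.size(1), built up over the next three lines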
    INDArray p1 = output.add(1.0);
    INDArray dlda = p1.rdiv(2.0 / labels.size(1));
    INDArray logRatio = Transforms.log(p1.divi(labels.add(1.0)), false);
    dlda.muli(logRatio);

    if (weights != null) {
        dlda.muliRowVector(weights.castTo(dlda.dataType()));
    }

    if (mask != null && LossUtil.isPerOutputMasking(dlda, mask)) {
        //For *most* activation functions: we don't actually need to mask dL/da in addition to masking dL/dz later
        //but: some, like softmax, require both (due to dL/dz_i being a function of dL/da_j, for i != j)
        //We could add a special case for softmax (activationFn instanceof ActivationSoftmax) but that would be
        // error prone - though buy us a tiny bit of performance
        LossUtil.applyMask(dlda, mask);
    }

    //dL/dz
    INDArray gradients = activationFn.backprop(preOutput, dlda).getFirst(); //TODO activation functions with weights

    if (mask != null) {
        LossUtil.applyMask(gradients, mask);
    }

    return gradients;
}
 
Example 15
Source File: BernoulliReconstructionDistribution.java    From deeplearning4j with Apache License 2.0
private INDArray calcLogProbArray(INDArray x, INDArray preOutDistributionParams) {
    x = x.castTo(preOutDistributionParams.dataType());
    INDArray output = preOutDistributionParams.dup();
    activationFn.getActivation(output, false);

    INDArray logOutput = Transforms.log(output, true);
    INDArray log1SubOut = Transforms.log(output.rsubi(1.0), false);

    //For numerical stability: if output = 0, then log(output) == -infinity
    //then x * log(output) = NaN, but lim(x->0, output->0)[ x * log(output) ] == 0
    // therefore: want 0*log(0) = 0, NOT 0*log(0) = NaN by default
    BooleanIndexing.replaceWhere(logOutput, 0.0, Conditions.isInfinite()); //log(out)= +/- inf -> x == 0.0 -> 0 * log(0) = 0
    BooleanIndexing.replaceWhere(log1SubOut, 0.0, Conditions.isInfinite()); //log(out)= +/- inf -> x == 0.0 -> 0 * log(0) = 0
    return logOutput.muli(x).addi(x.rsub(1.0).muli(log1SubOut));
}
 
Example 16
Source File: LogSoftMax.java    From nd4j with Apache License 2.0
@Override
public void exec(int... dimensions) {
    if (dimensions[0] != 1)
        throw new IllegalArgumentException("Only supports row wise calculations");

    Nd4j.getExecutioner().exec(new OldSoftMax(x, z));
    Transforms.log(z, false);
}
 
Example 17
Source File: NDArrayTestsFortran.java    From nd4j with Apache License 2.0
@Test
public void testLogDouble() {
    INDArray linspace = Nd4j.linspace(1, 6, 6);
    INDArray log = Transforms.log(linspace);
    INDArray assertion = Nd4j.create(new double[] {0, 0.6931471805599453, 1.0986122886681098, 1.3862943611198906,
                    1.6094379124341005, 1.791759469228055});
    assertEquals(assertion, log);
}
 
Example 18
Source File: OpExecutionerTests.java    From nd4j with Apache License 2.0
@Test
public void testLog() {
    INDArray arr = Nd4j.linspace(1, 4, 4).reshape(2, 2);
    INDArray assertion = Nd4j.create(new double[][] {{0., 1.09861229}, {0.69314718, 1.38629436}});

    INDArray logTest = Transforms.log(arr);
    assertEquals(assertion, logTest);
    arr = Nd4j.linspace(1, 6, 6).reshape(2, 3);
    assertion = Nd4j.create(new double[][] {{0., 1.09861229, 1.60943791}, {0.69314718, 1.38629436, 1.79175947}});

    logTest = Transforms.log(arr);
    assertEquals(assertion, logTest);
}
 
Example 19
Source File: LossBinaryXENT.java    From nd4j with Apache License 2.0
private INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
    if (labels.size(1) != preOutput.size(1)) {
        throw new IllegalArgumentException(
                        "Labels array numColumns (size(1) = " + labels.size(1) + ") does not match output layer"
                                        + " number of outputs (nOut = " + preOutput.size(1) + ") ");
    }

    INDArray scoreArr;
    if (activationFn instanceof ActivationSoftmax) {
        //Use LogSoftMax op to avoid numerical issues when calculating score
        INDArray logsoftmax = Nd4j.getExecutioner().execAndReturn(new LogSoftMax(preOutput.dup()));
        scoreArr = logsoftmax.muli(labels);

    } else {
        //INDArray output = Nd4j.getExecutioner().execAndReturn(Nd4j.getOpFactory().createTransform(activationFn, preOutput.dup()));
        INDArray output = activationFn.getActivation(preOutput.dup(), true);
        if (clipEps > 0.0) {
            CustomOp op = DynamicCustomOp.builder("clipbyvalue")
                    .addInputs(output)
                    .callInplace(true)
                    .addFloatingPointArguments(clipEps, 1.0-clipEps)
                    .build();
            Nd4j.getExecutioner().exec(op);
        }
        scoreArr = Transforms.log(output, true).muli(labels);
        INDArray secondTerm = output.rsubi(1);
        Transforms.log(secondTerm, false);
        secondTerm.muli(labels.rsub(1));
        scoreArr.addi(secondTerm);
    }

    //Weighted loss function
    if (weights != null) {
        if (weights.length() != preOutput.size(1)) {
            throw new IllegalStateException("Weights vector (length " + weights.length()
                            + ") does not match output.size(1)=" + preOutput.size(1));
        }

        scoreArr.muliRowVector(weights);
    }

    if (mask != null) {
        LossUtil.applyMask(scoreArr, mask);
    }
    return scoreArr;
}
 
Example 20
Source File: LossBinaryXENT.java    From deeplearning4j with Apache License 2.0
private INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
    if(!labels.equalShapes(preOutput)){
        Preconditions.throwEx("Labels and preOutput must have equal shapes: got shapes %s vs %s", labels.shape(), preOutput.shape());
    }
    labels = labels.castTo(preOutput.dataType());   //No-op if already correct dtype

    INDArray scoreArr;
    if (activationFn instanceof ActivationSoftmax) {
        //TODO Post GPU support for custom ops: Use LogSoftMax op to avoid numerical issues when calculating score
        INDArray logsoftmax = Nd4j.exec((CustomOp) new SoftMax(preOutput, preOutput.ulike(), -1))[0];
        Transforms.log(logsoftmax, false);
        scoreArr = logsoftmax.muli(labels);

    } else {
        INDArray output = activationFn.getActivation(preOutput.dup(), true);
        if (clipEps > 0.0) {
            CustomOp op = DynamicCustomOp.builder("clipbyvalue")
                    .addInputs(output)
                    .callInplace(true)
                    .addFloatingPointArguments(clipEps, 1.0-clipEps)
                    .build();
            Nd4j.getExecutioner().execAndReturn(op);
        }
        scoreArr = Transforms.log(output, true).muli(labels);
        INDArray secondTerm = output.rsubi(1);
        Transforms.log(secondTerm, false);
        secondTerm.muli(labels.rsub(1));
        scoreArr.addi(secondTerm);
    }

    //Weighted loss function
    if (weights != null) {
        if (weights.length() != preOutput.size(1)) {
            throw new IllegalStateException("Weights vector (length " + weights.length()
                            + ") does not match output.size(1)=" + preOutput.size(1));
        }

        scoreArr.muliRowVector(weights.castTo(scoreArr.dataType()));
    }

    if (mask != null) {
        LossUtil.applyMask(scoreArr, mask);
    }
    return scoreArr;
}