Java Code Examples for org.nd4j.linalg.api.ndarray.INDArray#divi()

The following examples show how to use org.nd4j.linalg.api.ndarray.INDArray#divi(). They are drawn from open-source projects; the source file, project, and license for each example are noted above it.
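The trailing i marks the in-place variant: where div() allocates and returns a new array, divi() overwrites the calling array and returns it. A minimal sketch of the difference (the values here are illustrative, not taken from any project below):

INDArray pixels = Nd4j.create(new float[] {0f, 127.5f, 255f});

// div() leaves the receiver untouched and returns a fresh array
INDArray scaledCopy = pixels.div(255f);   // scaledCopy = [0.0, 0.5, 1.0]; pixels unchanged

// divi() divides in place and returns the receiver itself
pixels.divi(255f);                        // pixels is now [0.0, 0.5, 1.0]

Because divi() avoids an allocation, it is the usual choice in the hot paths below (normalizers, loss gradients, updaters) where the intermediate value is not needed again.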
Example 1
Source File: MinMaxStrategy.java    From nd4j with Apache License 2.0
/**
 * Denormalize a data array
 *
 * @param array the data to denormalize
 * @param stats statistics of the data population
 */
@Override
public void revert(INDArray array, INDArray maskArray, MinMaxStats stats) {
    // Subtract target range minimum value
    array.subi(minRange);
    // Divide by the width of the target range
    array.divi(maxRange - minRange);

    if (array.rank() <= 2) {
        array.muliRowVector(stats.getRange());
        array.addiRowVector(stats.getLower());
    } else {
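        // Rank-3 (e.g. time series) data: broadcast the per-feature range and minimum along dimension 1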
        Nd4j.getExecutioner().execAndReturn(new BroadcastMulOp(array, stats.getRange(), array, 1));
        Nd4j.getExecutioner().execAndReturn(new BroadcastAddOp(array, stats.getLower(), array, 1));
    }

    if (maskArray != null) {
        DataSetUtil.setMaskedValuesToZero(array, maskArray);
    }
}
 
Example 2
Source File: LayerHelperValidationUtil.java    From deeplearning4j with Apache License 2.0
private static INDArray relError(@NonNull INDArray a1, @NonNull INDArray a2, double minAbsError){
    long numNaN1 = Nd4j.getExecutioner().exec(new MatchCondition(a1, Conditions.isNan(), Integer.MAX_VALUE)).getInt(0);
    long numNaN2 = Nd4j.getExecutioner().exec(new MatchCondition(a2, Conditions.isNan(), Integer.MAX_VALUE)).getInt(0);
    Preconditions.checkState(numNaN1 == 0, "Array 1 has NaNs");
    Preconditions.checkState(numNaN2 == 0, "Array 2 has NaNs");

    INDArray abs1 = Transforms.abs(a1, true);
    INDArray abs2 = Transforms.abs(a2, true);
    INDArray absDiff = Transforms.abs(a1.sub(a2), false);

    //Mask: abs(a1-a2) >= minAbsError ? 1 : 0
    INDArray greaterThanMinAbs = Transforms.abs(a1.sub(a2), false);
    BooleanIndexing.replaceWhere(greaterThanMinAbs, 0.0, Conditions.lessThan(minAbsError));
    BooleanIndexing.replaceWhere(greaterThanMinAbs, 1.0, Conditions.greaterThan(0.0));

    INDArray result = absDiff.divi(abs1.add(abs2));
    //The only way to get NaNs here (given none in the inputs) is 0/0, i.e. both values are zero
    BooleanIndexing.replaceWhere(result, 0.0, Conditions.isNan());
    //Finally, set to 0 if less than min abs error, or unchanged otherwise
    result.muli(greaterThanMinAbs);

    return result;
}
 
Example 3
Source File: DataSetIteratorTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testMnist() throws Exception {
    ClassPathResource cpr = new ClassPathResource("mnist_first_200.txt");
    CSVRecordReader rr = new CSVRecordReader(0, ',');
    rr.initialize(new FileSplit(cpr.getTempFileFromArchive()));
    RecordReaderDataSetIterator dsi = new RecordReaderDataSetIterator(rr, 10, 0, 10);

    MnistDataSetIterator iter = new MnistDataSetIterator(10, 200, false, true, false, 0);

    while (dsi.hasNext()) {
        DataSet dsExp = dsi.next();
        DataSet dsAct = iter.next();

        INDArray fExp = dsExp.getFeatures();
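        // Raw CSV pixel values are 0-255; scale to [0,1] to match MnistDataSetIterator's normalized output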
        fExp.divi(255);
        INDArray lExp = dsExp.getLabels();

        INDArray fAct = dsAct.getFeatures();
        INDArray lAct = dsAct.getLabels();

        assertEquals(fExp, fAct.castTo(fExp.dataType()));
        assertEquals(lExp, lAct.castTo(lExp.dataType()));
    }
    assertFalse(iter.hasNext());
}
 
Example 4
Source File: ImageMultiPreProcessingScaler.java    From deeplearning4j with Apache License 2.0
@Override
public void revertFeatures(INDArray[] features) {
    for( int i=0; i<featureIndices.length; i++ ){
        INDArray f = features[featureIndices[i]];
        if (minRange != 0) {
            f.subi(minRange);
        }
        if (maxRange - minRange != 1.0) {
            f.divi(maxRange - minRange);
        }
        f.muli(this.maxPixelVal);
    }
}
 
Example 5
Source File: MaxNormConstraint.java    From deeplearning4j with Apache License 2.0
@Override
public void apply(INDArray param){
    INDArray norm = param.norm2(dimensions);
    INDArray clipped = norm.unsafeDuplication();
    BooleanIndexing.replaceWhere(clipped, maxNorm, Conditions.greaterThan(maxNorm));
    norm.addi(epsilon);
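    // Scaling ratio: ~1 where the norm is within maxNorm, maxNorm/norm where it must be clipped (epsilon guards against division by zero)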
    clipped.divi(norm);

    Broadcast.mul(param, clipped, param, getBroadcastDims(dimensions, param.rank()) );
}
 
Example 6
Source File: NDArrayTestsFortran.java    From deeplearning4j with Apache License 2.0
@Test
public void testScalarOps() {
    INDArray n = Nd4j.create(Nd4j.ones(27).data(), new long[] {3, 3, 3});
    assertEquals(27d, n.length(), 1e-1);
    n.addi(Nd4j.scalar(1d));
    n.subi(Nd4j.scalar(1.0d));
    n.muli(Nd4j.scalar(1.0d));
    n.divi(Nd4j.scalar(1.0d));

    n = Nd4j.create(Nd4j.ones(27).data(), new long[] {3, 3, 3});
    assertEquals(27, n.sumNumber().doubleValue(), 1e-1);
    INDArray a = n.slice(2);
    assertArrayEquals(new long[] {3, 3}, a.shape());
}
 
Example 7
Source File: FastText.java    From deeplearning4j with Apache License 2.0
@Override
public INDArray getWordVectorMatrixNormalized(String word) {
    if (modelVectorsLoaded) {
        return word2Vec.getWordVectorMatrixNormalized(word);
    }
    else {
        INDArray r = getWordVectorMatrix(word);
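        // nrm2 is the Euclidean (L2) norm of r; dividing in place normalizes the vector to unit length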
        return r.divi(Nd4j.getBlasWrapper().nrm2(r));
    }
}
 
Example 8
Source File: CudaScalarsTests.java    From nd4j with Apache License 2.0
@Test
public void testScalMul5() throws Exception {
    INDArray array1 = Nd4j.create(new double[] {0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 191.0, 253.0, 253.0, 253.0, 60.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 190.0, 251.0, 251.0, 251.0, 230.0, 166.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 32.0, 127.0, 0.0, 190.0, 251.0, 251.0, 251.0, 253.0, 220.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 24.0, 84.0, 221.0, 229.0, 251.0, 139.0, 23.0, 31.0, 225.0, 251.0, 253.0, 248.0, 111.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 194.0, 251.0, 251.0, 251.0, 251.0, 218.0, 39.0, 0.0, 83.0, 193.0, 253.0, 251.0, 126.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 24.0, 194.0, 255.0, 253.0, 253.0, 253.0, 253.0, 255.0, 63.0, 0.0, 0.0, 100.0, 255.0, 253.0, 173.0, 12.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 16.0, 186.0, 251.0, 253.0, 251.0, 251.0, 251.0, 251.0, 221.0, 54.0, 0.0, 0.0, 0.0, 253.0, 251.0, 251.0, 149.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 16.0, 189.0, 251.0, 251.0, 253.0, 251.0, 251.0, 219.0, 126.0, 0.0, 0.0, 0.0, 0.0, 0.0, 253.0, 251.0, 251.0, 188.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.0, 141.0, 251.0, 251.0, 253.0, 251.0, 251.0, 50.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 114.0, 251.0, 251.0, 244.0, 83.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 127.0, 251.0, 251.0, 253.0, 231.0, 94.0, 12.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 96.0, 251.0, 251.0, 251.0, 94.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 92.0, 253.0, 253.0, 253.0, 255.0, 63.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 96.0, 253.0, 253.0, 253.0, 95.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 12.0, 197.0, 251.0, 251.0, 251.0, 161.0, 16.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 115.0, 251.0, 251.0, 251.0, 94.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 96.0, 251.0, 251.0, 251.0, 172.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 253.0, 251.0, 251.0, 219.0, 47.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 96.0, 251.0, 251.0, 251.0, 94.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 16.0, 162.0, 253.0, 251.0, 251.0, 148.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 96.0, 251.0, 251.0, 251.0, 94.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 20.0, 158.0, 181.0, 251.0, 253.0, 251.0, 172.0, 12.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 96.0, 253.0, 253.0, 253.0, 153.0, 96.0, 96.0, 96.0, 96.0, 96.0, 155.0, 253.0, 253.0, 253.0, 253.0, 255.0, 253.0, 126.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 24.0, 185.0, 251.0, 251.0, 251.0, 253.0, 251.0, 251.0, 251.0, 251.0, 253.0, 251.0, 251.0, 251.0, 251.0, 253.0, 207.0, 31.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 16.0, 188.0, 251.0, 251.0, 253.0, 251.0, 251.0, 251.0, 251.0, 253.0, 251.0, 251.0, 251.0, 172.0, 126.0, 31.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 95.0, 
188.0, 228.0, 253.0, 251.0, 251.0, 251.0, 251.0, 253.0, 243.0, 109.0, 31.0, 12.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 59.0, 95.0, 94.0, 94.0, 94.0, 193.0, 95.0, 82.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0});
    array1.divi(255f);

    System.out.println("Array1: "+ array1);
}
 
Example 9
Source File: CudaScalarsTests.java    From nd4j with Apache License 2.0
@Test
public void testScalMul4() throws Exception {
    INDArray array1 = Nd4j.zeros(1, 784);
    INDArray array2 = Nd4j.zeros(1, 784).dup('f');

    array1.divi(255f);
    array2.divi(255f);
    //System.out.println("MUL result: " + array1);
    assertEquals(0.0f, array1.getDouble(0), 0.0001f);
    assertEquals(0.0f, array1.getDouble(783), 0.0001f);

    assertEquals(array1, array2);
}
 
Example 10
Source File: CudaScalarsTests.java    From nd4j with Apache License 2.0
@Test
public void testScalMul3() throws Exception {
    INDArray array1 = Nd4j.linspace(1, 784, 784).reshape(1, 784);
    INDArray array2 = Nd4j.linspace(1, 784, 784).reshape(1, 784).dup('f');

    array1.divi(0.5f);
    array2.divi(0.5f);
    //System.out.println("MUL result: " + array1);
    assertEquals(2.0f, array1.getDouble(0), 0.0001f);
    assertEquals(1568f, array1.getDouble(783), 0.0001f);

    assertEquals(array1, array2);
}
 
Example 11
Source File: GaussianReconstructionDistribution.java    From deeplearning4j with Apache License 2.0
@Override
public INDArray gradient(INDArray x, INDArray preOutDistributionParams) {
    INDArray output = preOutDistributionParams.dup();
    activationFn.getActivation(output, true);

    val size = output.size(1) / 2;
    INDArray mean = output.get(NDArrayIndex.all(), NDArrayIndex.interval(0, size));
    INDArray logStdevSquared = output.get(NDArrayIndex.all(), NDArrayIndex.interval(size, 2 * size));

    INDArray sigmaSquared = Transforms.exp(logStdevSquared, true).castTo(x.dataType());

    INDArray xSubMean = x.sub(mean.castTo(x.dataType()));
    INDArray xSubMeanSq = xSubMean.mul(xSubMean);

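    // Gradient of the Gaussian log-likelihood w.r.t. the mean: dL/dmu = (x - mu) / sigma^2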
    INDArray dLdmu = xSubMean.divi(sigmaSquared);

    INDArray sigma = Transforms.sqrt(sigmaSquared, true);
    INDArray sigma3 = Transforms.pow(sigmaSquared, 3.0 / 2);

    INDArray dLdsigma = sigma.rdiv(-1).addi(xSubMeanSq.divi(sigma3));
    INDArray dLdlogSigma2 = sigma.divi(2).muli(dLdsigma);

    INDArray dLdx = Nd4j.createUninitialized(preOutDistributionParams.dataType(), output.shape());
    dLdx.put(new INDArrayIndex[] {NDArrayIndex.all(), NDArrayIndex.interval(0, size)}, dLdmu);
    dLdx.put(new INDArrayIndex[] {NDArrayIndex.all(), NDArrayIndex.interval(size, 2 * size)}, dLdlogSigma2);
    dLdx.negi();

    //dL/dz
    return activationFn.backprop(preOutDistributionParams.dup(), dLdx).getFirst();
}
 
Example 12
Source File: DeepGL.java    From ml-models with Apache License 2.0
@Override
public INDArray op(INDArray neighbourhoodFeatures, INDArray nodeFeature) {
    double sigma = 16;
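    // Gaussian (RBF-style) similarity: exp(-||neighbourhood - node||^2 / sigma^2)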
    final INDArray norm2 = Transforms.pow(neighbourhoodFeatures.subRowVector(nodeFeature), 2).sum(0);
    norm2.divi(-sigma * sigma);
    return Transforms.exp(norm2);
}
 
Example 13
Source File: CuDNNValidationUtil.java    From deeplearning4j with Apache License 2.0
private static INDArray relError(@NonNull INDArray a1, @NonNull INDArray a2, double minAbsError){
    long numNaN1 = Nd4j.getExecutioner().exec(new MatchCondition(a1, Conditions.isNan(), Integer.MAX_VALUE)).getInt(0);
    long numNaN2 = Nd4j.getExecutioner().exec(new MatchCondition(a2, Conditions.isNan(), Integer.MAX_VALUE)).getInt(0);
    Preconditions.checkState(numNaN1 == 0, "Array 1 has NaNs");
    Preconditions.checkState(numNaN2 == 0, "Array 2 has NaNs");

    INDArray abs1 = Transforms.abs(a1, true);
    INDArray abs2 = Transforms.abs(a2, true);
    INDArray absDiff = Transforms.abs(a1.sub(a2), false);

    //Mask: abs(a1-a2) >= minAbsError ? 1 : 0
    INDArray greaterThanMinAbs = Transforms.abs(a1.sub(a2), false);
    BooleanIndexing.replaceWhere(greaterThanMinAbs, 0.0, Conditions.lessThan(minAbsError));
    BooleanIndexing.replaceWhere(greaterThanMinAbs, 1.0, Conditions.greaterThan(0.0));

    INDArray result = absDiff.divi(abs1.add(abs2));
    //The only way to get NaNs here (given none in the inputs) is 0/0, i.e. both values are zero
    BooleanIndexing.replaceWhere(result, 0.0, Conditions.isNan());
    //Finally, set to 0 if less than min abs error, or unchanged otherwise
    result.muli(greaterThanMinAbs);

    return result;
}
 
Example 14
Source File: CustomImagePreProcessingScaler.java    From konduit-serving with Apache License 2.0
public void preProcess(INDArray features) {
    features.divi(this.maxPixelVal); //Scaled to 0->1
    if (this.maxRange - this.minRange != 1)
        features.muli(this.maxRange - this.minRange); //Scaled to minRange -> maxRange
    if (this.minRange != 0)
        features.addi(this.minRange); //Offset by minRange
}
 
Example 15
Source File: LossMSE.java    From nd4j with Apache License 2.0
@Override
public INDArray computeGradient(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
    INDArray gradients = super.computeGradient(labels, preOutput, activationFn, mask);
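    // MSE is the mean squared error, so the underlying L2 gradient is additionally divided by the number of output columns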
    return gradients.divi(labels.size(1));
}
 
Example 16
Source File: LossMAE.java    From deeplearning4j with Apache License 2.0
@Override
public INDArray computeScoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
    INDArray scoreArr = super.computeScoreArray(labels, preOutput, activationFn, mask);
    scoreArr.divi(scoreArr.size(1));
    return scoreArr;
}
 
Example 17
Source File: LossMAE.java    From deeplearning4j with Apache License 2.0
@Override
public INDArray computeGradient(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
    INDArray gradients = super.computeGradient(labels, preOutput, activationFn, mask);
    gradients.divi(labels.size(1));
    return gradients;
}
 
Example 18
Source File: NDArrayColumnsMathOpTransform.java    From DataVec with Apache License 2.0
@Override
protected Writable doOp(Writable... input) {
    INDArray out = ((NDArrayWritable) input[0]).get().dup();

    switch (mathOp) {
        case Add:
            for (int i = 1; i < input.length; i++) {
                out.addi(((NDArrayWritable) input[i]).get());
            }
            break;
        case Subtract:
            out.subi(((NDArrayWritable) input[1]).get());
            break;
        case Multiply:
            for (int i = 1; i < input.length; i++) {
                out.muli(((NDArrayWritable) input[i]).get());
            }
            break;
        case Divide:
            out.divi(((NDArrayWritable) input[1]).get());
            break;
        case ReverseSubtract:
            out.rsubi(((NDArrayWritable) input[1]).get());
            break;
        case ReverseDivide:
            out.rdivi(((NDArrayWritable) input[1]).get());
            break;
        case Modulus:
        case ScalarMin:
        case ScalarMax:
            throw new IllegalArgumentException(
                            "Invalid MathOp: cannot use " + mathOp + " with NDArrayColumnsMathOpTransform");
        default:
            throw new RuntimeException("Unknown MathOp: " + mathOp);
    }

    //To avoid threading issues: ensure any pending (possibly asynchronous) ops complete before the result is handed to another thread
    Nd4j.getExecutioner().commit();

    return new NDArrayWritable(out);
}
 
Example 19
Source File: UpdaterJavaCode.java    From deeplearning4j with Apache License 2.0
public static void applyNadamUpdater(INDArray gradient, INDArray m, INDArray v, double learningRate, double beta1, double beta2,
                                    double epsilon, int iteration){

    INDArray oneMinusBeta1Grad = gradient.mul(1.0 - beta1);
    m.muli(beta1).addi(oneMinusBeta1Grad);

    INDArray oneMinusBeta2GradSquared = gradient.mul(gradient).muli(1.0 - beta2);
    v.muli(beta2).addi(oneMinusBeta2GradSquared);

    double beta1t = FastMath.pow(beta1, iteration + 1);

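    // Nadam combines the bias-corrected momentum estimate with a bias-corrected current-gradient (Nesterov) term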
    INDArray biasCorrectedEstimateOfMomentum = m.mul(beta1).divi(1.0 - beta1t);
    INDArray secondTerm = oneMinusBeta1Grad.divi(1 - beta1t);

    INDArray alphat = biasCorrectedEstimateOfMomentum.add(secondTerm).muli(learningRate);

    INDArray sqrtV = Transforms.sqrt(v.dup('c'), false).addi(epsilon);

    gradient.assign(alphat).divi(sqrtV);
}
 
Example 20
Source File: GlobalPoolingLayer.java    From deeplearning4j with Apache License 2.0
private INDArray epsilonHelperFullArray(INDArray inputArray, INDArray epsilon, int[] poolDim) {

    //Broadcast: occurs on the remaining dimensions, after the pool dimensions have been removed.
    //TODO find a more efficient way to do this
    int[] broadcastDims = new int[inputArray.rank() - poolDim.length];
    int count = 0;
    for (int i = 0; i < inputArray.rank(); i++) {
        if (ArrayUtils.contains(poolDim, i))
            continue;
        broadcastDims[count++] = i;
    }

    switch (poolingType) {
        case MAX:
            INDArray isMax = Nd4j.exec(new IsMax(inputArray, inputArray.ulike(), poolDim))[0];
            return Nd4j.getExecutioner().exec(new BroadcastMulOp(isMax, epsilon, isMax, broadcastDims));
        case AVG:
            //if out = avg(in,dims) then dL/dIn = 1/N * dL/dOut
            int n = 1;
            for (int d : poolDim) {
                n *= inputArray.size(d);
            }
            INDArray ret = inputArray.ulike();
            Nd4j.getExecutioner().exec(new BroadcastCopyOp(ret, epsilon, ret, broadcastDims));
            ret.divi(n);

            return ret;
        case SUM:
            INDArray retSum = inputArray.ulike();
            Nd4j.getExecutioner().exec(new BroadcastCopyOp(retSum, epsilon, retSum, broadcastDims));
            return retSum;
        case PNORM:
            int pnorm = layerConf().getPnorm();

            //First: do forward pass to get pNorm array
            INDArray abs = Transforms.abs(inputArray, true);
            Transforms.pow(abs, pnorm, false);

            INDArray pNorm = Transforms.pow(abs.sum(poolDim), 1.0 / pnorm);

            //dL/dIn = dL/dOut * dOut/dIn
            //dOut/dIn = in .* |in|^(p-2) / ||in||_p^(p-1), where ||in||_p is the output p-norm

            INDArray numerator;
            if (pnorm == 2) {
                numerator = inputArray.dup();
            } else {
                INDArray absp2 = Transforms.pow(Transforms.abs(inputArray, true), pnorm - 2, false);
                numerator = inputArray.mul(absp2);
            }

            INDArray denom = Transforms.pow(pNorm, pnorm - 1, false);
            denom.rdivi(epsilon);
            Nd4j.getExecutioner().execAndReturn(new BroadcastMulOp(numerator, denom, numerator, broadcastDims));

            return numerator;
        default:
            throw new RuntimeException("Unknown or not supported pooling type: " + poolingType + " " + layerId());
    }
}