org.nd4j.linalg.indexing.conditions.Conditions Java Examples

The following examples show how to use org.nd4j.linalg.indexing.conditions.Conditions. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: BernoulliReconstructionDistribution.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Override
public INDArray gradient(INDArray x, INDArray preOutDistributionParams) {
    // Gradient w.r.t. the pre-activation distribution parameters for Bernoulli
    // reconstruction data x. Returned negated (see note at the bottom).
    INDArray output = preOutDistributionParams.dup();
    activationFn.getActivation(output, true);               // output = p = activation(preOut), in place
    x = x.castTo(preOutDistributionParams.dataType());      // ensure matching dtype for the arithmetic below

    INDArray diff = x.sub(output);                          // (x - p)
    INDArray outOneMinusOut = output.rsub(1.0).muli(output);// p * (1 - p)

    INDArray grad = diff.divi(outOneMinusOut);              // (x - p) / (p * (1 - p)); mutates diff in place
    grad = activationFn.backprop(preOutDistributionParams.dup(), grad).getFirst();  // chain rule through the activation

    //Issue: if output == 0 or output == 1, then (assuming sigmoid output or similar)
    //sigmaPrime == 0, sigmaPrime * (x-out) / (out*(1-out)) == 0 * (x-out) / 0 -> 0/0 -> NaN. But taking limit, we want
    //0*(x-out)/0 == 0 -> implies 0 gradient at the far extremes (0 or 1) of the output
    BooleanIndexing.replaceWhere(grad, 0.0, Conditions.isNan());
    // NOTE(review): final negation suggests this is the gradient of the NEGATIVE
    // log probability — confirm against the ReconstructionDistribution contract.
    return grad.negi();
}
 
Example #2
Source File: RandomTests.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testBinomialDistribution1() {
    // Two RNGs seeded identically must produce identical binomial samples.
    Random random1 = Nd4j.getRandomFactory().getNewRandomInstance(119);
    Random random2 = Nd4j.getRandomFactory().getNewRandomInstance(119);

    INDArray z1 = Nd4j.zeros(1000);
    INDArray z2 = Nd4j.zeros(1000);
    INDArray z1Dup = Nd4j.zeros(1000);

    BinomialDistribution op1 = new BinomialDistribution(z1, 5, 0.25);
    BinomialDistribution op2 = new BinomialDistribution(z2, 5, 0.25);

    Nd4j.getExecutioner().exec(op1, random1);
    Nd4j.getExecutioner().exec(op2, random2);

    // Sampling must actually have written into z1...
    assertNotEquals(z1Dup, z1);
    // ...and equal seeds must yield equal results.
    assertEquals(z1, z2);

    // Bug fix: BooleanIndexing.and(...) returns a boolean that was previously
    // discarded, so the range checks asserted nothing. Assert the results:
    // all samples of Binomial(5, 0.25) must lie in [0, 5].
    assertTrue(BooleanIndexing.and(z1, Conditions.lessThanOrEqual(5.0)));
    assertTrue(BooleanIndexing.and(z1, Conditions.greaterThanOrEqual(0.0)));
}
 
Example #3
Source File: ActivationRReLU.java    From nd4j with Apache License 2.0 6 votes vote down vote up
@Override
public INDArray getActivation(INDArray in, boolean training) {
    // Inference mode: deterministic leaky ReLU using the mean slope of [l, u].
    if (!training) {
        this.alpha = null;
        double meanSlope = 0.5 * (l + u);
        return Nd4j.getExecutioner().execAndReturn(new RectifedLinear(in, meanSlope));
    }

    // Training mode: sample a fresh per-element negative-region slope alpha ~ U(l, u),
    // kept outside any workspace so it survives for the backward pass.
    try (MemoryWorkspace ws = Nd4j.getWorkspaceManager().scopeOutOfWorkspaces()) {
        this.alpha = Nd4j.rand(in.shape(), l, u, Nd4j.getRandom());
    }
    INDArray scaled = in.mul(alpha);
    // In place: negative activations become alpha * x, non-negative pass through.
    BooleanIndexing.replaceWhere(in, scaled, Conditions.lessThan(0));
    return in;
}
 
Example #4
Source File: RandomTests.java    From nd4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testBinomialDistribution2() throws Exception {
    // Two RNGs seeded identically must produce identical binomial samples,
    // here with a per-position probability vector instead of a scalar p.
    Random random1 = Nd4j.getRandomFactory().getNewRandomInstance(119);
    Random random2 = Nd4j.getRandomFactory().getNewRandomInstance(119);

    INDArray z1 = Nd4j.zeros(1000);
    INDArray z2 = Nd4j.zeros(1000);
    INDArray z1Dup = Nd4j.zeros(1000);

    INDArray probs = Nd4j.create(new float[] {0.25f, 0.43f, 0.55f, 0.43f, 0.25f});

    BinomialDistribution op1 = new BinomialDistribution(z1, 5, probs);
    BinomialDistribution op2 = new BinomialDistribution(z2, 5, probs);

    Nd4j.getExecutioner().exec(op1, random1);
    Nd4j.getExecutioner().exec(op2, random2);

    // Sampling must actually have written into z1...
    assertNotEquals(z1Dup, z1);
    // ...and equal seeds must yield equal results.
    assertEquals(z1, z2);

    // Bug fix: BooleanIndexing.and(...) returns a boolean that was previously
    // discarded, so the range checks asserted nothing. All samples must be in [0, 5].
    assertTrue(BooleanIndexing.and(z1, Conditions.lessThanOrEqual(5.0)));
    assertTrue(BooleanIndexing.and(z1, Conditions.greaterThanOrEqual(0.0)));
}
 
Example #5
Source File: RandomTests.java    From nd4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testBinomialDistribution1() throws Exception {
    // Two RNGs seeded identically must produce identical binomial samples.
    Random random1 = Nd4j.getRandomFactory().getNewRandomInstance(119);
    Random random2 = Nd4j.getRandomFactory().getNewRandomInstance(119);

    INDArray z1 = Nd4j.zeros(1000);
    INDArray z2 = Nd4j.zeros(1000);
    INDArray z1Dup = Nd4j.zeros(1000);

    BinomialDistribution op1 = new BinomialDistribution(z1, 5, 0.25);
    BinomialDistribution op2 = new BinomialDistribution(z2, 5, 0.25);

    Nd4j.getExecutioner().exec(op1, random1);
    Nd4j.getExecutioner().exec(op2, random2);

    // Sampling must actually have written into z1...
    assertNotEquals(z1Dup, z1);
    // ...and equal seeds must yield equal results.
    assertEquals(z1, z2);

    // Bug fix: BooleanIndexing.and(...) returns a boolean that was previously
    // discarded, so the range checks asserted nothing. All samples must be in [0, 5].
    assertTrue(BooleanIndexing.and(z1, Conditions.lessThanOrEqual(5.0)));
    assertTrue(BooleanIndexing.and(z1, Conditions.greaterThanOrEqual(0.0)));
}
 
Example #6
Source File: NDBaseTest.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testMatchConditionCount() {
    NDBase base = new NDBase();

    // Full-array reduction: exactly one element (eps-)equals 0.0.
    INDArray x = Nd4j.createFromArray(1.0, 1.0, 1.0, 0.0, 1.0, 1.0);
    INDArray count = base.matchConditionCount(x, Conditions.epsEquals(0.0));
    assertEquals(Nd4j.scalar(1L), count);

    // 3x3 identity: exactly one 1.0 per row and per column.
    x = Nd4j.eye(3);

    // keepDims = true along dimension 1 -> column vector of per-row counts.
    count = base.matchConditionCount(x, Conditions.epsEquals(1.0), true, 1);
    INDArray expected = Nd4j.createFromArray(new Long[][]{{1L}, {1L}, {1L}});
    assertEquals(expected, count);

    // keepDims = true along dimension 0 -> row vector of per-column counts.
    count = base.matchConditionCount(x, Conditions.epsEquals(1.0), true, 0);
    expected = Nd4j.createFromArray(new Long[][]{{1L, 1L, 1L}});
    assertEquals(expected, count);

    // keepDims = false collapses the reduced dimension to a plain vector.
    count = base.matchConditionCount(x, Conditions.epsEquals(1.0), false, 1);
    expected = Nd4j.createFromArray(1L, 1L, 1L);
    assertEquals(expected, count);

    count = base.matchConditionCount(x, Conditions.epsEquals(1.0), false, 0);
    assertEquals(expected, count);
}
 
Example #7
Source File: RandomProjectionLSH.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Override
public INDArray bucket(INDArray query) {
    // Hash the query itself first.
    INDArray queryRes = rawBucketOf(query);

    if(numTables > 1) {
        // NOTE(review): entropy(query) presumably yields numTables perturbed
        // variants of the query, one per row — confirm against entropy() impl.
        INDArray entropyQueries = entropy(query);

        // loop, addi + conditionalreplace -> poor man's OR function
        for (int i = 0; i < numTables; i++) {
            INDArray row = entropyQueries.getRow(i, true);
            queryRes.addi(rawBucketOf(row));
        }
        // Clamp all positive accumulated counts back to 1.0 so the result
        // remains a binary membership mask.
        BooleanIndexing.replaceWhere(queryRes, 1.0, Conditions.greaterThan(0.0));
    }

    return queryRes;
}
 
Example #8
Source File: RandomTests.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testBinomialDistribution2() {
    // Two RNGs seeded identically must produce identical binomial samples,
    // here with a per-position probability vector instead of a scalar p.
    Random random1 = Nd4j.getRandomFactory().getNewRandomInstance(119);
    Random random2 = Nd4j.getRandomFactory().getNewRandomInstance(119);

    INDArray z1 = Nd4j.create(DataType.FLOAT, 1000);
    INDArray z2 = Nd4j.zeros(DataType.FLOAT,1000);
    INDArray z1Dup = Nd4j.zeros(DataType.FLOAT,1000);

    INDArray probs = Nd4j.create(new float[] {0.25f, 0.43f, 0.55f, 0.43f, 0.25f});

    BinomialDistribution op1 = new BinomialDistribution(z1, 5, probs);
    BinomialDistribution op2 = new BinomialDistribution(z2, 5, probs);

    Nd4j.getExecutioner().exec(op1, random1);
    Nd4j.getExecutioner().exec(op2, random2);

    // Sampling must actually have written into z1...
    assertNotEquals(z1Dup, z1);
    // ...and equal seeds must yield equal results.
    assertEquals(z1, z2);

    // Bug fix: BooleanIndexing.and(...) returns a boolean that was previously
    // discarded, so the range checks asserted nothing. All samples must be in [0, 5].
    assertTrue(BooleanIndexing.and(z1, Conditions.lessThanOrEqual(5.0)));
    assertTrue(BooleanIndexing.and(z1, Conditions.greaterThanOrEqual(0.0)));
}
 
Example #9
Source File: OpExecutionerUtil.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
public static void checkForInf(INDArray z) {
    // Profiler-driven sanity check: no-op unless INF panic mode is enabled.
    if (!OpProfiler.getInstance().getConfig().isCheckForINF())
        return;

    // Only floating-point arrays can hold Inf; empty arrays have nothing to check.
    if (z.isEmpty() || !z.dataType().isFPType())
        return;

    int infCount;
    if (z.isScalar()) {
        // Scalar: read the single value directly instead of launching a reduction op.
        boolean isInf = z.data().dataType() == DataType.DOUBLE
                ? Double.isInfinite(z.getDouble(0))
                : Float.isInfinite(z.getFloat(0));
        infCount = isInf ? 1 : 0;
    } else {
        MatchCondition condition = new MatchCondition(z, Conditions.isInfinite());
        infCount = Nd4j.getExecutioner().exec(condition).getInt(0);
    }

    if (infCount > 0)
        throw new ND4JOpProfilerException("P.A.N.I.C.! Op.Z() contains " + infCount + " Inf value(s)");

}
 
Example #10
Source File: SporadicTests.java    From nd4j with Apache License 2.0 6 votes vote down vote up
/**
 * This is special test that checks for memory alignment
 * @throws Exception
 */
@Test
@Ignore
public void testDTypeSpam() throws Exception {
    Random rnd = new Random();
    for(int i = 0; i < 100; i++) {
        // Create a constant buffer while the global context dtype is FLOAT...
        DataTypeUtil.setDTypeForContext(DataBuffer.Type.FLOAT);
        float rand[] = new float[rnd.nextInt(10) + 1];
        for (int x = 0; x < rand.length; x++) {
            rand[x] = rnd.nextFloat();
        }
        Nd4j.getConstantHandler().getConstantBuffer(rand);

        // Random shape with 2-4 dimensions, each dimension in [2, 101].
        int shape[] = new int[rnd.nextInt(3)+2];
        for (int x = 0; x < shape.length; x++) {
            shape[x] = rnd.nextInt(100) + 2;
        }

        // ...then switch the context dtype to DOUBLE and run an op, spamming
        // dtype changes to surface any alignment issues in buffer handling.
        DataTypeUtil.setDTypeForContext(DataBuffer.Type.DOUBLE);
        INDArray array = Nd4j.rand(shape);
        BooleanIndexing.applyWhere(array, Conditions.lessThan(rnd.nextDouble()), rnd.nextDouble());
    }
}
 
Example #11
Source File: TransformOpValidation.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testReplaceWhereScalar() {
    // Validate SameDiff replaceWhere (scalar set-value) against the ndarray-level op
    // for several conditions.
    for (Condition c : new Condition[]{Conditions.lessThan(0.5), Conditions.greaterThan(0.5), Conditions.equals(0.5)}) {

        log.info("Testing condition: " + c.getClass().getSimpleName());
        INDArray inArr = Nd4j.rand(DataType.DOUBLE, 3, 4);
        SameDiff sd = SameDiff.create();
        SDVariable in = sd.var("in", inArr);
        SDVariable where = sd.replaceWhere(in, 10, c);

        // Reference result computed with the ndarray-level op.
        INDArray exp = inArr.dup();
        BooleanIndexing.replaceWhere(exp, 10, c);

        SDVariable loss = where.std(true);

        TestCase tc = new TestCase(sd);
        // Bug fix: `exp` was computed but never used, so the forward-pass output was
        // never actually validated. Register it as the expected output.
        tc.expectedOutput(where.name(), exp);

        String err = OpValidation.validate(tc);
        assertNull(err);
    }
}
 
Example #12
Source File: LayerHelperValidationUtil.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
/**
 * Element-wise relative error |a1 - a2| / (|a1| + |a2|), with entries whose absolute
 * difference is below {@code minAbsError} set to 0, and 0/0 cases set to 0.
 * Both inputs must be NaN-free.
 */
private static INDArray relError(@NonNull INDArray a1, @NonNull INDArray a2, double minAbsError){
    long numNaN1 = Nd4j.getExecutioner().exec(new MatchCondition(a1, Conditions.isNan(), Integer.MAX_VALUE)).getInt(0);
    long numNaN2 = Nd4j.getExecutioner().exec(new MatchCondition(a2, Conditions.isNan(), Integer.MAX_VALUE)).getInt(0);
    Preconditions.checkState(numNaN1 == 0, "Array 1 has NaNs");
    Preconditions.checkState(numNaN2 == 0, "Array 2 has NaNs");

    INDArray abs1 = Transforms.abs(a1, true);
    INDArray abs2 = Transforms.abs(a2, true);
    INDArray absDiff = Transforms.abs(a1.sub(a2), false);

    // Mask: 1 where abs(a1-a2) >= minAbsError, 0 otherwise.
    // (Fixed: previously recomputed Transforms.abs(a1.sub(a2)) a second time — reuse a
    // copy of absDiff instead. The old comment also stated the inverse of the mask.)
    INDArray greaterThanMinAbs = absDiff.dup();
    BooleanIndexing.replaceWhere(greaterThanMinAbs, 0.0, Conditions.lessThan(minAbsError));
    BooleanIndexing.replaceWhere(greaterThanMinAbs, 1.0, Conditions.greaterThan(0.0));

    INDArray result = absDiff.divi(abs1.add(abs2));
    //Only way to have NaNs given there weren't any in original : both 0s
    BooleanIndexing.replaceWhere(result, 0.0, Conditions.isNan());
    //Finally, set to 0 if less than min abs error, or unchanged otherwise
    result.muli(greaterThanMinAbs);

    return result;
}
 
Example #13
Source File: ActivationRReLU.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Override
public INDArray getActivation(INDArray in, boolean training) {
    // Inference mode: deterministic leaky ReLU using the mean slope of [l, u].
    if (!training) {
        this.alpha = null;
        double meanSlope = 0.5 * (l + u);
        return Nd4j.getExecutioner().exec(new RectifiedLinear(in, meanSlope));
    }

    // Training mode: sample a fresh per-element negative-region slope alpha ~ U(l, u),
    // kept outside any workspace so it survives for the backward pass.
    try (MemoryWorkspace ignored = Nd4j.getWorkspaceManager().scopeOutOfWorkspaces()) {
        this.alpha = Nd4j.rand(l, u, Nd4j.getRandom(), in.shape());
    }
    INDArray scaled = in.mul(alpha);
    // In place: negative activations become alpha * x, non-negative pass through.
    BooleanIndexing.replaceWhere(in, scaled, Conditions.lessThan(0));
    return in;
}
 
Example #14
Source File: LossMCXENT.java    From nd4j with Apache License 2.0 5 votes vote down vote up
private INDArray scoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
    // Per-element multi-class cross-entropy score: labels * log(activation(preOutput)),
    // optionally weighted per class and masked per example.
    if (labels.size(1) != preOutput.size(1)) {
        throw new IllegalArgumentException(
                        "Labels array numColumns (size(1) = " + labels.size(1) + ") does not match output layer"
                                        + " number of outputs (nOut = " + preOutput.size(1) + ") ");

    }

    INDArray output = activationFn.getActivation(preOutput.dup(), true);
    // Clip softmax outputs away from 0 and 1 so log(output) cannot produce -Infinity/NaN.
    if(activationFn instanceof ActivationSoftmax && softmaxClipEps > 0.0){
        BooleanIndexing.replaceWhere(output, softmaxClipEps, Conditions.lessThan(softmaxClipEps));
        BooleanIndexing.replaceWhere(output, 1.0-softmaxClipEps, Conditions.greaterThan(1.0-softmaxClipEps));
    }
    INDArray scoreArr = Transforms.log(output, false).muli(labels);  // log in place, then * labels

    //Weighted loss function
    if (weights != null) {
        if (weights.length() != scoreArr.size(1)) {
            throw new IllegalStateException("Weights vector (length " + weights.length()
                            + ") does not match output.size(1)=" + preOutput.size(1));
        }
        scoreArr.muliRowVector(weights);  // per-class weights broadcast across rows
    }

    if (mask != null) {
        LossUtil.applyMask(scoreArr, mask);  // zero out masked entries
    }
    return scoreArr;
}
 
Example #15
Source File: LossSquaredHinge.java    From nd4j with Apache License 2.0 5 votes vote down vote up
@Override
public INDArray computeScoreArray(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
    // Per-example squared hinge loss: sum over outputs of max(0, 1 - y*yhat)^2.
    INDArray scoreArr = scoreArray(labels, preOutput, activationFn, mask);
    BooleanIndexing.replaceWhere(scoreArr, 0.0, Conditions.lessThan(0.0));//max(0,1-y*yhat)
    scoreArr.muli(scoreArr);  // square the clipped hinge term, in place
    return scoreArr.sum(1);   // reduce over outputs -> one score per example
}
 
Example #16
Source File: CpuFlexibleThreshold.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Override
public DataBuffer compress(DataBuffer buffer) {
    // Wrap the raw buffer as a 1 x length INDArray so reductions can be run on it.
    INDArray temp = Nd4j.createArrayFromShapeBuffer(buffer, Nd4j.getShapeInfoProvider().createShapeInformation(new long[]{1, buffer.length()}, DataType.INT).getFirst());
    double max = temp.amaxNumber().doubleValue();

    // Count elements whose magnitude is within `threshold` of the max |value|;
    // only those elements will be stored in the compressed form.
    int cntAbs = temp.scan(Conditions.absGreaterThanOrEqual(max - (max * threshold))).intValue();

    long originalLength = buffer.length() * Nd4j.sizeOfDataType(buffer.dataType());
    int compressedLength = cntAbs + 4;
    // first 4 ints contain the header: count, original length, threshold bits, reserved
    IntPointer pointer = new IntPointer(compressedLength);
    pointer.put(0, cntAbs);
    pointer.put(1, (int) buffer.length());
    pointer.put(2, Float.floatToIntBits(threshold)); // please note, this value will be overwritten anyway
    pointer.put(3, 0);

    CompressionDescriptor descriptor = new CompressionDescriptor();
    descriptor.setCompressedLength(compressedLength * 4); // sizeOf(INT)
    descriptor.setOriginalLength(originalLength);
    descriptor.setOriginalElementSize(Nd4j.sizeOfDataType(buffer.dataType()));
    descriptor.setNumberOfElements(buffer.length());

    descriptor.setCompressionAlgorithm(getDescriptor());
    descriptor.setCompressionType(getCompressionType());

    CompressedDataBuffer cbuff = new CompressedDataBuffer(pointer, descriptor);

    // Delegate the actual threshold encoding to the native conversion routine.
    Nd4j.getNDArrayFactory().convertDataEx(getBufferTypeEx(buffer), buffer.addressPointer(), DataTypeEx.FTHRESHOLD, pointer, buffer.length());

    // The compressed result currently lives in host memory.
    Nd4j.getAffinityManager().tagLocation(buffer, AffinityManager.Location.HOST);

    return cbuff;
}
 
Example #17
Source File: BooleanIndexingTest.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testCaRPairwiseTransform1() {
    // CompareAndReplace: where a source element matches the condition (< 1),
    // it is replaced by the corresponding element of the second array. Here the
    // only matching element (0 at index 2) becomes 3, so the result equals the
    // replacement array.
    INDArray source = Nd4j.create(new double[] {1, 2, 0, 4, 5});
    INDArray replacement = Nd4j.create(new double[] {1, 2, 3, 4, 5});

    INDArray result = Nd4j.exec(new CompareAndReplace(source, replacement, Conditions.lessThan(1)));

    assertEquals(replacement, result);
}
 
Example #18
Source File: LossSquaredHinge.java    From nd4j with Apache License 2.0 5 votes vote down vote up
@Override
public INDArray computeGradient(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
    // Gradient of the squared hinge loss w.r.t. preOutput.
    if (labels.size(1) != preOutput.size(1)) {
        throw new IllegalArgumentException(
                        "Labels array numColumns (size(1) = " + labels.size(1) + ") does not match output layer"
                                        + " number of outputs (nOut = " + preOutput.size(1) + ") ");

    }
    INDArray scoreArr = scoreArray(labels, preOutput, activationFn, mask);  // hinge term per element

    INDArray bitMaskRowCol = scoreArr.dup();
    /*
        bit mask is 0 if 1-sigma(y*yhat) is neg, bit mask is 1 if 1-sigma(y*yhat) is +ve
     */
    BooleanIndexing.replaceWhere(bitMaskRowCol, 0.0, Conditions.lessThan(0.0));
    BooleanIndexing.replaceWhere(bitMaskRowCol, 1.0, Conditions.greaterThan(0.0));

    // d/da of (1 - y*a)^2 = -2y(1 - y*a); zeroed where the hinge is inactive via the mask.
    INDArray dLda = scoreArr.muli(2).muli(labels.neg());
    dLda.muli(bitMaskRowCol);

    if (mask != null && LossUtil.isPerOutputMasking(dLda, mask)) {
        //For *most* activation functions: we don't actually need to mask dL/da in addition to masking dL/dz later
        //but: some, like softmax, require both (due to dL/dz_i being a function of dL/da_j, for i != j)
        //We could add a special case for softmax (activationFn instanceof ActivationSoftmax) but that would be
        // error prone - though buy us a tiny bit of performance
        LossUtil.applyMask(dLda, mask);
    }

    INDArray gradients = activationFn.backprop(preOutput, dLda).getFirst(); //TODO activation functions with params

    if (mask != null) {
        LossUtil.applyMask(gradients, mask);  // standard per-example masking of dL/dz
    }

    return gradients;
}
 
Example #19
Source File: BooleanIndexingTest.java    From nd4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testLastIndex1() {
    // Values >= 3 occupy indices 2..8; the trailing 0 at index 9 does not match,
    // so the last matching index must be 8.
    INDArray arr = Nd4j.create(new double[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 0});
    INDArray lastIdx = BooleanIndexing.lastIndex(arr, Conditions.greaterThanOrEqual(3));

    assertEquals(8, lastIdx.getDouble(0), 0.0);
}
 
Example #20
Source File: LossSquaredHinge.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Override
public INDArray computeGradient(INDArray labels, INDArray preOutput, IActivation activationFn, INDArray mask) {
    // Gradient of the squared hinge loss w.r.t. preOutput.
    if(!labels.equalShapes(preOutput)){
        Preconditions.throwEx("Labels and preOutput must have equal shapes: got shapes %s vs %s", labels.shape(), preOutput.shape());
    }
    labels = labels.castTo(preOutput.dataType());   //No-op if already correct dtype
    INDArray scoreArr = scoreArray(labels, preOutput, activationFn, mask);  // hinge term per element

    INDArray bitMaskRowCol = scoreArr.dup();
    /*
        bit mask is 0 if 1-sigma(y*yhat) is neg, bit mask is 1 if 1-sigma(y*yhat) is +ve
     */
    BooleanIndexing.replaceWhere(bitMaskRowCol, 0.0, Conditions.lessThan(0.0));
    BooleanIndexing.replaceWhere(bitMaskRowCol, 1.0, Conditions.greaterThan(0.0));

    // d/da of (1 - y*a)^2 = -2y(1 - y*a); zeroed where the hinge is inactive via the mask.
    INDArray dLda = scoreArr.muli(2).muli(labels.neg());
    dLda.muli(bitMaskRowCol);

    if (mask != null && LossUtil.isPerOutputMasking(dLda, mask)) {
        //For *most* activation functions: we don't actually need to mask dL/da in addition to masking dL/dz later
        //but: some, like softmax, require both (due to dL/dz_i being a function of dL/da_j, for i != j)
        //We could add a special case for softmax (activationFn instanceof ActivationSoftmax) but that would be
        // error prone - though buy us a tiny bit of performance
        LossUtil.applyMask(dLda, mask);
    }

    INDArray gradients = activationFn.backprop(preOutput, dLda).getFirst(); //TODO activation functions with params

    if (mask != null) {
        LossUtil.applyMask(gradients, mask);  // standard per-example masking of dL/dz
    }

    return gradients;
}
 
Example #21
Source File: BooleanIndexingTest.java    From nd4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testFirstIndex1() {
    // Values >= 3 first occur at index 2 (the value 3).
    INDArray arr = Nd4j.create(new double[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 0});
    INDArray firstIdx = BooleanIndexing.firstIndex(arr, Conditions.greaterThanOrEqual(3));

    assertEquals(2, firstIdx.getDouble(0), 0.0);
}
 
Example #22
Source File: LossFunctionTest.java    From nd4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testClippingXENT() throws Exception {
    // l1: binary cross-entropy with clipping disabled (eps = 0) -> log(0) yields
    // NaN for saturated sigmoid outputs. l2: default clipping -> finite results.
    ILossFunction l1 = new LossBinaryXENT(0);
    ILossFunction l2 = new LossBinaryXENT();

    // Random 0/1 labels; pre-activations so negative that sigmoid saturates to ~0.
    INDArray labels = Nd4j.getExecutioner().exec(new BernoulliDistribution(Nd4j.create(3,5), 0.5));
    INDArray preOut = Nd4j.valueArrayOf(3, 5, -1000.0);

    IActivation a = new ActivationSigmoid();

    double score1 = l1.computeScore(labels, preOut.dup(), a, null, false);
    assertTrue(Double.isNaN(score1));   // unclipped: NaN expected

    double score2 = l2.computeScore(labels, preOut.dup(), a, null, false);
    assertFalse(Double.isNaN(score2));  // clipped: must stay finite

    INDArray grad1 = l1.computeGradient(labels, preOut.dup(), a, null);
    INDArray grad2 = l2.computeGradient(labels, preOut.dup(), a, null);

    // Count NaN entries in each gradient array.
    MatchCondition c1 = new MatchCondition(grad1, Conditions.isNan());
    MatchCondition c2 = new MatchCondition(grad2, Conditions.isNan());
    int match1 = Nd4j.getExecutioner().exec(c1, Integer.MAX_VALUE).getInt(0);
    int match2 = Nd4j.getExecutioner().exec(c2, Integer.MAX_VALUE).getInt(0);

    assertTrue(match1 > 0);     // unclipped gradient contains NaNs
    assertEquals(0, match2);    // clipped gradient is NaN-free
}
 
Example #23
Source File: BooleanIndexingTest.java    From nd4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testConditionalUpdate() {
    // values = {-2, -1, 0, 1, 2}; target starts as all ones. Where an element of
    // `values` equals 0.0, it is written into `target`; elsewhere target keeps its 1.
    INDArray values = Nd4j.linspace(-2, 2, 5);
    INDArray target = Nd4j.ones(5);
    INDArray expected = Nd4j.create(new double[] {1, 1, 0, 1, 1});


    Nd4j.getExecutioner().exec(new CompareAndSet(target, values, target, Conditions.equals(0.0)));

    assertEquals(expected, target);
}
 
Example #24
Source File: BooleanIndexingTest.java    From nd4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testMatchConditionAlongDimension3() throws Exception {
    // Rows 0-1 are all ones; row 2 is {1, 0, 0, ..., 0}. No row is entirely
    // negative, so an "and" with (< 0) along dimension 1 is false for every row.
    INDArray array = Nd4j.ones(3, 10);
    array.getRow(2).assign(0.0).putScalar(0, 1.0);

    boolean[] rowMatches = BooleanIndexing.and(array, Conditions.lessThan(0.0), 1);
    boolean[] expected = new boolean[] {false, false, false};

    System.out.println("Result: " + Arrays.toString(rowMatches));
    assertArrayEquals(expected, rowMatches);
}
 
Example #25
Source File: TestWeightNoise.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testDropConnectValues() {
    Nd4j.getRandom().setSeed(12345);

    // Network with all weights initialized to 1.0, so dropped connections (0) are
    // unambiguously distinguishable from kept ones.
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .weightInit(WeightInit.ONES)
            .list()
            .layer(new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).build())
            .build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();

    Layer l = net.getLayer(0);
    DropConnect d = new DropConnect(0.5);

    // Inference mode: parameter must be returned untouched — same object, no copy.
    // (Fixed: use assertSame rather than assertTrue(a == b) for reference identity.)
    INDArray outTest = d.getParameter(l, "W", 0, 0, false, LayerWorkspaceMgr.noWorkspaces());
    assertSame(l.getParam("W"), outTest);
    // Training mode: a different, modified array must be returned...
    INDArray outTrain = d.getParameter(l, "W", 0, 0, true, LayerWorkspaceMgr.noWorkspaces());
    assertNotEquals(l.getParam("W"), outTrain);

    // ...and the stored parameters must not have been mutated.
    assertEquals(l.getParam("W"), Nd4j.ones(DataType.FLOAT, 10, 10));

    int countZeros = Nd4j.getExecutioner().exec(new MatchCondition(outTrain, Conditions.equals(0))).getInt(0);
    int countOnes = Nd4j.getExecutioner().exec(new MatchCondition(outTrain, Conditions.equals(1))).getInt(0);

    // All 100 elements must be exactly 0 or 1. (Previous comment said "0 or 2",
    // which contradicts the counts asserted here.)
    assertEquals(100, countZeros + countOnes);
    //Stochastic with p=0.5, but this should hold for most seeds
    assertTrue(countZeros >= 25 && countZeros <= 75);
    assertTrue(countOnes >= 25 && countOnes <= 75);
}
 
Example #26
Source File: BooleanIndexingTest.java    From nd4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testMatchConditionAlongDimension1() throws Exception {
    // Rows 0-1 are all ones; row 2 is all zeros. Along dimension 1, a row
    // matches only if EVERY element equals 0.0 — true only for row 2.
    INDArray array = Nd4j.ones(3, 10);
    array.getRow(2).assign(0.0);

    boolean[] rowMatches = BooleanIndexing.and(array, Conditions.equals(0.0), 1);
    boolean[] expected = new boolean[] {false, false, true};

    System.out.println("Result: " + Arrays.toString(rowMatches));
    assertArrayEquals(expected, rowMatches);
}
 
Example #27
Source File: BooleanIndexingTest.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testMatchConditionAllDimensions2() {
    // Exactly one NaN in the array; a full-array MatchCondition should count it.
    INDArray array = Nd4j.create(new double[] {0, 1, 2, 3, Double.NaN, 5, 6, 7, 8, 9});

    int nanCount = (int) Nd4j.getExecutioner().exec(new MatchCondition(array, Conditions.isNan()))
            .getDouble(0);

    assertEquals(1, nanCount);
}
 
Example #28
Source File: BooleanIndexingTest.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testCaRPairwiseTransform3() {
    // CompareAndReplace: elements of x matching (< 2) — indices 0 and 2 —
    // are replaced by the corresponding elements of y.
    INDArray x = Nd4j.create(new double[] {1, 2, 0, 4, 5});
    INDArray y = Nd4j.create(new double[] {2, 4, 3, 4, 5});
    INDArray expected = Nd4j.create(new double[] {2, 2, 3, 4, 5});

    INDArray result = Nd4j.exec(new CompareAndReplace(x, y, Conditions.lessThan(2)));

    assertEquals(expected, result);
}
 
Example #29
Source File: BooleanIndexingTest.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void test2dAnd3() {
    // All zeros except one tiny positive value; "and" with (> 0) must still be
    // false, because not EVERY element satisfies the condition.
    INDArray array = Nd4j.zeros(10, 10);
    array.slice(4).putScalar(2, 1e-5f);

    assertFalse(BooleanIndexing.and(array, Conditions.greaterThan(0f)));
}
 
Example #30
Source File: BooleanIndexingTest.java    From nd4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testCaRPairwiseTransform2() throws Exception {
    // In-place CompareAndReplace: where x is (eps-)nonzero, x takes the value
    // from y; the zero at index 2 is left untouched.
    INDArray x = Nd4j.create(new double[] {1, 2, 0, 4, 5});
    INDArray y = Nd4j.create(new double[] {2, 4, 3, 4, 5});
    INDArray expected = Nd4j.create(new double[] {2, 4, 0, 4, 5});

    Nd4j.getExecutioner().exec(new CompareAndReplace(x, y, Conditions.epsNotEquals(0.0)));

    assertEquals(expected, x);
}