Java Code Examples for org.nd4j.linalg.api.ndarray.INDArray#ulike()

The following examples show how to use org.nd4j.linalg.api.ndarray.INDArray#ulike(). All of them are drawn from the deeplearning4j project; the source file is noted above each example.
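The method's contract is simple: ulike() allocates a new, uninitialized array with the same shape and data type as the array it is called on, which makes it a cheap way to obtain an output buffer that an op will overwrite completely. A minimal sketch of the pattern (illustrative names; IsMax is used the same way in Example 4 below):

INDArray source = Nd4j.rand(DataType.FLOAT, 3, 4);
INDArray buffer = source.ulike();          // same shape and dtype as source; contents undefined
Nd4j.exec(new IsMax(source, buffer, 1));   // the op fills every element of buffer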
Example 1
Source File: LayerOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testLayerNorm() {
    final INDArray random = Nd4j.rand(DataType.DOUBLE, 10, 4);
    final INDArray standardized = random.ulike();
    Nd4j.getExecutioner().exec(new Standardize(random, standardized, 1));

    final INDArray gain = Nd4j.rand(DataType.DOUBLE, 4);
    final INDArray bias = Nd4j.rand(DataType.DOUBLE, 4);
    final INDArray res = standardized.mulRowVector(gain).addRowVector(bias);
    final INDArray expOut = res.norm1();

    final int[] axis = new int[]{1};
    SameDiff sd = SameDiff.create();
    SDVariable sdInput = sd.var("input", standardized);
    SDVariable sdGain = sd.var("gain", gain);
    SDVariable sdBias = sd.var("bias", bias);
    SDVariable out = sd.nn.layerNorm(sdInput, sdGain, sdBias, true, axis);
    out.norm1("out");

    String err = OpValidation.validate(new TestCase(sd)
            .expectedOutput("out", expOut)
            .gradientCheck(true));
    assertNull(err);
}
 
Example 2
Source File: LayerOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testLayerNormNoBias() {
    final INDArray random = Nd4j.rand(DataType.DOUBLE, 10, 4);
    final INDArray standardized = random.ulike();
    Nd4j.getExecutioner().exec(new Standardize(random, standardized, 1));

    final INDArray gain = Nd4j.rand(DataType.DOUBLE, 4);
    final INDArray res = standardized.mulRowVector(gain);
    final INDArray expOut = res.norm1();

    final int[] axis = new int[]{1};
    SameDiff sd = SameDiff.create();
    SDVariable sdInput = sd.var("input", standardized);
    SDVariable sdGain = sd.var("gain", gain);
    SDVariable out = sd.nn.layerNorm(sdInput, sdGain, true, axis);
    out.norm1("out");

    String err = OpValidation.validate(new TestCase(sd)
            .expectedOutput("out", expOut)
            .gradientCheck(true));
    assertNull(err, err);
}
 
Example 3
Source File: MiscOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testScatterUpdate(){
    INDArray x = Nd4j.linspace(DataType.FLOAT, 1, 30, 1).reshape(10, 3);
    INDArray updates = Nd4j.create(new float[][]{
            {100, 101, 102},
            {200, 201, 202}});
    INDArray indices = Nd4j.createFromArray(2, 5);

    INDArray exp = x.dup();
    exp.putRow(2, updates.getRow(0));
    exp.putRow(5, updates.getRow(1));

    INDArray out = exp.ulike();
    Nd4j.exec(DynamicCustomOp.builder("scatter_upd")
            .addInputs(x, indices, updates)
            .addOutputs(out)
            .build());

    assertEquals(exp, out);
}
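Note the pattern: exp.ulike() borrows only the shape and data type of exp for the output buffer, and scatter_upd writes every element of out, so the uninitialized contents are never observed.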
 
Example 4
Source File: CustomOpsTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testIsMaxView(){
    INDArray predictions = Nd4j.rand(DataType.FLOAT, 3, 4, 3, 2);

    INDArray row = predictions.get(NDArrayIndex.point(0), NDArrayIndex.all(), NDArrayIndex.point(0), NDArrayIndex.point(0));
    row = row.reshape(1, row.length());
    assertArrayEquals(new long[]{1, 4}, row.shape());

    val result1 = row.ulike();
    val result2 = row.ulike();

    Nd4j.exec(new IsMax(row.dup(), result1, 1));        // OK: runs on a contiguous copy
    Nd4j.exec(new IsMax(row, result2, 1));              // regression check: this view case used to fail with a C++ exception

    assertEquals(result1, result2);
}
 
Example 5
Source File: CustomOpsTests.java    From deeplearning4j with Apache License 2.0
@Test
public void test() throws Exception {

    // Only the shapes and data type matter for this execution test, so the
    // inputs are left as zero-filled BFLOAT16 buffers.
    INDArray in1 = Nd4j.create(DataType.BFLOAT16, 2, 3, 10, 1);
    INDArray in2 = Nd4j.create(DataType.BFLOAT16, 2, 3, 10, 1);

    INDArray out = in1.ulike();

    Nd4j.exec(DynamicCustomOp.builder("maxpool2d_bp")
            .addInputs(in1, in2)
            .addOutputs(out)
            .addIntegerArguments(5,1,1,2,2,0,1,1,1,0,0)
            .build());

    Nd4j.getExecutioner().commit();
}
 
Example 6
Source File: ActivationPReLU.java    From deeplearning4j with Apache License 2.0
@Override
public Pair<INDArray, INDArray> backprop(INDArray in, INDArray epsilon) {
    assertShape(in, epsilon);
    INDArray dLdalpha = alpha.ulike();   // buffer for the gradient w.r.t. alpha
    INDArray outTemp = in.ulike();       // buffer for the gradient w.r.t. the input
    DynamicCustomOp.DynamicCustomOpsBuilder preluBp = DynamicCustomOp.builder("prelu_bp")
            .addInputs(in, alpha, epsilon)
            .addOutputs(outTemp, dLdalpha);

    if (sharedAxes != null) {
        for (long axis: sharedAxes) {
            preluBp.addIntegerArguments(axis);
        }
    }
    Nd4j.exec(preluBp.build());
    in.assign(outTemp);
    return new Pair<>(in, dLdalpha);
}
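Both outputs of prelu_bp are pre-allocated with ulike(): outTemp holds the gradient with respect to the input, which is copied back into in before being returned, and dLdalpha holds the gradient with respect to the learnable alpha.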
 
Example 7
Source File: LayerOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testLayerNormOP() {
    final INDArray random = Nd4j.rand(DataType.DOUBLE, 10, 4);
    final INDArray standardized = random.ulike();
    Nd4j.getExecutioner().exec(new Standardize(random, standardized, 1));

    final INDArray gain = Nd4j.rand(DataType.DOUBLE, 4);
    final INDArray bias = Nd4j.rand(DataType.DOUBLE, 4);
    final INDArray res = standardized.mulRowVector(gain).addRowVector(bias);

    final INDArray output = Nd4j.zerosLike(res);
    Nd4j.getExecutioner().exec(new LayerNorm(standardized, gain, bias, output, true, 1));

    assertEquals(res, output);
}
 
Example 8
Source File: LayerOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testLayerNormOPNoBias() {
    final INDArray random = Nd4j.rand(DataType.DOUBLE, 10, 4);
    final INDArray standardized = random.ulike();
    Nd4j.getExecutioner().exec(new Standardize(random, standardized, 1));

    final INDArray gain = Nd4j.rand(DataType.DOUBLE, 4);
    final INDArray res = standardized.mulRowVector(gain);

    final INDArray output = Nd4j.zerosLike(res);
    Nd4j.getExecutioner().exec(new LayerNorm(standardized, gain, output, true, 1));

    assertEquals(res, output);
}
 
Example 9
Source File: LayerOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testLayerNormNoDeviation() {
    final INDArray random = Nd4j.rand(DataType.DOUBLE, 10, 4);
    for (int i = 0; i < 4; i++) {
        random.putScalar(1, i, 7);
    }

    final INDArray standardized = random.ulike();
    Nd4j.getExecutioner().exec(new Standardize(random, standardized, 1));

    final INDArray gain = Nd4j.rand(DataType.DOUBLE, 4);
    final INDArray bias = Nd4j.rand(DataType.DOUBLE, 4);
    final INDArray res = standardized.mulRowVector(gain).addRowVector(bias);
    final INDArray expOut = res.norm1();

    final int[] axis = new int[]{1};
    SameDiff sd = SameDiff.create();
    SDVariable sdInput = sd.var("input", standardized);
    SDVariable sdGain = sd.var("gain", gain);
    SDVariable sdBias = sd.var("bias", bias);
    SDVariable out = sd.nn.layerNorm(sdInput, sdGain, sdBias, true, axis);
    out.norm1("out");

    String err = OpValidation.validate(new TestCase(sd)
            .expectedOutput("out", expOut)
            .gradCheckMask(Collections.singletonMap("input", random.neq(7)))
            .gradientCheck(true));
    assertNull(err, err);
}
 
Example 10
Source File: LossSparseMCXENT.java    From deeplearning4j with Apache License 2.0
private INDArray toOneHot(INDArray labels, INDArray preOutput){
    Preconditions.checkState(labels.size(-1) == 1, "Labels for LossSparseMCXENT should be an array of integers " +
            "with first dimension equal to minibatch size, and last dimension having size 1. Got labels array with shape %ndShape", labels);
    INDArray oneHotLabels = preOutput.ulike();
    Nd4j.exec(new OneHot(labels.reshape(labels.length()), oneHotLabels, (int)preOutput.size(-1)));
    return oneHotLabels;
}
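As a usage sketch of the conversion above (hypothetical values; OneHot is invoked exactly as in the method): class indices [1, 0, 2] with three output classes expand to one-hot rows.

INDArray labels = Nd4j.createFromArray(1, 0, 2);         // one class index per example
INDArray preOutput = Nd4j.zeros(DataType.FLOAT, 3, 3);   // only its shape and dtype are used
INDArray oneHot = preOutput.ulike();
Nd4j.exec(new OneHot(labels, oneHot, 3));
// oneHot is now [[0,1,0], [1,0,0], [0,0,1]]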
 
Example 11
Source File: GlobalPoolingLayer.java    From deeplearning4j with Apache License 2.0
private INDArray epsilonHelperFullArray(INDArray inputArray, INDArray epsilon, int[] poolDim) {

        //Broadcast: occurs on the remaining dimensions, after the pool dimensions have been removed.
        //TODO find a more efficient way to do this
        int[] broadcastDims = new int[inputArray.rank() - poolDim.length];
        int count = 0;
        for (int i = 0; i < inputArray.rank(); i++) {
            if (ArrayUtils.contains(poolDim, i))
                continue;
            broadcastDims[count++] = i;
        }

        switch (poolingType) {
            case MAX:
                INDArray isMax = Nd4j.exec(new IsMax(inputArray, inputArray.ulike(), poolDim))[0];
                return Nd4j.getExecutioner().exec(new BroadcastMulOp(isMax, epsilon, isMax, broadcastDims));
            case AVG:
                //if out = avg(in,dims) then dL/dIn = 1/N * dL/dOut
                int n = 1;
                for (int d : poolDim) {
                    n *= inputArray.size(d);
                }
                INDArray ret = inputArray.ulike();
                Nd4j.getExecutioner().exec(new BroadcastCopyOp(ret, epsilon, ret, broadcastDims));
                ret.divi(n);

                return ret;
            case SUM:
                INDArray retSum = inputArray.ulike();
                Nd4j.getExecutioner().exec(new BroadcastCopyOp(retSum, epsilon, retSum, broadcastDims));
                return retSum;
            case PNORM:
                int pnorm = layerConf().getPnorm();

                //First: do forward pass to get pNorm array
                INDArray abs = Transforms.abs(inputArray, true);
                Transforms.pow(abs, pnorm, false);

                INDArray pNorm = Transforms.pow(abs.sum(poolDim), 1.0 / pnorm);

                //dL/dIn = dL/dOut * dOut/dIn
                //dOut/dIn = in .* |in|^(p-2) /  ||in||_p^(p-1), where ||in||_p is the output p-norm

                INDArray numerator;
                if (pnorm == 2) {
                    numerator = inputArray.dup();
                } else {
                    INDArray absp2 = Transforms.pow(Transforms.abs(inputArray, true), pnorm - 2, false);
                    numerator = inputArray.mul(absp2);
                }

                INDArray denom = Transforms.pow(pNorm, pnorm - 1, false);
                denom.rdivi(epsilon);
                Nd4j.getExecutioner().execAndReturn(new BroadcastMulOp(numerator, denom, numerator, broadcastDims));

                return numerator;
            default:
                throw new RuntimeException("Unknown or not supported pooling type: " + poolingType + " " + layerId());
        }
    }
 
Example 12
Source File: CudaExecutioner.java    From deeplearning4j with Apache License 2.0
protected CudaContext invoke(ScalarOp op, OpContext oc) {
        long st = profilingConfigurableHookIn(op);

        checkForCompression(op);

        INDArray x = getX(op, oc);
        INDArray y = getY(op, oc);
        INDArray z = getZ(op, oc);


        if(z == null){
            switch (op.getOpType()) {
                case SCALAR:
                    z = x.ulike();
                    setZ(z, op, oc); // register the same buffer the native op will write into
                    break;
                case SCALAR_BOOL:
                    z = Nd4j.createUninitialized(DataType.BOOL, x.shape());
                    setZ(z, op, oc);
                    break;
                default:
                    throw new ND4JIllegalStateException("Unknown op type: [" + op.getOpType() +"]");
            }
        }

        if (x.length() != z.length())
            throw new ND4JIllegalStateException("op.X length should be equal to op.Y length: ["
                    + Arrays.toString(x.shapeInfoDataBuffer().asInt()) + "] != ["
                    + Arrays.toString(z.shapeInfoDataBuffer().asInt()) + "]");

        if (extraz.get() == null)
            extraz.set(new PointerPointer(32));

        if (CudaEnvironment.getInstance().getConfiguration().isDebug())
            lastOp.set(op.opName());

        if (op.dimensions() != null) {
            intercept(op, op.dimensions().toIntVector());
            return null;
        }

        val context = AtomicAllocator.getInstance().getDeviceContext();

        val hostXShapeInfo = x == null ? null : AddressRetriever.retrieveHostPointer(x.shapeInfoDataBuffer());
        val hostYShapeInfo = op.scalar() == null ? null : AddressRetriever.retrieveHostPointer(op.scalar().shapeInfoDataBuffer());
        val hostZShapeInfo = z == null ? null : AddressRetriever.retrieveHostPointer(z.shapeInfoDataBuffer());

        Pointer xShapeInfo = AtomicAllocator.getInstance().getPointer(x.shapeInfoDataBuffer(), context);
        Pointer extraArgs = op.extraArgs() != null ? AtomicAllocator.getInstance().getPointer(op.extraArgsDataBuff(op.getOpType() == Op.Type.SCALAR_BOOL ? x.dataType() : z.dataType()), context) : null;

        Pointer zShapeInfo = AtomicAllocator.getInstance().getPointer(z.shapeInfoDataBuffer(), context);

        PointerPointer xShapeInfoHostPointer = extraz.get().put(
                AddressRetriever.retrieveHostPointer(x.shapeInfoDataBuffer()), context.getOldStream(),
                AtomicAllocator.getInstance().getDeviceIdPointer(), context.getBufferAllocation(),
                context.getBufferReduction(), context.getBufferScalar(), context.getBufferSpecial(),
                hostYShapeInfo, hostZShapeInfo, null, null);

        val xb = x == null ? null : ((BaseCudaDataBuffer) x.data()).getOpaqueDataBuffer();
        val yb = op.scalar() == null ? null : ((BaseCudaDataBuffer) op.scalar().data()).getOpaqueDataBuffer();
        val zb = z == null ? null : ((BaseCudaDataBuffer) z.data()).getOpaqueDataBuffer();

        switch (op.getOpType()) {
            case SCALAR_BOOL:
                nativeOps.execScalarBool(xShapeInfoHostPointer, op.opNum(),
                        xb, (LongPointer) hostXShapeInfo, (LongPointer) xShapeInfo,
                        zb, (LongPointer) hostZShapeInfo, (LongPointer) zShapeInfo,
                        yb, (LongPointer) hostYShapeInfo, (LongPointer) AtomicAllocator.getInstance().getPointer(op.scalar().shapeInfoDataBuffer(), context),
                        extraArgs);
                break;
            case SCALAR:
                nativeOps.execScalar(xShapeInfoHostPointer, op.opNum(),
                        xb, (LongPointer) hostXShapeInfo, (LongPointer) xShapeInfo,
                        zb, (LongPointer) hostZShapeInfo, (LongPointer) zShapeInfo,
                        yb, (LongPointer) hostYShapeInfo, (LongPointer) AtomicAllocator.getInstance().getPointer(op.scalar().shapeInfoDataBuffer(), context),
                        extraArgs);
                break;
            default:
                throw new UnsupportedOperationException("Unknown op type: " + op.getOpType());
        }

        if (nativeOps.lastErrorCode() != 0)
            throw new RuntimeException(nativeOps.lastErrorMessage());

        profilingConfigurableHookOut(op, oc, st);

        return null;
    }
 
Example 13
Source File: BitwiseXor.java    From deeplearning4j with Apache License 2.0
public BitwiseXor(INDArray x, INDArray y) {
    this(x, y, x.ulike());
}
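A usage sketch for this convenience constructor (hypothetical values; the bitwise ops require integer-typed inputs). Because the constructor allocates the output with x.ulike(), the caller only supplies the operands:

INDArray x = Nd4j.createFromArray(0b1010, 0b1100);
INDArray y = Nd4j.createFromArray(0b0110, 0b1010);
INDArray out = Nd4j.exec(new BitwiseXor(x, y))[0];   // [0b1100, 0b0110], i.e. [12, 6]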
 
Example 14
Source File: ShiftBits.java    From deeplearning4j with Apache License 2.0
public ShiftBits(INDArray x, INDArray y) {
    this(x, y, x.ulike());
}
 
Example 15
Source File: CyclicRShiftBits.java    From deeplearning4j with Apache License 2.0
public CyclicRShiftBits(INDArray input, INDArray shift) {
    this(input, shift, input.ulike());
}
 
Example 16
Source File: CyclicShiftBits.java    From deeplearning4j with Apache License 2.0
public CyclicShiftBits(INDArray input, INDArray shift) {
    this(input, shift, input.ulike());
}
 
Example 17
Source File: ValidateMKLDNN.java    From deeplearning4j with Apache License 2.0
@Test
public void compareBatchNormBackward() throws Exception {
    assumeTrue(Nd4j.getBackend().getClass().getName().toLowerCase().contains("native"));

    Nd4j.getRandom().setSeed(12345);
    INDArray in = Nd4j.rand(DataType.FLOAT, 1, 3, 15, 15);
    INDArray mean = in.mean(0, 2, 3).reshape(1,3);
    INDArray var = in.var(0, 2, 3).reshape(1,3);
    INDArray eps = Nd4j.rand(DataType.FLOAT, in.shape());
    INDArray gamma = Nd4j.rand(DataType.FLOAT, 1,3);
    INDArray beta = Nd4j.rand(DataType.FLOAT, 1,3);
    double e = 1e-3;

    // Gradient buffers allocated via ulike(), matching each source array's shape and type
    INDArray dLdIn = in.ulike();
    INDArray dLdm = mean.ulike();
    INDArray dLdv = var.ulike();
    INDArray dLdg = gamma.ulike();
    INDArray dLdb = beta.ulike();


    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .inferenceWorkspaceMode(WorkspaceMode.NONE)
            .trainingWorkspaceMode(WorkspaceMode.NONE)
            .list()
            .layer(new BatchNormalization.Builder().nIn(3).nOut(3).build())
            .build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
    org.deeplearning4j.nn.layers.normalization.BatchNormalization bn = (org.deeplearning4j.nn.layers.normalization.BatchNormalization) net.getLayer(0);
    assertNotNull(bn.getHelper());
    System.out.println(bn.getHelper());

    net.output(in, true);
    bn.setInput(in, LayerWorkspaceMgr.noWorkspaces());
    Pair<Gradient,INDArray> pcudnn = net.backpropGradient(eps, LayerWorkspaceMgr.noWorkspaces());

    Field f = bn.getClass().getDeclaredField("helper");
    f.setAccessible(true);
    f.set(bn, null);
    assertNull(bn.getHelper());

    net.output(in, true);
    bn.setInput(in, LayerWorkspaceMgr.noWorkspaces());
    Pair<Gradient,INDArray> p = net.backpropGradient(eps, LayerWorkspaceMgr.noWorkspaces());

    INDArray dldin_dl4j = p.getSecond();
    INDArray dldin_helper = pcudnn.getSecond();

    assertTrue(dldin_dl4j.equalsWithEps(dldin_helper, 1e-5));
}
 
Example 18
Source File: RShiftBits.java    From deeplearning4j with Apache License 2.0
public RShiftBits(INDArray input, INDArray shift) {
    this(input, shift, input.ulike());
}
 
Example 19
Source File: BitwiseAnd.java    From deeplearning4j with Apache License 2.0
public BitwiseAnd(INDArray x, INDArray y) {
    this(x, y, x.ulike());
}
 
Example 20
Source File: Transforms.java    From deeplearning4j with Apache License 2.0
/**
 * Element-wise power function: raises each element of ndArray to the
 * corresponding element of power.
 *
 * @param ndArray the array whose elements are raised to a power
 * @param power   the array of exponents, applied element-wise
 * @param dup     if true, the result is written to a new array (allocated
 *                via ulike()) and ndArray is left unmodified; if false, the
 *                operation is applied in place on ndArray
 * @return the array of element-wise powers
 */
public static INDArray pow(INDArray ndArray, INDArray power, boolean dup) {
    INDArray result = (dup ? ndArray.ulike() : ndArray);
    return exec(new PowPairwise(ndArray, power, result));
}
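A usage note (hypothetical values): with dup = true the input is preserved, which is the common case; pass false only when the input array may be overwritten.

INDArray x = Nd4j.createFromArray(2.0, 3.0, 4.0);
INDArray y = Nd4j.createFromArray(2.0, 2.0, 0.5);
INDArray r = Transforms.pow(x, y, true);   // r = [4.0, 9.0, 2.0]; x is unchanged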