Java Code Examples for org.nd4j.linalg.api.shape.Shape#normalizeAxis()

The following examples show how to use org.nd4j.linalg.api.shape.Shape#normalizeAxis(). They are taken from the nd4j and deeplearning4j projects; the source file and license for each are noted above the example.
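Based on the tests below, Shape.normalizeAxis(rank, axis) converts negative axis indices into their positive equivalents (for rank 2, an axis of -2 becomes 0), removes duplicate entries, returns the axes in ascending order, and throws ND4JIllegalStateException when an axis is out of range for the given rank. A minimal standalone sketch of that behavior, assuming a recent ND4J version on the classpath (the class name and axis values here are illustrative only):

import java.util.Arrays;

import org.nd4j.linalg.api.shape.Shape;

public class NormalizeAxisDemo {
    public static void main(String[] args) {
        int rank = 3;
        // Negative axes count back from the end: for rank 3, -1 -> 2 and -3 -> 0
        int[] normalized = Shape.normalizeAxis(rank, new int[] {-1, -3});
        System.out.println(Arrays.toString(normalized)); // expected: [0, 2], sorted and deduplicated
    }
}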
Example 1
Source File: ShapeTestC.java    From nd4j with Apache License 2.0
@Test
public void testAxisNormalization_1() throws Exception {
    val axis = new int[] {1, -2};
    val rank = 2;
    val exp = new int[] {0, 1};

    val norm = Shape.normalizeAxis(rank, axis);
    assertArrayEquals(exp, norm);
}
 
Example 2
Source File: ShapeTestC.java    From nd4j with Apache License 2.0
@Test
public void testAxisNormalization_2() throws Exception {
    val axis = new int[] {1, -2, 0};
    val rank = 2;
    val exp = new int[] {0, 1};

    val norm = Shape.normalizeAxis(rank, axis);
    assertArrayEquals(exp, norm);
}
 
Example 3
Source File: ShapeTestC.java    From nd4j with Apache License 2.0
@Test(expected = ND4JIllegalStateException.class)
public void testAxisNormalization_3() throws Exception {
    val axis = new int[] {1, -2, 2};
    val rank = 2;
    val exp = new int[] {0, 1};

    // axis 2 is out of range for rank 2, so normalizeAxis is expected to throw ND4JIllegalStateException
    val norm = Shape.normalizeAxis(rank, axis);
    assertArrayEquals(exp, norm);
}
 
Example 4
Source File: ShapeTestC.java    From nd4j with Apache License 2.0
@Test
public void testAxisNormalization_4() throws Exception {
    val axis = new int[] {1, 2, 0};
    val rank = 3;
    val exp = new int[] {0, 1, 2};

    val norm = Shape.normalizeAxis(rank, axis);
    assertArrayEquals(exp, norm);
}
 
Example 5
Source File: ShapeTestC.java    From deeplearning4j with Apache License 2.0
@Test
public void testAxisNormalization_1() {
    val axis = new int[] {1, -2};
    val rank = 2;
    val exp = new int[] {0, 1};

    val norm = Shape.normalizeAxis(rank, axis);
    assertArrayEquals(exp, norm);
}
 
Example 6
Source File: ShapeTestC.java    From deeplearning4j with Apache License 2.0
@Test
public void testAxisNormalization_2() {
    val axis = new int[] {1, -2, 0};
    val rank = 2;
    val exp = new int[] {0, 1};

    val norm = Shape.normalizeAxis(rank, axis);
    assertArrayEquals(exp, norm);
}
 
Example 7
Source File: ShapeTestC.java    From deeplearning4j with Apache License 2.0
@Test(expected = ND4JIllegalStateException.class)
public void testAxisNormalization_3() {
    val axis = new int[] {1, -2, 2};
    val rank = 2;
    val exp = new int[] {0, 1};

    // axis 2 is out of range for rank 2, so normalizeAxis is expected to throw ND4JIllegalStateException
    val norm = Shape.normalizeAxis(rank, axis);
    assertArrayEquals(exp, norm);
}
 
Example 8
Source File: ShapeTestC.java    From deeplearning4j with Apache License 2.0
@Test
public void testAxisNormalization_4() {
    val axis = new int[] {1, 2, 0};
    val rank = 3;
    val exp = new int[] {0, 1, 2};

    val norm = Shape.normalizeAxis(rank, axis);
    assertArrayEquals(exp, norm);
}
 
Example 9
Source File: BaseOp.java    From deeplearning4j with Apache License 2.0
protected void defineDimensions(int... dimensions){
    if (dimensions != null && dimensions.length > 0) {
        if(x != null) {
            dimensions = Shape.normalizeAxis(x.rank(), dimensions);
        }
    }

    if (dimensions == null || dimensions.length == 0)
        dimensions = new int[]{Integer.MAX_VALUE};

    try(MemoryWorkspace ws = Nd4j.getWorkspaceManager().scopeOutOfWorkspaces()) {
        this.dimensionz = Shape.ndArrayDimFromInt(dimensions);
    }
}
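The defineDimensions pattern above shows how normalizeAxis is typically combined with ND4J's "all dimensions" sentinel: supplied axes are normalized against the rank of the input x, and a null or empty axis list falls back to Integer.MAX_VALUE, which the native executioners treat as a full-array reduction. Below is a minimal sketch of the same normalize-then-default logic as a standalone helper (DimensionUtil and resolveDimensions are hypothetical names, not part of the ND4J API):

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.api.shape.Shape;

public class DimensionUtil {
    // Mirrors the normalize-then-default behavior of defineDimensions above
    public static int[] resolveDimensions(INDArray x, int... dimensions) {
        if (x != null && dimensions != null && dimensions.length > 0) {
            // Map negative axes and drop duplicates relative to x's rank
            return Shape.normalizeAxis(x.rank(), dimensions);
        }
        // No axes supplied: Integer.MAX_VALUE signals a reduction over the whole array
        return new int[] {Integer.MAX_VALUE};
    }
}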
 
Example 10
Source File: NativeOpExecutioner.java    From nd4j with Apache License 2.0
/**
 * Executes a ScalarOp along the given dimension(s).
 * @param op        the scalar op to execute
 * @param dimension the dimension(s) to apply the op along, normalized against op.x().rank()
 */
private void invoke(ScalarOp op, int[] dimension) {
    dimension = Shape.normalizeAxis(op.x().rank(), dimension);
    // do tad magic
    /**
     * Returns the {@link Shape#createShapeInformation(int[], int[], int, int, char)}
     * and the associated offsets for each {@link INDArray#tensorAlongDimension(int, int...)}
     * The first item is the shape information. The second one is the offsets.
     */
    Pair<DataBuffer, DataBuffer> tadBuffers = tadManager.getTADOnlyShapeInfo(op.x(), dimension);

    Pointer hostTadShapeInfo = tadBuffers.getFirst().addressPointer();
    Pointer hostTadOffsets = tadBuffers.getSecond().addressPointer();

    Pointer devTadShapeInfoZ = null;
    Pointer devTadOffsetsZ = null;
    /**
     * Returns the {@link Shape#createShapeInformation(int[], int[], int, int, char)}
     * and the associated offsets for each {@link INDArray#tensorAlongDimension(int, int...)}
     * The first item is the shape information. The second one is the offsets.
     *
     * Note that this is the *result* TAD information. An op always has an input (x) and an output (z);
     * this is used for assigning the result of the operation along the proper dimension.
     */
    Pair<DataBuffer, DataBuffer> tadBuffersZ = tadManager.getTADOnlyShapeInfo(op.z(), dimension);

    devTadShapeInfoZ = tadBuffersZ.getFirst().addressPointer();
    devTadOffsetsZ = tadBuffersZ.getSecond().addressPointer();

    if (extraz.get() == null)
        extraz.set(new PointerPointer(32));

    PointerPointer dummy = extraz.get().put(hostTadShapeInfo, hostTadOffsets, devTadShapeInfoZ, devTadOffsetsZ);


    if (op.x().data().dataType() == DataBuffer.Type.FLOAT) {
        loop.execScalarFloat(dummy, op.opNum(), (FloatPointer) op.x().data().addressPointer(),
                (LongPointer) op.x().shapeInfoDataBuffer().addressPointer(),
                (FloatPointer) op.z().data().addressPointer(),
                (LongPointer) op.z().shapeInfoDataBuffer().addressPointer(),
                (FloatPointer) op.y().data().addressPointer(), (FloatPointer) getPointerForExtraArgs(op),
                (IntPointer) Nd4j.getConstantHandler().getConstantBuffer(dimension).addressPointer(), dimension.length);
    } else if (op.x().data().dataType() == DataBuffer.Type.DOUBLE) {
        loop.execScalarDouble(dummy, op.opNum(), (DoublePointer) op.x().data().addressPointer(),
                (LongPointer) op.x().shapeInfoDataBuffer().addressPointer(),
                (DoublePointer) op.z().data().addressPointer(),
                (LongPointer) op.z().shapeInfoDataBuffer().addressPointer(),
                (DoublePointer) op.y().data().addressPointer(), (DoublePointer) getPointerForExtraArgs(op),
                (IntPointer) Nd4j.getConstantHandler().getConstantBuffer(dimension).addressPointer(), dimension.length);
    }
}
 
Example 11
Source File: NativeOpExecutioner.java    From nd4j with Apache License 2.0
@Override
public INDArray exec(BroadcastOp op, int... dimension) {
    long st = profilingHookIn(op);
    if(dimension == null)
        dimension = new int[] {Integer.MAX_VALUE};
    dimension = Shape.normalizeAxis(op.x().rank(), dimension);

    validateDataType(Nd4j.dataType(), op);

    for (int i = 0; i < dimension.length; i++)
        if (dimension[i] >= op.x().rank() && dimension[i] != Integer.MAX_VALUE)
            throw new ND4JIllegalStateException("Op target dimension " + Arrays.toString(dimension)
                    + " contains element that higher then rank of op.X: [" + op.x().rank() + "]");
    /**
     * Returns the {@link Shape#createShapeInformation(int[], int[], int, int, char)}
     * and the associated offsets for each {@link INDArray#tensorAlongDimension(int, int...)}
     * The first item is the shape information. The second one is the offsets.
     */
    Pair<DataBuffer, DataBuffer> tadBuffers = tadManager.getTADOnlyShapeInfo(op.x(), dimension);

    Pointer hostTadShapeInfo = tadBuffers.getFirst().addressPointer();
    Pointer hostTadOffsets = tadBuffers.getSecond().addressPointer();

    Pointer devTadShapeInfoZ = null;
    Pointer devTadOffsetsZ = null;

    //        if (!Arrays.equals(op.x().shape(),op.z().shape()) || !Arrays.equals(op.x().stride(),op.z().stride()) || op.x().ordering() != op.z().ordering()) {
    // that's the place where we're going to have second TAD in place
    Pair<DataBuffer, DataBuffer> tadBuffersZ = tadManager.getTADOnlyShapeInfo(op.z(), dimension);

    devTadShapeInfoZ = tadBuffersZ.getFirst().addressPointer();
    devTadOffsetsZ = tadBuffersZ.getSecond().addressPointer();
    /*
    log.info("Broascast dimension: {}", Arrays.toString(dimension));
    log.info("x shape: {}; x TAD: {}; comp TAD: {}", Arrays.toString(op.x().shapeInfoDataBuffer().asInt()), Arrays.toString(tadBuffers.getFirst().asInt()), Arrays.toString(op.x().tensorAlongDimension(0, dimension).shapeInfoDataBuffer().asInt()));
    log.info("z shape: {}; z TAD: {}", Arrays.toString(op.z().shapeInfoDataBuffer().asInt()), Arrays.toString(tadBuffersZ.getFirst().asInt()));
    log.info("y shape: {}", Arrays.toString(op.y().shapeInfoDataBuffer().asInt()));
    log.info("-------------");
    */

    if (extraz.get() == null)
        extraz.set(new PointerPointer(32));

    PointerPointer dummy = extraz.get().put(hostTadShapeInfo, hostTadOffsets, devTadShapeInfoZ, devTadOffsetsZ);

    Pointer dimensionAddress = constantHandler.getConstantBuffer(dimension).addressPointer();

    if (op.x().data().dataType() == DataBuffer.Type.DOUBLE) {
        loop.execBroadcastDouble(dummy, op.opNum(), (DoublePointer) op.x().data().addressPointer(),
                (LongPointer) op.x().shapeInfoDataBuffer().addressPointer(),
                (DoublePointer) op.y().data().addressPointer(),
                (LongPointer) op.y().shapeInfoDataBuffer().addressPointer(),
                (DoublePointer) op.z().data().addressPointer(),
                (LongPointer) op.z().shapeInfoDataBuffer().addressPointer(), (IntPointer) dimensionAddress,
                dimension.length);
    } else {
        loop.execBroadcastFloat(dummy, op.opNum(), (FloatPointer) op.x().data().addressPointer(),
                (LongPointer) op.x().shapeInfoDataBuffer().addressPointer(),
                (FloatPointer) op.y().data().addressPointer(),
                (LongPointer) op.y().shapeInfoDataBuffer().addressPointer(),
                (FloatPointer) op.z().data().addressPointer(),
                (LongPointer) op.z().shapeInfoDataBuffer().addressPointer(), (IntPointer) dimensionAddress,
                dimension.length);
    }

    return op.z();
}
 
Example 12
Source File: NativeOpExecutioner.java    From deeplearning4j with Apache License 2.0
public INDArray exec(IndexAccumulation op, OpContext oc) {
    checkForCompression(op);

    INDArray x = getX(op, oc);
    INDArray z = getZ(op, oc);

    if (extraz.get() == null)
        extraz.set(new PointerPointer(32));

    val dimension = Shape.normalizeAxis(x.rank(), op.dimensions().toIntVector());

    if (x.isEmpty()) {
        for (val d:dimension) {
            Preconditions.checkArgument(x.shape()[d] != 0, "IndexReduce can't be issued along axis with 0 in shape");
        }
    }

    boolean keepDims = op.isKeepDims();
    long[] retShape = Shape.reductionShape(x, dimension, true, keepDims);

    if(z == null || x == z) {
        val ret = Nd4j.createUninitialized(DataType.LONG, retShape);

        setZ(ret, op, oc);
        z = ret;
    } else if(!Arrays.equals(retShape, z.shape())){
        throw new IllegalStateException("Z array shape does not match expected return type for op " + op
                + ": expected shape " + Arrays.toString(retShape) + ", z.shape()=" + Arrays.toString(z.shape()));
    }

    op.validateDataTypes();

    Pointer dimensionAddress = constantHandler.getConstantBuffer(dimension, DataType.INT).addressPointer();

    Pair<DataBuffer, DataBuffer> tadBuffers = tadManager.getTADOnlyShapeInfo(x, dimension);

    Pointer hostTadShapeInfo = tadBuffers.getFirst().addressPointer();

    DataBuffer offsets = tadBuffers.getSecond();
    Pointer hostTadOffsets = offsets == null ? null : offsets.addressPointer();

    PointerPointer dummy = extraz.get().put(hostTadShapeInfo, hostTadOffsets);

    long st = profilingConfigurableHookIn(op, tadBuffers.getFirst());

    val xb = ((BaseCpuDataBuffer) x.data()).getOpaqueDataBuffer();
    val zb = ((BaseCpuDataBuffer) z.data()).getOpaqueDataBuffer();

    if (z.isScalar()) {
        loop.execIndexReduceScalar(dummy, op.opNum(),
                xb, (LongPointer) x.shapeInfoDataBuffer().addressPointer(), null,
                getPointerForExtraArgs(op, x.dataType()),
                zb, (LongPointer) z.shapeInfoDataBuffer().addressPointer(), null);
    } else {
        loop.execIndexReduce(dummy, op.opNum(),
                xb, (LongPointer) x.shapeInfoDataBuffer().addressPointer(), null,
                getPointerForExtraArgs(op, x.dataType()),
                zb, (LongPointer) z.shapeInfoDataBuffer().addressPointer(), null,
                ((BaseCpuDataBuffer) op.dimensions().data()).getOpaqueDataBuffer(), (LongPointer) op.dimensions().shapeInfoDataBuffer().addressPointer(), null);
    }

    if (loop.lastErrorCode() != 0)
        throw new RuntimeException(loop.lastErrorMessage());

    profilingConfigurableHookOut(op, oc, st);
    return getZ(op, oc);
}
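From user code, this normalization is what allows negative axes to be passed to index reductions such as argMax: the requested dimensions go through Shape.normalizeAxis before the native call, as shown in the example above. A minimal usage sketch, assuming a recent ND4J version where Nd4j.rand(DataType, long...) and INDArray.argMax(int...) are available:

import java.util.Arrays;

import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class ArgMaxNegativeAxisDemo {
    public static void main(String[] args) {
        INDArray x = Nd4j.rand(DataType.FLOAT, 3, 4);
        // Axis -1 is normalized to axis 1, the last axis of this rank-2 array
        INDArray idx = x.argMax(-1);
        System.out.println(Arrays.toString(idx.shape())); // expected: [3]
    }
}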