Java Code Examples for org.nd4j.linalg.api.ndarray.INDArray#unsafeDuplication()

The following examples show how to use org.nd4j.linalg.api.ndarray.INDArray#unsafeDuplication() . You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: RepeatVector.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Override
public INDArray activate(boolean training, LayerWorkspaceMgr workspaceMgr) {
    // Forward pass requires input to have been set already.
    assertInputSet(false);

    // Treat an unconfigured cache mode as "no caching".
    if (cacheMode == null) {
        cacheMode = CacheMode.NONE;
    }

    INDArray out = preOutput(training, false, workspaceMgr);

    // Cache pre-output only during training, and only when the feed-forward
    // cache workspace is both configured and currently open.
    boolean shouldCache = training
            && cacheMode != CacheMode.NONE
            && workspaceMgr.hasConfiguration(ArrayType.FF_CACHE)
            && workspaceMgr.isWorkspaceOpen(ArrayType.FF_CACHE);
    if (shouldCache) {
        try (MemoryWorkspace borrowed = workspaceMgr.notifyScopeBorrowed(ArrayType.FF_CACHE)) {
            // Duplicate into the cache workspace so it survives the current scope.
            preOutput = out.unsafeDuplication();
        }
    }
    return out;
}
 
Example 2
Source File: Upsampling3D.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Override
public INDArray activate(boolean training, LayerWorkspaceMgr workspaceMgr) {
    // Input must be present before the forward pass.
    assertInputSet(false);
    applyDropOutIfNecessary(training, workspaceMgr);

    // Unconfigured cache mode defaults to NONE.
    if (cacheMode == null) {
        cacheMode = CacheMode.NONE;
    }

    INDArray out = preOutput(training, false, workspaceMgr);

    // Only cache when training and when a feed-forward cache workspace
    // exists and is open; skip otherwise.
    boolean shouldCache = training
            && cacheMode != CacheMode.NONE
            && workspaceMgr.hasConfiguration(ArrayType.FF_CACHE)
            && workspaceMgr.isWorkspaceOpen(ArrayType.FF_CACHE);
    if (shouldCache) {
        try (MemoryWorkspace borrowed = workspaceMgr.notifyScopeBorrowed(ArrayType.FF_CACHE)) {
            // Copy into the cache workspace so the cached array outlives this scope.
            preOutput = out.unsafeDuplication();
        }
    }
    return out;
}
 
Example 3
Source File: Upsampling2D.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Override
public INDArray activate(boolean training, LayerWorkspaceMgr workspaceMgr) {
    // Require input to be set before computing activations.
    assertInputSet(false);
    applyDropOutIfNecessary(training, workspaceMgr);

    // Fall back to NONE when no cache mode was configured.
    if (cacheMode == null) {
        cacheMode = CacheMode.NONE;
    }

    INDArray activations = preOutput(training, false, workspaceMgr);

    // Cache the pre-output only if training with an available, open FF_CACHE workspace.
    if (training
            && cacheMode != CacheMode.NONE
            && workspaceMgr.hasConfiguration(ArrayType.FF_CACHE)
            && workspaceMgr.isWorkspaceOpen(ArrayType.FF_CACHE)) {
        try (MemoryWorkspace borrowed = workspaceMgr.notifyScopeBorrowed(ArrayType.FF_CACHE)) {
            // Duplicate into the borrowed cache workspace.
            preOutput = activations.unsafeDuplication();
        }
    }
    return activations;
}
 
Example 4
Source File: MinMaxNormConstraint.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Override
public void apply(INDArray param) {
    // L2 norm of the parameter array along the configured dimensions.
    INDArray norm = param.norm2(dimensions);
    // Copy the raw norms BEFORE they are modified below; the copy is clamped in place.
    INDArray clipped = norm.unsafeDuplication();
    // Clamp each norm into [min, max] in place via the clipbyvalue custom op.
    CustomOp op = DynamicCustomOp.builder("clipbyvalue")
            .addInputs(clipped)
            .callInplace(true)
            .addFloatingPointArguments(min, max)
            .build();
    Nd4j.getExecutioner().exec(op);

    norm.addi(epsilon);   // guard against division by (near-)zero norms
    clipped.divi(norm);   // per-element scale factor: clamp(norm, min, max) / (norm + epsilon)

    // Soft constraint: blend the scale factor with the epsilon-shifted norm.
    // NOTE(review): muli mutates 'norm' in place here — norm is not reused afterwards,
    // so this is safe, but confirm intent if refactoring.
    if(rate != 1.0){
        clipped.muli(rate).addi(norm.muli(1.0-rate));
    }

    // Rescale param in place via broadcast multiply along the constrained dimensions.
    Broadcast.mul(param, clipped, param, getBroadcastDims(dimensions, param.rank()) );
}
 
Example 5
Source File: WiredEncodingHandler.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
/**
 * Sends the given update message to all registered recipients over the wire,
 * then propagates it to the local queue via the superclass.
 *
 * @param message         update array to distribute (must not be null)
 * @param iterationNumber current training iteration
 * @param epochNumber     current training epoch
 */
@Override
protected void sendMessage(@NonNull INDArray message, int iterationNumber, int epochNumber) {
    // Ship the update to the other executors over the wire
    // (relies on UDP broadcast being available).
    // FIXME: do something with unsafe duplication, which is bad and used ONLY for local spark
    try (MemoryWorkspace scope = Nd4j.getMemoryManager().scopeOutOfWorkspaces()) {
        // Counter side effect is intentional: each outgoing update gets a unique id.
        long updateId = updatesCounter.getAndIncrement();

        ModelParameterServer.getInstance()
                .sendUpdate(message.unsafeDuplication(), iterationNumber, epochNumber);
    }

    // Also update the local queue.
    super.sendMessage(message, iterationNumber, epochNumber);
}
 
Example 6
Source File: MaxNormConstraint.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Override
public void apply(INDArray param){
    // Per-slice L2 norms along the constrained dimensions.
    final INDArray norms = param.norm2(dimensions);
    // Duplicate BEFORE mutating norms: the cap must be applied to the raw values.
    final INDArray scale = norms.unsafeDuplication();
    // Cap every norm at maxNorm.
    BooleanIndexing.replaceWhere(scale, maxNorm, Conditions.greaterThan(maxNorm));
    // scale = min(norm, maxNorm) / (norm + epsilon); epsilon avoids division by zero.
    scale.divi(norms.addi(epsilon));

    // In-place broadcast multiply so no parameter slice exceeds maxNorm.
    Broadcast.mul(param, scale, param, getBroadcastDims(dimensions, param.rank()));
}
 
Example 7
Source File: WorkspaceProviderTests.java    From nd4j with Apache License 2.0 4 votes vote down vote up
@Test
public void testNestedWorkspacesOverlap2() throws Exception {
    // Scenario: two sibling workspaces (WS1, WS2) are open simultaneously; WS1 is
    // then re-entered via notifyScopeBorrowed from inside WS2's scope, and an array
    // duplicated in that borrowed scope must be allocated in WS1, not WS2.
    Nd4j.getWorkspaceManager().setDefaultWorkspaceConfiguration(basicConfiguration);

    assertFalse(Nd4j.getWorkspaceManager().checkIfWorkspaceExists("WS1"));
    assertFalse(Nd4j.getWorkspaceManager().checkIfWorkspaceExists("WS2"));

    try (Nd4jWorkspace ws1 = (Nd4jWorkspace) Nd4j.getWorkspaceManager().getWorkspaceForCurrentThread("WS1")
                    .notifyScopeEntered()) {
        INDArray array = Nd4j.create(new float[] {6f, 3f, 1f, 9f, 21f});
        INDArray array3 = null;

        // 5 elements; offsets are padded to 8-byte alignment, hence the "+ reqMem % 8".
        long reqMem = 5 * Nd4j.sizeOfDataType();
        assertEquals(reqMem + reqMem % 8, ws1.getHostOffset());
        try (Nd4jWorkspace ws2 = (Nd4jWorkspace) Nd4j.getWorkspaceManager().getWorkspaceForCurrentThread("WS2")
                        .notifyScopeEntered()) {

            INDArray array2 = Nd4j.create(new float[] {1f, 2f, 3f, 4f, 5f});

            reqMem = 5 * Nd4j.sizeOfDataType();
            assertEquals(reqMem + reqMem % 8, ws1.getHostOffset());
            assertEquals(reqMem + reqMem % 8, ws2.getHostOffset());

            // Borrowing WS1 while WS2 is active must return the SAME WS1 instance
            // and make it the current workspace.
            try (Nd4jWorkspace ws3 = (Nd4jWorkspace) Nd4j.getWorkspaceManager().getWorkspaceForCurrentThread("WS1")
                            .notifyScopeBorrowed()) {
                assertTrue(ws1 == ws3);
                assertTrue(ws1 == Nd4j.getMemoryManager().getCurrentWorkspace());

                // Duplication inside the borrowed scope allocates from WS1:
                // WS2's offset stays the same, WS1's offset doubles.
                array3 = array2.unsafeDuplication();
                assertTrue(ws1 == array3.data().getParentWorkspace());
                assertEquals(reqMem + reqMem % 8, ws2.getHostOffset());
                assertEquals((reqMem + reqMem % 8) * 2, ws1.getHostOffset());
            }

            log.info("Current workspace: {}", Nd4j.getMemoryManager().getCurrentWorkspace());
            // Leaving the borrowed scope restores WS2 as the current workspace.
            assertTrue(ws2 == Nd4j.getMemoryManager().getCurrentWorkspace());

            assertEquals(reqMem + reqMem % 8, ws2.getHostOffset());
            assertEquals((reqMem + reqMem % 8) * 2, ws1.getHostOffset());

            // Contents survived the cross-workspace copy: 1+2+3+4+5 = 15.
            assertEquals(15f, array3.sumNumber().floatValue(), 0.01f);
        }
    }

    log.info("------");

    // All scopes closed: no workspace should remain active on this thread.
    assertNull(Nd4j.getMemoryManager().getCurrentWorkspace());
}
 
Example 8
Source File: WorkspaceProviderTests.java    From nd4j with Apache License 2.0 4 votes vote down vote up
@Ignore
@Test
public void testMemcpy1() {
    // Warm-up pass so allocator/JIT first-run costs don't land on the copy below.
    INDArray warmUp = Nd4j.create(100000);
    for (int x = 0; x < 5000; x++) {
        warmUp.addi(0.1);
    }

    // Host-only workspace with a fixed 1 GB initial allocation and no size learning.
    WorkspaceConfiguration configuration = WorkspaceConfiguration.builder()
                    .policyMirroring(MirroringPolicy.HOST_ONLY)
                    .initialSize(1024L * 1024L * 1024L)
                    .policyLearning(LearningPolicy.NONE)
                    .build();

    INDArray array = Nd4j.createUninitialized(150000000);

    // Redundant (Nd4jWorkspace) cast removed: the variable is declared as
    // MemoryWorkspace, which createNewWorkspace already returns (the sibling
    // version of this test in deeplearning4j omits the cast as well).
    MemoryWorkspace workspace = Nd4j.getWorkspaceManager().createNewWorkspace(configuration, "HOST");
    workspace.notifyScopeEntered();

    // unsafeDuplication(false): host-side copy without device synchronization.
    INDArray memcpy = array.unsafeDuplication(false);

    workspace.notifyScopeLeft();
}
 
Example 9
Source File: WorkspaceProviderTests.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Test
public void testNestedWorkspacesOverlap2() {
    // Scenario: two sibling workspaces (WS1, WS2) open at once; WS1 is re-entered
    // via notifyScopeBorrowed from inside WS2's scope, and an array duplicated in
    // that borrowed scope must be allocated in WS1, not WS2.
    Nd4j.getWorkspaceManager().setDefaultWorkspaceConfiguration(basicConfiguration);

    assertFalse(Nd4j.getWorkspaceManager().checkIfWorkspaceExists("WS1"));
    assertFalse(Nd4j.getWorkspaceManager().checkIfWorkspaceExists("WS2"));

    try (Nd4jWorkspace ws1 = (Nd4jWorkspace) Nd4j.getWorkspaceManager().getWorkspaceForCurrentThread("WS1")
                    .notifyScopeEntered()) {
        INDArray array = Nd4j.create(new double[] {6f, 3f, 1f, 9f, 21f});
        INDArray array3 = null;

        // 5 doubles; offsets are padded to 8-byte alignment, hence the "+ reqMem % 8".
        long reqMem = 5 * Nd4j.sizeOfDataType(DataType.DOUBLE);
        assertEquals(reqMem + reqMem % 8, ws1.getPrimaryOffset());
        try (Nd4jWorkspace ws2 = (Nd4jWorkspace) Nd4j.getWorkspaceManager().getWorkspaceForCurrentThread("WS2")
                        .notifyScopeEntered()) {

            INDArray array2 = Nd4j.create(new double[] {1f, 2f, 3f, 4f, 5f});

            reqMem = 5 * Nd4j.sizeOfDataType(DataType.DOUBLE);
            assertEquals(reqMem + reqMem % 8, ws1.getPrimaryOffset());
            assertEquals(reqMem + reqMem % 8, ws2.getPrimaryOffset());

            // Borrowing WS1 while WS2 is active must return the SAME WS1 instance
            // and make it the current workspace.
            try (Nd4jWorkspace ws3 = (Nd4jWorkspace) Nd4j.getWorkspaceManager().getWorkspaceForCurrentThread("WS1")
                            .notifyScopeBorrowed()) {
                assertTrue(ws1 == ws3);
                assertTrue(ws1 == Nd4j.getMemoryManager().getCurrentWorkspace());

                // Duplication inside the borrowed scope allocates from WS1:
                // WS2's offset stays the same, WS1's offset doubles.
                array3 = array2.unsafeDuplication();
                assertTrue(ws1 == array3.data().getParentWorkspace());
                assertEquals(reqMem + reqMem % 8, ws2.getPrimaryOffset());
                assertEquals((reqMem + reqMem % 8) * 2, ws1.getPrimaryOffset());
            }

            log.info("Current workspace: {}", Nd4j.getMemoryManager().getCurrentWorkspace());
            // Leaving the borrowed scope restores WS2 as the current workspace.
            assertTrue(ws2 == Nd4j.getMemoryManager().getCurrentWorkspace());

            assertEquals(reqMem + reqMem % 8, ws2.getPrimaryOffset());
            assertEquals((reqMem + reqMem % 8) * 2, ws1.getPrimaryOffset());

            // Contents survived the cross-workspace copy: 1+2+3+4+5 = 15.
            assertEquals(15f, array3.sumNumber().floatValue(), 0.01f);
        }
    }

    log.info("------");

    // All scopes closed: no workspace should remain active on this thread.
    assertNull(Nd4j.getMemoryManager().getCurrentWorkspace());
}
 
Example 10
Source File: WorkspaceProviderTests.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Ignore
@Test
public void testMemcpy1() {
    // Warm-up loop so JIT/allocator first-run costs don't skew the copy below.
    INDArray warmUp = Nd4j.create(100000);
    for (int i = 0; i < 5000; i++) {
        warmUp.addi(0.1);
    }

    // Fixed-size (no learning) 1 GB workspace, host memory only.
    final WorkspaceConfiguration hostOnlyConfig = WorkspaceConfiguration.builder()
            .policyMirroring(MirroringPolicy.HOST_ONLY)
            .initialSize(1024L * 1024L * 1024L)
            .policyLearning(LearningPolicy.NONE)
            .build();

    INDArray source = Nd4j.createUninitialized(150000000);

    MemoryWorkspace workspace = Nd4j.getWorkspaceManager().createNewWorkspace(hostOnlyConfig, "HOST");
    workspace.notifyScopeEntered();

    // Duplicate without device sync — a plain host-side memcpy into the workspace.
    INDArray memcpy = source.unsafeDuplication(false);

    workspace.notifyScopeLeft();
}
 
Example 11
Source File: CompressionTests.java    From nd4j with Apache License 2.0 3 votes vote down vote up
@Test
public void testThresholdCompression3() throws Exception {
    INDArray initial = Nd4j.create(new double[] {-1.0, -2.0, 0.0, 0.0, 1.0, 1.0});
    // Residual left in 'initial' after encoding: each encoded value moves 1e-3 toward zero.
    INDArray expectedResidual = Nd4j.create(new double[] {-1.0 + 1e-3, -2.0 + 1e-3, 0.0, 0.0, 1.0 - 1e-3, 1.0 - 1e-3});
    // Expected decoded update: +/-1e-3 where the threshold was met, 0 elsewhere.
    INDArray expectedDecoded = Nd4j.create(new double[] {-1e-3, -1e-3, 0.0, 0.0, 1e-3, 1e-3});

    //Nd4j.getCompressor().getCompressor("THRESHOLD").configure(1e-3);
    INDArray compressed = Nd4j.getExecutioner().thresholdEncode(initial, 1e-3f);

    // Unsafe copy of the encoded buffer; decoding it must give identical results.
    INDArray copy = compressed.unsafeDuplication();

    log.info("Initial array: {}", Arrays.toString(initial.data().asFloat()));

    // thresholdEncode modifies 'initial' in place, leaving only the residual.
    assertEquals(expectedResidual, initial);

    INDArray decompressed = Nd4j.create(initial.length());
    Nd4j.getExecutioner().thresholdDecode(compressed, decompressed);

    log.info("Decompressed array: {}", Arrays.toString(decompressed.data().asFloat()));

    assertEquals(expectedDecoded, decompressed);

    INDArray decompressedFromCopy = Nd4j.create(initial.length());
    Nd4j.getExecutioner().thresholdDecode(copy, decompressedFromCopy);

    // Distinct objects, equal contents.
    assertFalse(decompressed == decompressedFromCopy);
    assertEquals(decompressed, decompressedFromCopy);
}
 
Example 12
Source File: CompressionTests.java    From deeplearning4j with Apache License 2.0 3 votes vote down vote up
@Test
public void testThresholdCompression3() {
    INDArray initial = Nd4j.create(new double[] {-1.0, -2.0, 0.0, 0.0, 1.0, 1.0});
    // Residual expected in 'initial' after encoding: encoded entries move 1e-3 toward zero.
    INDArray exp_0 = Nd4j.create(new double[] {-1.0 + 1e-3, -2.0 + 1e-3, 0.0, 0.0, 1.0 - 1e-3, 1.0 - 1e-3});
    // Expected decoded update: +/-1e-3 where |value| exceeded the threshold, else 0.
    INDArray exp_1 = Nd4j.create(new double[] {-1e-3, -1e-3, 0.0, 0.0, 1e-3, 1e-3});

    //Nd4j.getCompressor().getCompressor("THRESHOLD").configure(1e-3);
    INDArray compressed = Nd4j.getExecutioner().thresholdEncode(initial, 1e-3f);

    // Unsafe copy of the encoded buffer; decoding the copy must match the original.
    INDArray copy = compressed.unsafeDuplication();

    log.info("Initial array: {}", Arrays.toString(initial.data().asFloat()));

    // thresholdEncode modifies 'initial' in place, leaving only the residual.
    assertEquals(exp_0, initial);

    INDArray decompressed = Nd4j.create(DataType.DOUBLE, initial.length());
    Nd4j.getExecutioner().thresholdDecode(compressed, decompressed);

    log.info("Decompressed array: {}", Arrays.toString(decompressed.data().asFloat()));

    assertEquals(exp_1, decompressed);

    INDArray decompressed_copy = Nd4j.create(DataType.DOUBLE, initial.length());
    Nd4j.getExecutioner().thresholdDecode(copy, decompressed_copy);

    // Distinct array objects with equal contents.
    assertFalse(decompressed == decompressed_copy);
    assertEquals(decompressed, decompressed_copy);
}