Java Code Examples for org.nd4j.linalg.factory.Nd4j#setDefaultDataTypes()

The following examples show how to use org.nd4j.linalg.factory.Nd4j#setDefaultDataTypes(). All of them are drawn from the deeplearning4j project (Apache License 2.0).
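Before the project examples, here is a minimal self-contained sketch of the call itself (not taken from any project above; the class name is ours). Nd4j.setDefaultDataTypes(dtype, fpDtype) sets the global default data type for newly created arrays and the default floating-point type; the second argument must be a floating-point type (HALF, FLOAT or DOUBLE), which is why Examples 18 and 20 below substitute DOUBLE when iterating over integer types.

import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class SetDefaultDataTypesSketch {
    public static void main(String[] args) {
        // Set both the general default dtype and the default floating-point dtype.
        Nd4j.setDefaultDataTypes(DataType.FLOAT, DataType.FLOAT);
        INDArray a = Nd4j.create(2, 2);      // created with the current default dtype
        System.out.println(a.dataType());    // FLOAT

        // Changing the defaults affects arrays created afterwards...
        Nd4j.setDefaultDataTypes(DataType.DOUBLE, DataType.DOUBLE);
        INDArray b = Nd4j.create(2, 2);
        System.out.println(b.dataType());    // DOUBLE

        // ...but not arrays that already exist.
        System.out.println(a.dataType());    // still FLOAT
    }
}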
Example 1
Source File: BaseND4JTest.java    From deeplearning4j with Apache License 2.0
@Before
public void beforeTest(){
    log.info("{}.{}", getClass().getSimpleName(), name.getMethodName());
    //Suppress ND4J initialization - don't need this logged for every test...
    System.setProperty(ND4JSystemProperties.LOG_INITIALIZATION, "false");
    System.setProperty(ND4JSystemProperties.ND4J_IGNORE_AVX, "true");
    Nd4j.getExecutioner().setProfilingMode(getProfilingMode());
    Nd4j.getExecutioner().setProfilingConfig(ProfilerConfig.builder().build());
    Nd4j.setDefaultDataTypes(getDataType(), getDefaultFPDataType());
    Nd4j.getExecutioner().enableDebugMode(false);
    Nd4j.getExecutioner().enableVerboseMode(false);
    int numThreads = numThreads();
    Preconditions.checkState(numThreads > 0, "Number of threads must be > 0");
    if(numThreads != Nd4j.getEnvironment().maxMasterThreads()) {
        Nd4j.getEnvironment().setMaxMasterThreads(numThreads);
    }
    startTime = System.currentTimeMillis();
    threadCountBefore = ManagementFactory.getThreadMXBean().getThreadCount();
}
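Examples 1 and 2 come from DL4J's shared base test classes, where the dtype arguments are supplied by the getDataType() and getDefaultFPDataType() methods. A hypothetical subclass could pin all of its tests to a given precision; the sketch below assumes (based on how beforeTest() uses them) that these methods are overridable hooks:

// Hypothetical subclass - MyDoublePrecisionTest is not part of deeplearning4j.
public class MyDoublePrecisionTest extends BaseND4JTest {

    @Override
    public DataType getDataType() {
        return DataType.DOUBLE;
    }

    @Override
    public DataType getDefaultFPDataType() {
        return DataType.DOUBLE;
    }

    @Test
    public void arraysUseDoublePrecision() {
        // beforeTest() above has already called Nd4j.setDefaultDataTypes(DOUBLE, DOUBLE)
        INDArray arr = Nd4j.create(3, 3);
        assertEquals(DataType.DOUBLE, arr.dataType());
    }
}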
 
Example 2
Source File: BaseDL4JTest.java    From deeplearning4j with Apache License 2.0

The @Before setup in BaseDL4JTest is identical to the BaseND4JTest method shown in Example 1.
 
Example 3
Source File: SpecialTests.java    From deeplearning4j with Apache License 2.0
@Test
public void reproduceWorkspaceCrash_5(){
    val conf = WorkspaceConfiguration.builder().build();

    val ws = Nd4j.getWorkspaceManager().getWorkspaceForCurrentThread(conf, "WS");

    INDArray arr = Nd4j.create(new double[]{1, 0, 0, 0, 1, 0, 0, 0, 0, 0}, new long[]{1, 10});

    Nd4j.setDefaultDataTypes(DataType.DOUBLE, DataType.DOUBLE);
    assertEquals(DataType.DOUBLE, arr.dataType());

    for( int i=0; i<100; i++ ) {
        try(val ws2 = ws.notifyScopeEntered()) {
            INDArray crash = arr.castTo(DataType.BOOL).castTo(DataType.DOUBLE);
            crash.dup();
        }
    }
}
 
Example 4
Source File: SpecialTests.java    From deeplearning4j with Apache License 2.0
@Test
public void reproduceWorkspaceCrash(){
    val conf = WorkspaceConfiguration.builder().build();

    val ws = Nd4j.getWorkspaceManager().getWorkspaceForCurrentThread(conf, "WS");

    INDArray arr = Nd4j.create(new double[]{1, 0, 0, 0, 1, 0, 0, 0, 0, 0}, new long[]{1, 10});

    //assertNotEquals(Nd4j.defaultFloatingPointType(), arr.dataType());
    Nd4j.setDefaultDataTypes(DataType.DOUBLE, DataType.DOUBLE);

    for( int i=0; i<100; i++ ) {
        try(val ws2 = ws.notifyScopeEntered()) {
            INDArray ok = arr.eq(0.0);
            ok.dup();

            assertEquals(arr.dataType(), Nd4j.defaultFloatingPointType());
            assertEquals(DataType.DOUBLE, Nd4j.defaultFloatingPointType());
            INDArray crash = arr.eq(0.0).castTo(Nd4j.defaultFloatingPointType());
            crash.dup();        //Crashes here on i=1 iteration
        }
    }
}
 
Example 5
Source File: CompressionTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testNoOpCompression1() {
    Nd4j.setDefaultDataTypes(DataType.FLOAT, DataType.FLOAT);
    INDArray array = Nd4j.linspace(1, 10000, 20000, DataType.FLOAT);
    INDArray exp = Nd4j.linspace(1, 10000, 20000, DataType.FLOAT);
    INDArray mps = Nd4j.linspace(1, 10000, 20000, DataType.FLOAT);

    BasicNDArrayCompressor.getInstance().setDefaultCompression("NOOP");

    INDArray compr = BasicNDArrayCompressor.getInstance().compress(array);

    assertEquals(DataType.COMPRESSED, compr.data().dataType());
    assertTrue(compr.isCompressed());

    INDArray decomp = BasicNDArrayCompressor.getInstance().decompress(compr);

    assertEquals(DataType.FLOAT, decomp.data().dataType());
    assertFalse(decomp.isCompressed());
    assertFalse(decomp.data() instanceof CompressedDataBuffer);
    assertFalse(exp.data() instanceof CompressedDataBuffer);
    assertFalse(exp.isCompressed());
    assertFalse(array.data() instanceof CompressedDataBuffer);

    assertEquals(exp, decomp);
}
 
Example 6
Source File: TestEigen.java    From deeplearning4j with Apache License 2.0
@Test
public void testSyev() {
    for(DataType dt : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
        //log.info("Datatype: {}", dt);
        Nd4j.setDefaultDataTypes(dt, dt);

        INDArray A = Nd4j.create(new float[][]{{1.96f, -6.49f, -0.47f, -7.20f, -0.65f},
                {-6.49f, 3.80f, -6.39f, 1.50f, -6.34f}, {-0.47f, -6.39f, 4.17f, -1.51f, 2.67f},
                {-7.20f, 1.50f, -1.51f, 5.70f, 1.80f}, {-0.65f, -6.34f, 2.67f, 1.80f, -7.10f}});

        INDArray B = A.dup();
        INDArray e = Eigen.symmetricGeneralizedEigenvalues(A);

        for (int i = 0; i < A.rows(); i++) {
            INDArray LHS = B.mmul(A.slice(i, 1).reshape(-1, 1));
            INDArray RHS = A.slice(i, 1).mul(e.getFloat(i));

            for (int j = 0; j < LHS.length(); j++) {
                assertEquals(LHS.getFloat(j), RHS.getFloat(j), 0.001f);
            }
        }
    }
}
 
Example 7
Source File: JsonSerdeTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testNDArrayTextSerializer() throws Exception {
    for(char order : new char[]{'c', 'f'}) {
        Nd4j.factory().setOrder(order);
        for (DataType globalDT : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
            Nd4j.setDefaultDataTypes(globalDT, globalDT);

            Nd4j.getRandom().setSeed(12345);
            INDArray in = Nd4j.rand(DataType.DOUBLE, 3, 4).muli(20).subi(10);

            val om = new ObjectMapper();

            for (DataType dt : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF, DataType.LONG, DataType.INT, DataType.SHORT,
                    DataType.BYTE, DataType.UBYTE, DataType.BOOL, DataType.UTF8}) {

                INDArray arr;
                if(dt == DataType.UTF8){
                    arr = Nd4j.create("aaaaa", "bbbb", "ccc", "dd", "e", "f", "g", "h", "i", "j", "k", "l").reshape('c', 3, 4);
                } else {
                    arr = in.castTo(dt);
                }

                TestClass tc = new TestClass(arr);

                String s = om.writeValueAsString(tc);

                TestClass deserialized = om.readValue(s, TestClass.class);
                assertEquals(dt.toString(), tc, deserialized);
            }
        }
    }
}
 
Example 8
Source File: BaseOpValidation.java    From deeplearning4j with Apache License 2.0
@Before
public void beforeClass() {
    Nd4j.create(1);

    Nd4j.setDefaultDataTypes(DataType.DOUBLE, DataType.DOUBLE);
    Nd4j.getRandom().setSeed(123);
}
 
Example 9
Source File: WorkspaceProviderTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testNestedWorkspacesOverlap1() {
    Nd4j.setDefaultDataTypes(DataType.FLOAT, DataType.FLOAT);
    Nd4j.getWorkspaceManager().setDefaultWorkspaceConfiguration(basicConfiguration);
    try (Nd4jWorkspace ws1 = (Nd4jWorkspace) Nd4j.getWorkspaceManager().getWorkspaceForCurrentThread("WS1")
                    .notifyScopeEntered()) {
        INDArray array = Nd4j.create(new float[] {1f, 2f, 3f, 4f, 5f});

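        // 5 floats * 4 bytes = 20 bytes; workspace allocations are padded to an
        // 8-byte boundary here, so the expected offset is 20 + (20 % 8) = 24 bytes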
        long reqMem = 5 * Nd4j.sizeOfDataType();
        assertEquals(reqMem + reqMem % 8, ws1.getPrimaryOffset());
        try (Nd4jWorkspace ws2 = (Nd4jWorkspace) Nd4j.getWorkspaceManager().getWorkspaceForCurrentThread("WS2")
                        .notifyScopeEntered()) {

            INDArray array2 = Nd4j.create(new float[] {1f, 2f, 3f, 4f, 5f});

            reqMem = 5 * Nd4j.sizeOfDataType();
            assertEquals(reqMem + reqMem % 8, ws1.getPrimaryOffset());
            assertEquals(reqMem + reqMem % 8, ws2.getPrimaryOffset());

            try (Nd4jWorkspace ws3 = (Nd4jWorkspace) Nd4j.getWorkspaceManager().getWorkspaceForCurrentThread("WS1")
                            .notifyScopeBorrowed()) {
                assertTrue(ws1 == ws3);

                INDArray array3 = Nd4j.create(new float[] {1f, 2f, 3f, 4f, 5f});

                assertEquals(reqMem + reqMem % 8, ws2.getPrimaryOffset());
                assertEquals((reqMem + reqMem % 8) * 2, ws1.getPrimaryOffset());
            }
        }
    }

    assertNull(Nd4j.getMemoryManager().getCurrentWorkspace());
}
 
Example 10
Source File: BasicWorkspaceTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testDtypeLeverage(){

    for(DataType globalDtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
        for (DataType arrayDType : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
            Nd4j.setDefaultDataTypes(globalDtype, globalDtype);

            WorkspaceConfiguration configOuter = WorkspaceConfiguration.builder().initialSize(10 * 1024L * 1024L)
                    .policyAllocation(AllocationPolicy.OVERALLOCATE).policyLearning(LearningPolicy.NONE).build();
            WorkspaceConfiguration configInner = WorkspaceConfiguration.builder().initialSize(10 * 1024L * 1024L)
                    .policyAllocation(AllocationPolicy.OVERALLOCATE).policyLearning(LearningPolicy.NONE).build();

            try (MemoryWorkspace ws = Nd4j.getWorkspaceManager().getAndActivateWorkspace(configOuter, "ws")) {
                INDArray arr = Nd4j.create(arrayDType, 3, 4);
                try (MemoryWorkspace wsInner = Nd4j.getWorkspaceManager().getAndActivateWorkspace(configInner, "wsInner")) {
                    INDArray leveraged = arr.leverageTo("ws");
                    assertTrue(leveraged.isAttached());
                    assertEquals(arrayDType, leveraged.dataType());

                    INDArray detached = leveraged.detach();
                    assertFalse(detached.isAttached());
                    assertEquals(arrayDType, detached.dataType());
                }
            }
        }
    }
    Nd4j.getWorkspaceManager().destroyAllWorkspacesForCurrentThread();
}
 
Example 11
Source File: DTypeTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testDtypesModelVsGlobalDtypeRnn() {
    for (DataType globalDtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
        Nd4j.setDefaultDataTypes(globalDtype, globalDtype);
        for (DataType networkDtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
            for (int outputLayer = 0; outputLayer < 3; outputLayer++) {
                assertEquals(globalDtype, Nd4j.dataType());
                assertEquals(globalDtype, Nd4j.defaultFloatingPointType());

                String msg = "Global dtype: " + globalDtype + ", network dtype: " + networkDtype + ", outputLayer=" + outputLayer;

                Layer ol;
                Layer secondLast;
                switch (outputLayer) {
                    case 0:
                        ol = new RnnOutputLayer.Builder().nOut(5).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
                        secondLast = new SimpleRnn.Builder().nOut(5).activation(Activation.TANH).build();
                        break;
                    case 1:
                        ol = new RnnLossLayer.Builder().activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build();
                        secondLast = new SimpleRnn.Builder().nOut(5).activation(Activation.TANH).build();
                        break;
                    case 2:
                        ol = new OutputLayer.Builder().nOut(5).build();
                        secondLast = new LastTimeStep(new SimpleRnn.Builder().nOut(5).activation(Activation.TANH).build());
                        break;
                    default:
                        throw new RuntimeException();
                }

                MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                        .dataType(networkDtype)
                        .convolutionMode(ConvolutionMode.Same)
                        .updater(new Adam(1e-2))
                        .list()
                        .layer(new LSTM.Builder().nIn(5).nOut(5).activation(Activation.TANH).build())
                        .layer(new GravesLSTM.Builder().nIn(5).nOut(5).activation(Activation.TANH).build())
                        .layer(new DenseLayer.Builder().nOut(5).build())
                        .layer(new GravesBidirectionalLSTM.Builder().nIn(5).nOut(5).activation(Activation.TANH).build())
                        .layer(new Bidirectional(new LSTM.Builder().nIn(5).nOut(5).activation(Activation.TANH).build()))
                        .layer(new TimeDistributed(new DenseLayer.Builder().nIn(10).nOut(5).activation(Activation.TANH).build()))
                        .layer(new SimpleRnn.Builder().nIn(5).nOut(5).build())
                        .layer(new MaskZeroLayer.Builder().underlying(new SimpleRnn.Builder().nIn(5).nOut(5).build()).maskValue(0.0).build())
                        .layer(secondLast)
                        .layer(ol)
                        .build();

                MultiLayerNetwork net = new MultiLayerNetwork(conf);
                net.init();

                net.initGradientsView();
                assertEquals(msg, networkDtype, net.params().dataType());
                assertEquals(msg, networkDtype, net.getFlattenedGradients().dataType());
                assertEquals(msg, networkDtype, net.getUpdater(true).getStateViewArray().dataType());

                INDArray in = Nd4j.rand(networkDtype, 2, 5, 2);
                INDArray label;
                if (outputLayer == 2) {
                    label = TestUtils.randomOneHot(2, 5).castTo(networkDtype);
                } else {
                    label = TestUtils.randomOneHotTimeSeries(2, 5, 2).castTo(networkDtype);
                }


                INDArray out = net.output(in);
                assertEquals(msg, networkDtype, out.dataType());
                List<INDArray> ff = net.feedForward(in);
                for (int i = 0; i < ff.size(); i++) {
                    assertEquals(msg, networkDtype, ff.get(i).dataType());
                }

                net.setInput(in);
                net.setLabels(label);
                net.computeGradientAndScore();

                net.fit(new DataSet(in, label, Nd4j.ones(networkDtype, 2, 2), outputLayer == 2 ? null : Nd4j.ones(networkDtype, 2, 2)));

                logUsedClasses(net);

                //Now, test mismatched dtypes for input/labels:
                for (DataType inputLabelDtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
                    INDArray in2 = in.castTo(inputLabelDtype);
                    INDArray label2 = label.castTo(inputLabelDtype);
                    net.output(in2);
                    net.setInput(in2);
                    net.setLabels(label2);
                    net.computeGradientAndScore();

                    net.fit(new DataSet(in2, label2));
                }
            }
        }
    }
}
 
Example 12
Source File: DTypeTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testCapsNetDtypes() {
    for (DataType globalDtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
        Nd4j.setDefaultDataTypes(globalDtype, globalDtype);
        for (DataType networkDtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
            assertEquals(globalDtype, Nd4j.dataType());
            assertEquals(globalDtype, Nd4j.defaultFloatingPointType());

            String msg = "Global dtype: " + globalDtype + ", network dtype: " + networkDtype;

            int primaryCapsDim = 2;
            int primaryCapsChannel = 8;
            int capsule = 5;
            int minibatchSize = 8;
            int routing = 1;
            int capsuleDim = 4;
            int height = 6;
            int width = 6;
            int inputDepth = 4;

            MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .dataType(networkDtype)
                    .seed(123)
                    .updater(new NoOp())
                    .weightInit(new WeightInitDistribution(new UniformDistribution(-6, 6)))
                    .list()
                    .layer(new PrimaryCapsules.Builder(primaryCapsDim, primaryCapsChannel)
                            .kernelSize(3, 3)
                            .stride(2, 2)
                            .build())
                    .layer(new CapsuleLayer.Builder(capsule, capsuleDim, routing).build())
                    .layer(new CapsuleStrengthLayer.Builder().build())
                    .layer(new ActivationLayer.Builder(new ActivationSoftmax()).build())
                    .layer(new LossLayer.Builder(new LossNegativeLogLikelihood()).build())
                    .setInputType(InputType.convolutional(height, width, inputDepth))
                    .build();

            MultiLayerNetwork net = new MultiLayerNetwork(conf);
            net.init();

            INDArray in = Nd4j.rand(networkDtype, minibatchSize, inputDepth * height * width).mul(10)
                    .reshape(-1, inputDepth, height, width);
            INDArray label = Nd4j.zeros(networkDtype, minibatchSize, capsule);
            for (int i = 0; i < minibatchSize; i++) {
                label.putScalar(new int[]{i, i % capsule}, 1.0);
            }

            INDArray out = net.output(in);
            assertEquals(msg, networkDtype, out.dataType());
            List<INDArray> ff = net.feedForward(in);
            for (int i = 0; i < ff.size(); i++) {
                String s = msg + " - layer " + (i - 1) + " - " + (i == 0 ? "input" : net.getLayer(i - 1).conf().getLayer().getClass().getSimpleName());
                assertEquals(s, networkDtype, ff.get(i).dataType());
            }

            net.setInput(in);
            net.setLabels(label);
            net.computeGradientAndScore();

            net.fit(new DataSet(in, label));

            logUsedClasses(net);

            //Now, test mismatched dtypes for input/labels:
            for (DataType inputLabelDtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
                INDArray in2 = in.castTo(inputLabelDtype);
                INDArray label2 = label.castTo(inputLabelDtype);
                net.output(in2);
                net.setInput(in2);
                net.setLabels(label2);
                net.computeGradientAndScore();

                net.fit(new DataSet(in2, label2));
            }
        }
    }
}
 
Example 13
Source File: DTypeTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testLocallyConnected() {
    for (DataType globalDtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
        Nd4j.setDefaultDataTypes(globalDtype, globalDtype);
        for (DataType networkDtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
            assertEquals(globalDtype, Nd4j.dataType());
            assertEquals(globalDtype, Nd4j.defaultFloatingPointType());

            INDArray[] in = null;
            for (int test = 0; test < 2; test++) {
                String msg = "Global dtype: " + globalDtype + ", network dtype: " + networkDtype + ", test=" + test;

                ComputationGraphConfiguration.GraphBuilder b = new NeuralNetConfiguration.Builder()
                        .dataType(networkDtype)
                        .seed(123)
                        .updater(new NoOp())
                        .weightInit(WeightInit.XAVIER)
                        .convolutionMode(ConvolutionMode.Same)
                        .graphBuilder();

                INDArray label;
                switch (test) {
                    case 0:
                        b.addInputs("in")
                                .addLayer("1", new LSTM.Builder().nOut(5).build(), "in")
                                .addLayer("2", new LocallyConnected1D.Builder().kernelSize(2).nOut(4).build(), "1")
                                .addLayer("out", new RnnOutputLayer.Builder().nOut(10).build(), "2")
                                .setOutputs("out")
                                .setInputTypes(InputType.recurrent(5, 2));
                        in = new INDArray[]{Nd4j.rand(networkDtype, 2, 5, 2)};
                        label = TestUtils.randomOneHotTimeSeries(2, 10, 2);
                        break;
                    case 1:
                        b.addInputs("in")
                                .addLayer("1", new ConvolutionLayer.Builder().kernelSize(2, 2).nOut(5).convolutionMode(ConvolutionMode.Same).build(), "in")
                                .addLayer("2", new LocallyConnected2D.Builder().kernelSize(2, 2).nOut(5).build(), "1")
                                .addLayer("out", new OutputLayer.Builder().nOut(10).build(), "2")
                                .setOutputs("out")
                                .setInputTypes(InputType.convolutional(8, 8, 1));
                        in = new INDArray[]{Nd4j.rand(networkDtype, 2, 1, 8, 8)};
                        label = TestUtils.randomOneHot(2, 10).castTo(networkDtype);
                        break;
                    default:
                        throw new RuntimeException();
                }

                ComputationGraph net = new ComputationGraph(b.build());
                net.init();

                INDArray out = net.outputSingle(in);
                assertEquals(msg, networkDtype, out.dataType());
                Map<String, INDArray> ff = net.feedForward(in, false);
                for (Map.Entry<String, INDArray> e : ff.entrySet()) {
                    if (e.getKey().equals("in"))
                        continue;
                    String s = msg + " - layer: " + e.getKey();
                    assertEquals(s, networkDtype, e.getValue().dataType());
                }

                net.setInputs(in);
                net.setLabels(label);
                net.computeGradientAndScore();

                net.fit(new MultiDataSet(in, new INDArray[]{label}));

                logUsedClasses(net);

                //Now, test mismatched dtypes for input/labels:
                for (DataType inputLabelDtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
                    INDArray[] in2 = new INDArray[in.length];
                    for (int i = 0; i < in.length; i++) {
                        in2[i] = in[i].castTo(inputLabelDtype);
                    }
                    INDArray label2 = label.castTo(inputLabelDtype);
                    net.output(in2);
                    net.setInputs(in2);
                    net.setLabels(label2);
                    net.computeGradientAndScore();

                    net.fit(new MultiDataSet(in2, new INDArray[]{label2}));
                }
            }
        }
    }
}
 
Example 14
Source File: ValidateMKLDNN.java    From deeplearning4j with Apache License 2.0
@Test @Ignore   //https://github.com/deeplearning4j/deeplearning4j/issues/7272
public void validateLRN() {

    //Only run test if using nd4j-native backend
    assumeTrue(Nd4j.getBackend().getClass().getName().toLowerCase().contains("native"));
    Nd4j.setDefaultDataTypes(DataType.FLOAT, DataType.FLOAT);
    Nd4j.getRandom().setSeed(12345);

    int[] inputSize = {-1, 3, 16, 16};
    int[] stride = {1, 1};
    int[] kernel = {2, 2};
    ConvolutionMode cm = ConvolutionMode.Truncate;

    double[] a = new double[]{1e-4, 1e-4, 1e-3, 1e-3};
    double[] b = new double[]{0.75, 0.9, 0.75, 0.75};
    double[] n = new double[]{5, 3, 3, 4};
    double[] k = new double[]{2, 2.5, 2.75, 2};

    for (int minibatch : new int[]{1, 3}) {
        for( int i=0; i<a.length; i++ ) {
            System.out.println("+++++ MINIBATCH = " + minibatch + ", TEST=" + i + " +++++");


            inputSize[0] = minibatch;
            INDArray f = Nd4j.rand(Nd4j.defaultFloatingPointType(), inputSize);
            INDArray l = TestUtils.randomOneHot(minibatch, 10).castTo(DataType.FLOAT);

            MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .updater(new Adam(0.01))
                    .convolutionMode(cm)
                    .weightInit(new NormalDistribution(0,1))
                    .seed(12345)
                    .list()
                    .layer(new ConvolutionLayer.Builder().activation(Activation.TANH)
                            .kernelSize(kernel)
                            .stride(stride)
                            .padding(0, 0)
                            .nOut(3)
                            .build())
                    .layer(new LocalResponseNormalization.Builder()
                            .alpha(a[i])
                            .beta(b[i])
                            .n(n[i])
                            .k(k[i])
                            .cudnnAllowFallback(false).build())
                    .layer(new OutputLayer.Builder().nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build())
                    .setInputType(InputType.convolutional(inputSize[2], inputSize[3], inputSize[1]))
                    .build();

            MultiLayerNetwork netWith = new MultiLayerNetwork(conf.clone());
            netWith.init();

            MultiLayerNetwork netWithout = new MultiLayerNetwork(conf.clone());
            netWithout.init();

            LayerHelperValidationUtil.TestCase tc = LayerHelperValidationUtil.TestCase.builder()
                    .allowHelpersForClasses(Collections.<Class<?>>singletonList(org.deeplearning4j.nn.layers.normalization.LocalResponseNormalization.class))
                    .testForward(true)
                    .testScore(true)
                    .testBackward(true)
                    .testTraining(true)
                    .features(f)
                    .labels(l)
                    .data(new SingletonDataSetIterator(new DataSet(f, l)))
                    //Very infrequent minor differences - as far as I can tell, just numerical precision issues...
                    .minAbsError(1e-3)
                    .maxRelError(1e-2)
                    .build();

            LayerHelperValidationUtil.validateMLN(netWith, tc);

            System.out.println("/////////////////////////////////////////////////////////////////////////////");
        }
    }
}
 
Example 15
Source File: TestDL4JLocalExecution.java    From deeplearning4j with Apache License 2.0
@BeforeClass
public static void before(){
    Nd4j.setDefaultDataTypes(DataType.FLOAT, DataType.FLOAT);
}
 
Example 16
Source File: TestBertIterator.java    From deeplearning4j with Apache License 2.0
@Test(timeout = 20000L)
public void testMinibatchPadding() throws Exception {
    Nd4j.setDefaultDataTypes(DataType.FLOAT, DataType.FLOAT);
    int minibatchSize = 3;
    TestSentenceHelper testHelper = new TestSentenceHelper(minibatchSize);
    INDArray zeros = Nd4j.create(DataType.INT, 1, 16);
    INDArray expF = Nd4j.create(DataType.INT, 1, 16);
    INDArray expM = Nd4j.create(DataType.INT, 1, 16);
    Map<String, Integer> m = testHelper.getTokenizer().getVocab();
    for (int i = 0; i < minibatchSize; i++) {
        List<String> tokens = testHelper.getTokenizedSentences().get(i);
        INDArray expFTemp = Nd4j.create(DataType.INT, 1, 16);
        INDArray expMTemp = Nd4j.create(DataType.INT, 1, 16);
        System.out.println(tokens);
        for (int j = 0; j < tokens.size(); j++) {
            String token = tokens.get(j);
            if (!m.containsKey(token)) {
                throw new IllegalStateException("Unknown token: \"" + token + "\"");
            }
            int idx = m.get(token);
            expFTemp.putScalar(0, j, idx);
            expMTemp.putScalar(0, j, 1);
        }
        if (i == 0) {
            expF = expFTemp.dup();
            expM = expMTemp.dup();
        } else {
            expF = Nd4j.vstack(expF.dup(), expFTemp);
            expM = Nd4j.vstack(expM.dup(), expMTemp);
        }
    }

    expF = Nd4j.vstack(expF, zeros);
    expM = Nd4j.vstack(expM, zeros);
    INDArray expL = Nd4j.createFromArray(new float[][]{{0, 1}, {1, 0}, {0, 1}, {0, 0}});
    INDArray expLM = Nd4j.create(DataType.FLOAT, 4, 1);
    expLM.putScalar(0, 0, 1);
    expLM.putScalar(1, 0, 1);
    expLM.putScalar(2, 0, 1);

    //--------------------------------------------------------------

    BertIterator b = BertIterator.builder()
            .tokenizer(testHelper.getTokenizer())
            .lengthHandling(BertIterator.LengthHandling.FIXED_LENGTH, 16)
            .minibatchSize(minibatchSize + 1)
            .padMinibatches(true)
            .sentenceProvider(testHelper.getSentenceProvider())
            .featureArrays(BertIterator.FeatureArrays.INDICES_MASK_SEGMENTID)
            .vocabMap(testHelper.getTokenizer().getVocab())
            .task(BertIterator.Task.SEQ_CLASSIFICATION)
            .build();

    MultiDataSet mds = b.next();
    long[] expShape = {4, 16};
    assertArrayEquals(expShape, mds.getFeatures(0).shape());
    assertArrayEquals(expShape, mds.getFeatures(1).shape());
    assertArrayEquals(expShape, mds.getFeaturesMaskArray(0).shape());

    long[] lShape = {4, 2};
    long[] lmShape = {4, 1};
    assertArrayEquals(lShape, mds.getLabels(0).shape());
    assertArrayEquals(lmShape, mds.getLabelsMaskArray(0).shape());

    assertEquals(expF, mds.getFeatures(0));
    assertEquals(expM, mds.getFeaturesMaskArray(0));
    assertEquals(expL, mds.getLabels(0));
    assertEquals(expLM, mds.getLabelsMaskArray(0));

    assertEquals(expF, b.featurizeSentences(testHelper.getSentences()).getFirst()[0]);
    assertEquals(expM, b.featurizeSentences(testHelper.getSentences()).getSecond()[0]);
}
 
Example 17
Source File: TestMultiLayerSpace.java    From deeplearning4j with Apache License 2.0
@BeforeClass
public static void before(){
    Nd4j.setDefaultDataTypes(DataType.FLOAT, DataType.FLOAT);
}
 
Example 18
Source File: EvaluationBinaryTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testEvaluationBinary() {
    //Compare EvaluationBinary to Evaluation class
    DataType dtypeBefore = Nd4j.defaultFloatingPointType();
    EvaluationBinary first = null;
    String sFirst = null;
    try {
        for (DataType globalDtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF, DataType.INT}) {
            Nd4j.setDefaultDataTypes(globalDtype, globalDtype.isFPType() ? globalDtype : DataType.DOUBLE);
            for (DataType lpDtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {

                Nd4j.getRandom().setSeed(12345);

                int nExamples = 50;
                int nOut = 4;
                long[] shape = {nExamples, nOut};

                INDArray labels = Nd4j.getExecutioner().exec(new BernoulliDistribution(Nd4j.createUninitialized(lpDtype, shape), 0.5));

                INDArray predicted = Nd4j.rand(lpDtype, shape);
                INDArray binaryPredicted = predicted.gt(0.5);

                EvaluationBinary eb = new EvaluationBinary();
                eb.eval(labels, predicted);

                //System.out.println(eb.stats());

                double eps = 1e-6;
                for (int i = 0; i < nOut; i++) {
                    INDArray lCol = labels.getColumn(i,true);
                    INDArray pCol = predicted.getColumn(i,true);
                    INDArray bpCol = binaryPredicted.getColumn(i,true);

                    int countCorrect = 0;
                    int tpCount = 0;
                    int tnCount = 0;
                    for (int j = 0; j < lCol.length(); j++) {
                        if (lCol.getDouble(j) == bpCol.getDouble(j)) {
                            countCorrect++;
                            if (lCol.getDouble(j) == 1) {
                                tpCount++;
                            } else {
                                tnCount++;
                            }
                        }
                    }
                    double acc = countCorrect / (double) lCol.length();

                    Evaluation e = new Evaluation();
                    e.eval(lCol, pCol);

                    assertEquals(acc, eb.accuracy(i), eps);
                    assertEquals(e.accuracy(), eb.scoreForMetric(ACCURACY, i), eps);
                    assertEquals(e.precision(1), eb.scoreForMetric(PRECISION, i), eps);
                    assertEquals(e.recall(1), eb.scoreForMetric(RECALL, i), eps);
                    assertEquals(e.f1(1), eb.scoreForMetric(F1, i), eps);
                    assertEquals(e.falseAlarmRate(), eb.scoreForMetric(FAR, i), eps);
                    assertEquals(e.falsePositiveRate(1), eb.falsePositiveRate(i), eps);


                    assertEquals(tpCount, eb.truePositives(i));
                    assertEquals(tnCount, eb.trueNegatives(i));

                    assertEquals((int) e.truePositives().get(1), eb.truePositives(i));
                    assertEquals((int) e.trueNegatives().get(1), eb.trueNegatives(i));
                    assertEquals((int) e.falsePositives().get(1), eb.falsePositives(i));
                    assertEquals((int) e.falseNegatives().get(1), eb.falseNegatives(i));

                    assertEquals(nExamples, eb.totalCount(i));

                    String s = eb.stats();
                    if(first == null) {
                        first = eb;
                        sFirst = s;
                    } else {
                        assertEquals(first, eb);
                        assertEquals(sFirst, s);
                    }
                }
            }
        }
    } finally {
        Nd4j.setDefaultDataTypes(dtypeBefore, dtypeBefore);
    }
}
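Note the structure of this test: because setDefaultDataTypes() mutates process-wide state, the previous floating-point default is captured before the loop and restored in a finally block, so a failing assertion cannot leak a non-default dtype into later tests. Distilled to a minimal sketch (Example 20 below uses the same pattern):

DataType dtypeBefore = Nd4j.defaultFloatingPointType();
try {
    Nd4j.setDefaultDataTypes(DataType.HALF, DataType.HALF);
    // ... test body that relies on HALF defaults ...
} finally {
    // Always restore, even if an assertion above failed
    Nd4j.setDefaultDataTypes(dtypeBefore, dtypeBefore);
}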
 
Example 19
Source File: DTypeTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testAttentionDTypes() {
    for (DataType globalDtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
        Nd4j.setDefaultDataTypes(globalDtype, globalDtype);
        for (DataType networkDtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
            assertEquals(globalDtype, Nd4j.dataType());
            assertEquals(globalDtype, Nd4j.defaultFloatingPointType());

            String msg = "Global dtype: " + globalDtype + ", network dtype: " + networkDtype;

            int mb = 3;
            int nIn = 3;
            int nOut = 5;
            int tsLength = 4;
            int layerSize = 8;
            int numQueries = 6;

            INDArray in = Nd4j.rand(networkDtype, new long[]{mb, nIn, tsLength});
            INDArray labels = TestUtils.randomOneHot(mb, nOut);

            MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .dataType(networkDtype)
                    .activation(Activation.TANH)
                    .updater(new NoOp())
                    .weightInit(WeightInit.XAVIER)
                    .list()
                    .layer(new LSTM.Builder().nOut(layerSize).build())
                    .layer(new SelfAttentionLayer.Builder().nOut(8).nHeads(2).projectInput(true).build())
                    .layer(new LearnedSelfAttentionLayer.Builder().nOut(8).nHeads(2).nQueries(numQueries).projectInput(true).build())
                    .layer(new RecurrentAttentionLayer.Builder().nIn(layerSize).nOut(layerSize).nHeads(1).projectInput(false).hasBias(false).build())
                    .layer(new GlobalPoolingLayer.Builder().poolingType(PoolingType.MAX).build())
                    .layer(new OutputLayer.Builder().nOut(nOut).activation(Activation.SOFTMAX)
                            .lossFunction(LossFunctions.LossFunction.MCXENT).build())
                    .setInputType(InputType.recurrent(nIn))
                    .build();

            MultiLayerNetwork net = new MultiLayerNetwork(conf);
            net.init();

            INDArray out = net.output(in);
            assertEquals(msg, networkDtype, out.dataType());
            List<INDArray> ff = net.feedForward(in);
            for (int i = 0; i < ff.size(); i++) {
                String s = msg + " - layer " + (i - 1) + " - " + (i == 0 ? "input" : net.getLayer(i - 1).conf().getLayer().getClass().getSimpleName());
                assertEquals(s, networkDtype, ff.get(i).dataType());
            }

            net.setInput(in);
            net.setLabels(labels);
            net.computeGradientAndScore();

            net.fit(new DataSet(in, labels));

            logUsedClasses(net);

            //Now, test mismatched dtypes for input/labels:
            for (DataType inputLabelDtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
                INDArray in2 = in.castTo(inputLabelDtype);
                INDArray label2 = labels.castTo(inputLabelDtype);
                net.output(in2);
                net.setInput(in2);
                net.setLabels(label2);
                net.computeGradientAndScore();

                net.fit(new DataSet(in2, label2));
            }
        }
    }
}
 
Example 20
Source File: EvaluationCalibrationTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testReliabilityDiagram() {

    DataType dtypeBefore = Nd4j.defaultFloatingPointType();
    EvaluationCalibration first = null;
    String sFirst = null;
    try {
        for (DataType globalDtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF, DataType.INT}) {
            Nd4j.setDefaultDataTypes(globalDtype, globalDtype.isFPType() ? globalDtype : DataType.DOUBLE);
            for (DataType lpDtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {

                //Test using 5 bins - format: binary softmax-style output
                //Note: no values fall in fourth bin

                //[0, 0.2)
                INDArray bin0Probs = Nd4j.create(new double[][]{{1.0, 0.0}, {0.9, 0.1}, {0.85, 0.15}}).castTo(lpDtype);
                INDArray bin0Labels = Nd4j.create(new double[][]{{1.0, 0.0}, {1.0, 0.0}, {0.0, 1.0}}).castTo(lpDtype);

                //[0.2, 0.4)
                INDArray bin1Probs = Nd4j.create(new double[][]{{0.80, 0.20}, {0.7, 0.3}, {0.65, 0.35}}).castTo(lpDtype);
                INDArray bin1Labels = Nd4j.create(new double[][]{{1.0, 0.0}, {0.0, 1.0}, {1.0, 0.0}}).castTo(lpDtype);

                //[0.4, 0.6)
                INDArray bin2Probs = Nd4j.create(new double[][]{{0.59, 0.41}, {0.5, 0.5}, {0.45, 0.55}}).castTo(lpDtype);
                INDArray bin2Labels = Nd4j.create(new double[][]{{1.0, 0.0}, {0.0, 1.0}, {0.0, 1.0}}).castTo(lpDtype);

                //[0.6, 0.8)
                //Empty

                //[0.8, 1.0]
                INDArray bin4Probs = Nd4j.create(new double[][]{{0.0, 1.0}, {0.1, 0.9}}).castTo(lpDtype);
                INDArray bin4Labels = Nd4j.create(new double[][]{{0.0, 1.0}, {0.0, 1.0}}).castTo(lpDtype);


                INDArray probs = Nd4j.vstack(bin0Probs, bin1Probs, bin2Probs, bin4Probs);
                INDArray labels = Nd4j.vstack(bin0Labels, bin1Labels, bin2Labels, bin4Labels);

                EvaluationCalibration ec = new EvaluationCalibration(5, 5);
                ec.eval(labels, probs);

                for (int i = 0; i < 1; i++) {
                    double[] avgBinProbsClass;
                    double[] fracPos;
                    if (i == 0) {
                        //Class 0: needs to be handled a little differently, due to threshold/edge cases (0.8, etc)
                        avgBinProbsClass = new double[]{0.05, (0.59 + 0.5 + 0.45) / 3, (0.65 + 0.7) / 2.0,
                                (0.8 + 0.85 + 0.9 + 1.0) / 4};
                        fracPos = new double[]{0.0 / 2.0, 1.0 / 3, 1.0 / 2, 3.0 / 4};
                    } else {
                        avgBinProbsClass = new double[]{bin0Probs.getColumn(i).meanNumber().doubleValue(),
                                bin1Probs.getColumn(i).meanNumber().doubleValue(),
                                bin2Probs.getColumn(i).meanNumber().doubleValue(),
                                bin4Probs.getColumn(i).meanNumber().doubleValue()};

                        fracPos = new double[]{bin0Labels.getColumn(i).sumNumber().doubleValue() / bin0Labels.size(0),
                                bin1Labels.getColumn(i).sumNumber().doubleValue() / bin1Labels.size(0),
                                bin2Labels.getColumn(i).sumNumber().doubleValue() / bin2Labels.size(0),
                                bin4Labels.getColumn(i).sumNumber().doubleValue() / bin4Labels.size(0)};
                    }

                    org.nd4j.evaluation.curves.ReliabilityDiagram rd = ec.getReliabilityDiagram(i);

                    double[] x = rd.getMeanPredictedValueX();
                    double[] y = rd.getFractionPositivesY();

                    assertArrayEquals(avgBinProbsClass, x, 1e-3);
                    assertArrayEquals(fracPos, y, 1e-3);

                    String s = ec.stats();
                    if(first == null) {
                        first = ec;
                        sFirst = s;
                    } else {
//                        assertEquals(first, ec);
                        assertEquals(sFirst, s);
                        assertTrue(first.getRDiagBinPosCount().equalsWithEps(ec.getRDiagBinPosCount(), lpDtype == DataType.HALF ? 1e-3 : 1e-5));  //Lower precision due to float16
                        assertTrue(first.getRDiagBinTotalCount().equalsWithEps(ec.getRDiagBinTotalCount(), lpDtype == DataType.HALF ? 1e-3 : 1e-5));
                        assertTrue(first.getRDiagBinSumPredictions().equalsWithEps(ec.getRDiagBinSumPredictions(), lpDtype == DataType.HALF ? 1e-3 : 1e-5));
                        assertArrayEquals(first.getLabelCountsEachClass(), ec.getLabelCountsEachClass());
                        assertArrayEquals(first.getPredictionCountsEachClass(), ec.getPredictionCountsEachClass());
                        assertTrue(first.getProbHistogramOverall().equalsWithEps(ec.getProbHistogramOverall(), lpDtype == DataType.HALF ? 1e-3 : 1e-5));
                        assertTrue(first.getProbHistogramByLabelClass().equalsWithEps(ec.getProbHistogramByLabelClass(), lpDtype == DataType.HALF ? 1e-3 : 1e-5));
                    }
                }
            }
        }
    } finally {
        Nd4j.setDefaultDataTypes(dtypeBefore, dtypeBefore);
    }
}