Java Code Examples for org.nd4j.linalg.api.buffer.DataType#FLOAT

The following examples show how to use org.nd4j.linalg.api.buffer.DataType#FLOAT. Each example is taken from an open-source project; the source file and project are noted above each snippet.
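Before the examples, here is a minimal, self-contained sketch of how DataType.FLOAT is typically used with ND4J. It is not drawn from any of the projects below; the class name and array shapes are illustrative assumptions.

import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class FloatDataTypeSketch {
    public static void main(String[] args) {
        // Allocate a 2x3 array backed by a 32-bit float buffer
        INDArray a = Nd4j.create(DataType.FLOAT, 2, 3);

        // Cast an existing DOUBLE array to FLOAT
        INDArray d = Nd4j.rand(DataType.DOUBLE, 2, 3);
        INDArray f = d.castTo(DataType.FLOAT);

        // Inspect the data type of each buffer
        System.out.println(a.dataType()); // FLOAT
        System.out.println(f.dataType()); // FLOAT
    }
}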
Example 1
Source File: SpecialTests.java    From deeplearning4j with Apache License 2.0
@Test
public void reproduceWorkspaceCrash_3() {
    val conf = WorkspaceConfiguration.builder().build();

    val ws = Nd4j.getWorkspaceManager().getWorkspaceForCurrentThread(conf, "WS");
    val dtypes = new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF, DataType.LONG, DataType.INT, DataType.SHORT, DataType.BYTE, DataType.UBYTE, DataType.BOOL};
    for (val dX : dtypes) {
        for (val dZ : dtypes) {
            try (val ws2 = ws.notifyScopeEntered()) {
                val array = Nd4j.create(dX, 2, 5).assign(1);
//                log.info("Trying to cast {} to {}", dX, dZ);
                val casted = array.castTo(dZ);
                val exp = Nd4j.create(dZ, 2, 5).assign(1);
                assertEquals(exp, casted);

                Nd4j.getExecutioner().commit();
            }
        }
    }
}
 
Example 2
Source File: BaseLevel1.java    From deeplearning4j with Apache License 2.0
/**
 * Computes a vector-vector dot product.
 *
 * @param n     the number of elements to process
 * @param alpha a scaling factor (ignored by this implementation)
 * @param X     an INDArray
 * @param Y     an INDArray
 * @return the vector-vector dot product of X and Y
 */
@Override
public double dot(long n, double alpha, INDArray X, INDArray Y) {
    if (Nd4j.getExecutioner().getProfilingMode() == OpExecutioner.ProfilingMode.ALL)
        OpProfiler.getInstance().processBlasCall(false, X, Y);

    if (X.data().dataType() == DataType.DOUBLE) {
        DefaultOpExecutioner.validateDataType(DataType.DOUBLE, X, Y);
        return ddot(n, X, BlasBufferUtil.getBlasStride(X), Y, BlasBufferUtil.getBlasStride(Y));
    } else if (X.data().dataType() == DataType.FLOAT) {
        DefaultOpExecutioner.validateDataType(DataType.FLOAT, X, Y);
        return sdot(n, X, BlasBufferUtil.getBlasStride(X), Y, BlasBufferUtil.getBlasStride(Y));
    } else {
        DefaultOpExecutioner.validateDataType(DataType.HALF, X, Y);
        return hdot(n, X, BlasBufferUtil.getBlasStride(X), Y, BlasBufferUtil.getBlasStride(Y));
    }

}
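For context, a minimal usage sketch of this Level1 routine follows (imports as in the introductory sketch). The entry point via Nd4j.getBlasWrapper().level1() is assumed here for illustration and is not part of the example above.

INDArray x = Nd4j.createFromArray(1.0f, 2.0f, 3.0f);
INDArray y = Nd4j.createFromArray(4.0f, 5.0f, 6.0f);
// Both buffers are DataType.FLOAT, so the call dispatches to sdot
double result = Nd4j.getBlasWrapper().level1().dot(x.length(), 1.0, x, y);
// result is 32.0 (1*4 + 2*5 + 3*6)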
 
Example 3
Source File: BaseLevel1.java    From deeplearning4j with Apache License 2.0
@Override
public void axpy(long n, double alpha, DataBuffer x, int offsetX, int incrX, DataBuffer y, int offsetY, int incrY) {
    if (supportsDataBufferL1Ops()) {
        if (x.dataType() == DataType.DOUBLE) {
            daxpy(n, alpha, x, offsetX, incrX, y, offsetY, incrY);
        } else if (x.dataType() == DataType.FLOAT) {
            saxpy(n, (float) alpha, x, offsetX, incrX, y, offsetY, incrY);
        } else {
            haxpy(n, (float) alpha, x, offsetX, incrX, y, offsetY, incrY);
        }
    } else {
        // Fall back to the INDArray-based axpy by wrapping the raw buffers
        long[] shapex = {1, n};
        long[] shapey = {1, n};
        long[] stridex = {incrX, incrX};
        long[] stridey = {incrY, incrY};
        INDArray arrX = Nd4j.create(x, shapex, stridex, offsetX, 'c');
        INDArray arrY = Nd4j.create(y, shapey, stridey, offsetY, 'c'); // wrap y, not x
        axpy(n, alpha, arrX, arrY);
    }
}
 
Example 4
Source File: PythonNumpyJobTest.java    From deeplearning4j with Apache License 2.0
@Parameterized.Parameters(name = "{index}: Testing with DataType={0}")
public static DataType[] params() {
    return new DataType[]{
            DataType.BOOL,
            DataType.FLOAT16,
            DataType.BFLOAT16,
            DataType.FLOAT,
            DataType.DOUBLE,
            DataType.INT8,
            DataType.INT16,
            DataType.INT32,
            DataType.INT64,
            DataType.UINT8,
            DataType.UINT16,
            DataType.UINT32,
            DataType.UINT64
    };
}
 
Example 5
Source File: ToStringTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testToStringScalars(){
    DataType[] dataTypes = new DataType[]{DataType.FLOAT, DataType.DOUBLE, DataType.BOOL, DataType.INT, DataType.UINT32};
    String[] strs = new String[]{"1.0000", "1.0000", "true", "1", "1"};

    for(int dt=0; dt<5; dt++ ) {
        for (int i = 0; i < 5; i++) {
            long[] shape = ArrayUtil.nTimes(i, 1L);
            INDArray scalar = Nd4j.scalar(1.0f).castTo(dataTypes[dt]).reshape(shape);
            String str = scalar.toString();
            StringBuilder sb = new StringBuilder();
            for (int j = 0; j < i; j++) {
                sb.append("[");
            }
            sb.append(strs[dt]);
            for (int j = 0; j < i; j++) {
                sb.append("]");
            }
            String exp = sb.toString();
            assertEquals("Rank: " + i + ", DT: " + dataTypes[dt], exp, str);
        }
    }
}
 
Example 6
Source File: PythonNumpyCollectionsTest.java    From deeplearning4j with Apache License 2.0
@Parameterized.Parameters(name = "{index}: Testing with DataType={0}")
public static DataType[] params() {
    return new DataType[]{
            DataType.BOOL,
            DataType.FLOAT16,
            //DataType.BFLOAT16,
            DataType.FLOAT,
            DataType.DOUBLE,
            DataType.INT8,
            DataType.INT16,
            DataType.INT32,
            DataType.INT64,
            DataType.UINT8,
            DataType.UINT16,
            DataType.UINT32,
            DataType.UINT64
    };
}
 
Example 7
Source File: TestNativeImageLoader.java    From deeplearning4j with Apache License 2.0
@Test
public void testDataTypes_2() throws Exception {
    val dtypes = new DataType[]{DataType.FLOAT, DataType.HALF, DataType.SHORT, DataType.INT};

    val dt = Nd4j.dataType();

    for (val dtype: dtypes) {
        Nd4j.setDataType(dtype);
        int w3 = 123, h3 = 77, ch3 = 3;
        val loader = new NativeImageLoader(h3, w3, 1);
        File f3 = new ClassPathResource("datavec-data-image/testimages/class0/2.jpg").getFile();
        val array = loader.asMatrix(f3);

        assertEquals(dtype, array.dataType());
    }

    Nd4j.setDataType(dt);
}
 
Example 8
Source File: ArrayOptionsHelper.java    From deeplearning4j with Apache License 2.0
public static DataType convertToDataType(org.tensorflow.framework.DataType dataType) {
    switch (dataType) {
        case DT_UINT16:
            return DataType.UINT16;
        case DT_UINT32:
            return DataType.UINT32;
        case DT_UINT64:
            return DataType.UINT64;
        case DT_BOOL:
            return DataType.BOOL;
        case DT_BFLOAT16:
            return DataType.BFLOAT16;
        case DT_FLOAT:
            return DataType.FLOAT;
        case DT_INT32:
            return DataType.INT;
        case DT_INT64:
            return DataType.LONG;
        case DT_INT8:
            return DataType.BYTE;
        case DT_INT16:
            return DataType.SHORT;
        case DT_DOUBLE:
            return DataType.DOUBLE;
        case DT_UINT8:
            return DataType.UBYTE;
        case DT_HALF:
            return DataType.HALF;
        case DT_STRING:
            return DataType.UTF8;
        default:
            throw new UnsupportedOperationException("Unknown TF data type: [" + dataType.name() + "]");
    }
}
 
Example 9
Source File: SpecialTests.java    From deeplearning4j with Apache License 2.0
@Test
public void reproduceWorkspaceCrash_2() {
    val dtypes = new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF, DataType.LONG, DataType.INT, DataType.SHORT, DataType.BYTE, DataType.UBYTE, DataType.BOOL};
    for (val dX : dtypes) {
        for (val dZ : dtypes) {
            val array = Nd4j.create(dX, 2, 5).assign(1);

//            log.info("Trying to cast {} to {}", dX, dZ);
            val casted = array.castTo(dZ);

            val exp = Nd4j.create(dZ, 2, 5).assign(1);
            assertEquals(exp, casted);
        }
    }
}
 
Example 10
Source File: BaseBlasWrapper.java    From deeplearning4j with Apache License 2.0
@Override
public INDArray axpy(double da, INDArray dx, INDArray dy) {
    LinAlgExceptions.assertVector(dx, dy);

    if (dx.data().dataType() == DataType.FLOAT)
        return axpy((float) da, dx, dy);
    level1().axpy(dx.length(), da, dx, dy);
    return dy;
}
 
Example 11
Source File: LocallyConnectedLayerTest.java    From deeplearning4j with Apache License 2.0
    @Test
    public void testLocallyConnected(){
        for (DataType globalDtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
            Nd4j.setDefaultDataTypes(globalDtype, globalDtype);
            for (DataType networkDtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
                assertEquals(globalDtype, Nd4j.dataType());
                assertEquals(globalDtype, Nd4j.defaultFloatingPointType());

                for (int test = 0; test < 2; test++) {
                    String msg = "Global dtype: " + globalDtype + ", network dtype: " + networkDtype + ", test=" + test;

                    ComputationGraphConfiguration.GraphBuilder b = new NeuralNetConfiguration.Builder()
                            .dataType(networkDtype)
                            .seed(123)
                            .updater(new NoOp())
                            .weightInit(WeightInit.XAVIER)
                            .convolutionMode(ConvolutionMode.Same)
                            .graphBuilder();

                    INDArray[] in;
                    INDArray label;
                    switch (test){
                        case 0:
                            b.addInputs("in")
                                    .addLayer("1", new LSTM.Builder().nOut(5).build(), "in")
                                    .addLayer("2", new LocallyConnected1D.Builder().kernelSize(2).nOut(4).build(), "1")
                                    .addLayer("out", new RnnOutputLayer.Builder().nOut(10).build(), "2")
                                    .setOutputs("out")
                                    .setInputTypes(InputType.recurrent(5, 4));
                            in = new INDArray[]{Nd4j.rand(networkDtype, 2, 5, 4)};
                            label = TestUtils.randomOneHotTimeSeries(2, 10, 4).castTo(networkDtype);
                            break;
                        case 1:
                            b.addInputs("in")
                                    .addLayer("1", new ConvolutionLayer.Builder().kernelSize(2,2).nOut(5).convolutionMode(ConvolutionMode.Same).build(), "in")
                                    .addLayer("2", new LocallyConnected2D.Builder().kernelSize(2,2).nOut(5).build(), "1")
                                    .addLayer("out", new OutputLayer.Builder().nOut(10).build(), "2")
                                    .setOutputs("out")
//                                    .setInputTypes(InputType.convolutional(28, 28, 1));
//                            in = new INDArray[]{Nd4j.rand(networkDtype, 2, 1, 28, 28)};
                                    .setInputTypes(InputType.convolutional(8, 8, 1));
                            in = new INDArray[]{Nd4j.rand(networkDtype, 2, 1, 8, 8)};
                            label = TestUtils.randomOneHot(2, 10).castTo(networkDtype);
                            break;
                        default:
                            throw new RuntimeException();
                    }

                    ComputationGraph net = new ComputationGraph(b.build());
                    net.init();

                    INDArray out = net.outputSingle(in);
                    assertEquals(msg, networkDtype, out.dataType());
                    Map<String, INDArray> ff = net.feedForward(in, false);
                    for (Map.Entry<String, INDArray> e : ff.entrySet()) {
                        if (e.getKey().equals("in"))
                            continue;
                        String s = msg + " - layer: " + e.getKey();
                        assertEquals(s, networkDtype, e.getValue().dataType());
                    }

                    net.setInputs(in);
                    net.setLabels(label);
                    net.computeGradientAndScore();

                    net.fit(new MultiDataSet(in, new INDArray[]{label}));
                }
            }
        }
    }
 
Example 12
Source File: ConvDataFormatTests.java    From deeplearning4j with Apache License 2.0
@Parameterized.Parameters(name = "{0}")
public static Object[] params(){
    return new DataType[]{DataType.FLOAT, DataType.DOUBLE};
}
 
Example 13
Source File: RegressionTest100b6.java    From deeplearning4j with Apache License 2.0
@Override
public DataType getDataType() {
    return DataType.FLOAT;
}
 
Example 14
Source File: CudaFloatDataBuffer.java    From deeplearning4j with Apache License 2.0
/**
 * Initialize the opType of this buffer
 */
@Override
protected void initTypeAndSize() {
    elementSize = 4;
    type = DataType.FLOAT;
}
 
Example 15
Source File: SubsamplingLayerTest.java    From deeplearning4j with Apache License 2.0
@Override
public DataType getDataType(){
    return DataType.FLOAT;
}
 
Example 16
Source File: CapsNetMNISTTest.java    From deeplearning4j with Apache License 2.0
@Override
public DataType getDataType(){
    return DataType.FLOAT;
}
 
Example 17
Source File: TestCompGraphUnsupervised.java    From deeplearning4j with Apache License 2.0
@Override
public DataType getDataType() {
    return DataType.FLOAT;
}
 
Example 18
Source File: EvalTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testEval2() {

    DataType dtypeBefore = Nd4j.defaultFloatingPointType();
    Evaluation first = null;
    String sFirst = null;
    try {
        for (DataType globalDtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF, DataType.INT}) {
            Nd4j.setDefaultDataTypes(globalDtype, globalDtype.isFPType() ? globalDtype : DataType.DOUBLE);
            for (DataType lpDtype : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {

                //Confusion matrix:
                //actual 0      20      3
                //actual 1      10      5

                Evaluation evaluation = new Evaluation(Arrays.asList("class0", "class1"));
                INDArray predicted0 = Nd4j.create(new double[]{1, 0}, new long[]{1, 2}).castTo(lpDtype);
                INDArray predicted1 = Nd4j.create(new double[]{0, 1}, new long[]{1, 2}).castTo(lpDtype);
                INDArray actual0 = Nd4j.create(new double[]{1, 0}, new long[]{1, 2}).castTo(lpDtype);
                INDArray actual1 = Nd4j.create(new double[]{0, 1}, new long[]{1, 2}).castTo(lpDtype);
                for (int i = 0; i < 20; i++) {
                    evaluation.eval(actual0, predicted0);
                }

                for (int i = 0; i < 3; i++) {
                    evaluation.eval(actual0, predicted1);
                }

                for (int i = 0; i < 10; i++) {
                    evaluation.eval(actual1, predicted0);
                }

                for (int i = 0; i < 5; i++) {
                    evaluation.eval(actual1, predicted1);
                }

                assertEquals(20, evaluation.truePositives().get(0), 0);
                assertEquals(3, evaluation.falseNegatives().get(0), 0);
                assertEquals(10, evaluation.falsePositives().get(0), 0);
                assertEquals(5, evaluation.trueNegatives().get(0), 0);

                assertEquals((20.0 + 5) / (20 + 3 + 10 + 5), evaluation.accuracy(), 1e-6);

                String s = evaluation.stats();

                if(first == null) {
                    first = evaluation;
                    sFirst = s;
                } else {
                    assertEquals(first, evaluation);
                    assertEquals(sFirst, s);
                }
            }
        }
    } finally {
        Nd4j.setDefaultDataTypes(dtypeBefore, dtypeBefore);
    }
}
 
Example 19
Source File: DTypeTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testComputationGraphTypeConversion() {

    for (DataType dt : new DataType[]{DataType.DOUBLE, DataType.FLOAT, DataType.HALF}) {
        Nd4j.setDefaultDataTypes(dt, dt);

        ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
                .seed(12345)
                .weightInit(WeightInit.XAVIER)
                .updater(new Adam(0.01))
                .dataType(DataType.DOUBLE)
                .graphBuilder()
                .addInputs("in")
                .layer("l0", new DenseLayer.Builder().activation(Activation.TANH).nIn(10).nOut(10).build(), "in")
                .layer("l1", new DenseLayer.Builder().activation(Activation.TANH).nIn(10).nOut(10).build(), "l0")
                .layer("out", new OutputLayer.Builder().nIn(10).nOut(10).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build(), "l1")
                .setOutputs("out")
                .build();

        ComputationGraph net = new ComputationGraph(conf);
        net.init();

        INDArray inD = Nd4j.rand(DataType.DOUBLE, 1, 10);
        INDArray lD = Nd4j.create(DataType.DOUBLE, 1, 10);
        net.fit(new DataSet(inD, lD));

        INDArray outDouble = net.outputSingle(inD);
        net.setInput(0, inD);
        net.setLabels(lD);
        net.computeGradientAndScore();
        double scoreDouble = net.score();
        INDArray grads = net.getFlattenedGradients();
        INDArray u = net.getUpdater().getStateViewArray();
        assertEquals(DataType.DOUBLE, net.params().dataType());
        assertEquals(DataType.DOUBLE, grads.dataType());
        assertEquals(DataType.DOUBLE, u.dataType());


        ComputationGraph netFloat = net.convertDataType(DataType.FLOAT);
        netFloat.initGradientsView();
        assertEquals(DataType.FLOAT, netFloat.params().dataType());
        assertEquals(DataType.FLOAT, netFloat.getFlattenedGradients().dataType());
        assertEquals(DataType.FLOAT, netFloat.getUpdater(true).getStateViewArray().dataType());
        INDArray inF = inD.castTo(DataType.FLOAT);
        INDArray lF = lD.castTo(DataType.FLOAT);
        INDArray outFloat = netFloat.outputSingle(inF);
        netFloat.setInput(0, inF);
        netFloat.setLabels(lF);
        netFloat.computeGradientAndScore();
        double scoreFloat = netFloat.score();
        INDArray gradsFloat = netFloat.getFlattenedGradients();
        INDArray uFloat = netFloat.getUpdater().getStateViewArray();

        assertEquals(scoreDouble, scoreFloat, 1e-6);
        assertEquals(outDouble.castTo(DataType.FLOAT), outFloat);
        assertEquals(grads.castTo(DataType.FLOAT), gradsFloat);
        INDArray uCast = u.castTo(DataType.FLOAT);
        assertTrue(uCast.equalsWithEps(uFloat, 1e-4));

        ComputationGraph netFP16 = net.convertDataType(DataType.HALF);
        netFP16.initGradientsView();
        assertEquals(DataType.HALF, netFP16.params().dataType());
        assertEquals(DataType.HALF, netFP16.getFlattenedGradients().dataType());
        assertEquals(DataType.HALF, netFP16.getUpdater(true).getStateViewArray().dataType());

        INDArray inH = inD.castTo(DataType.HALF);
        INDArray lH = lD.castTo(DataType.HALF);
        INDArray outHalf = netFP16.outputSingle(inH);
        netFP16.setInput(0, inH);
        netFP16.setLabels(lH);
        netFP16.computeGradientAndScore();
        double scoreHalf = netFP16.score();
        INDArray gradsHalf = netFP16.getFlattenedGradients();
        INDArray uHalf = netFP16.getUpdater().getStateViewArray();

        assertEquals(scoreDouble, scoreHalf, 1e-4);
        boolean outHalfEq = outDouble.castTo(DataType.HALF).equalsWithEps(outHalf, 1e-3);
        assertTrue(outHalfEq);
        boolean gradsHalfEq = grads.castTo(DataType.HALF).equalsWithEps(gradsHalf, 1e-3);
        assertTrue(gradsHalfEq);
        INDArray uHalfCast = u.castTo(DataType.HALF);
        assertTrue(uHalfCast.equalsWithEps(uHalf, 1e-4));
    }
}
 
Example 20
Source File: RegressionTest071.java    From deeplearning4j with Apache License 2.0
@Override
public DataType getDataType(){
    return DataType.FLOAT;
}