Java Code Examples for org.nd4j.linalg.factory.Nd4j#createFromArray()

The following examples show how to use org.nd4j.linalg.factory.Nd4j#createFromArray(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example 1
Source File: SpecialTests.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testBatchToSpace() {
    Nd4j.getRandom().setSeed(1337);

    // 4 independent examples, each a single 1x1x1 spatial cell.
    int miniBatch = 4;
    int[] inputShape = new int[]{miniBatch, 1, 1, 1};

    INDArray input = Nd4j.randn(inputShape).castTo(DataType.DOUBLE);
    INDArray blocks = Nd4j.createFromArray(2, 2);                    // 2x2 block size
    INDArray crops = Nd4j.createFromArray(0, 0, 0, 0).reshape(2, 2); // no cropping

    // Output buffer: batch of 4 collapses to 1, spatial dims grow to 2x2.
    // (Previously named "expOut", but it is the actual op output, not an expectation.)
    INDArray out = Nd4j.create(DataType.DOUBLE, 1, 2, 2, 1);
    DynamicCustomOp op = DynamicCustomOp.builder("batch_to_space_nd")
            .addInputs(input, blocks, crops)
            .addOutputs(out).build();
    Nd4j.getExecutioner().execAndReturn(op);

    // Minimal verification (the original test asserted nothing): batch_to_space
    // is a pure rearrangement, so no elements may be gained or lost.
    assertEquals(input.length(), out.length());
}
 
Example 2
Source File: ReductionOpValidation.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testNormMax() {

    SameDiff sd = SameDiff.create();

    // 3x4 input holding 1..12, row-major.
    INDArray inArr = Nd4j.linspace(1, 12, 12).reshape(3, 4);
    SDVariable in = sd.var(inArr);

    // Max-norm over dimension 0 is the column-wise maximum of |x|, i.e. the last row.
    INDArray exp = Nd4j.createFromArray(new double[]{9.0000, 10.0000, 11.0000, 12.0000});

    SDVariable out = new NormMax(sd, in, false, new int[]{0}).outputVariable();

    TestCase tc = new TestCase(sd)
            .gradientCheck(true)
            .expectedOutput(out.name(), exp);

    assertNull(OpValidation.validate(tc));
}
 
Example 3
Source File: NDBaseTest.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testArgmin() {
    // Mirror of the argmax test, using argmin instead.
    NDBase base = new NDBase();

    INDArray x = Nd4j.createFromArray(new double[][]{{0.75, 0.5, 0.25}, {0.5, 0.75, 0.25}, {0.5, 0.25, 0.75}});

    // Default keepdims: the reduced dimension is dropped.
    INDArray actual = base.argmin(x, 0);
    INDArray expected = Nd4j.createFromArray(1L, 2L, 0L);
    assertEquals(expected, actual);

    // Explicit keepdims=false behaves exactly like the default.
    actual = base.argmin(x, false, 0);
    assertEquals(expected, actual);

    // keepdims=true retains the reduced dimension with size 1.
    actual = base.argmin(x, true, 0);
    expected = Nd4j.createFromArray(new long[][]{{1L, 2L, 0L}});
    assertEquals(expected, actual);
}
 
Example 4
Source File: ShapeOpValidation.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testPermute3(){
    // 3x2 input with values 1..6.
    INDArray inputArr = Nd4j.linspace(DataType.FLOAT, 1, 6, 1).reshape(3, 2);
    // Permutation [1, 0] swaps the two axes — a plain transpose.
    INDArray permuteDims = Nd4j.createFromArray(1, 0);

    SameDiff sd = SameDiff.create();
    SDVariable input = sd.var(inputArr);
    SDVariable perm = sd.constant(permuteDims);

    SDVariable permuted = input.permute(perm);

    assertEquals(inputArr.transpose(), permuted.eval());
}
 
Example 5
Source File: TransformOpValidation.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
@Test
public void testTopK1() {
    // Single non-zero element (10.0) at index 3.
    INDArray input = Nd4j.createFromArray(0.0, 0.0, 0.0, 10.0, 0.0);
    INDArray k = Nd4j.scalar(1);
    INDArray valueOut = Nd4j.create(DataType.DOUBLE, 1);
    INDArray indexOut = Nd4j.create(DataType.INT, 1);

    DynamicCustomOp topK = DynamicCustomOp.builder("top_k")
            .addInputs(input, k)
            .addOutputs(valueOut, indexOut)
            .addBooleanArguments(false) // unsorted output
            .addIntegerArguments(1)     // k = 1
            .build();
    Nd4j.exec(topK);

    // The single top element must be 10.0 at position 3.
    assertEquals(Nd4j.createFromArray(10.0), valueOut);
    assertEquals(Nd4j.createFromArray(3), indexOut);
}
 
Example 6
Source File: NDBaseTest.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testNorm2() {
    NDBase base = new NDBase();
    // Identity matrix: every column has L2 norm exactly 1.
    INDArray x = Nd4j.eye(3).castTo(DataType.FLOAT);

    INDArray result = base.norm2(x, 0);
    assertEquals(Nd4j.createFromArray(1.0f, 1.0f, 1.0f), result);

    // keepdims=true: same values, but the reduced axis is kept with size 1.
    result = base.norm2(x, true, 0);
    assertEquals(Nd4j.createFromArray(new float[][]{{1.0f, 1.0f, 1.0f}}), result);
}
 
Example 7
Source File: TensorFlowImportTest.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testArgMaxImport_1() throws Exception {
    // Import a TensorFlow graph containing an ArgMax node.
    val graph = TFGraphMapper.importGraph(new ClassPathResource("/tf_graphs/argmax.pb.txt").getInputStream());
    log.info(graph.asFlatPrint());

    // Execute the whole graph and grab its (single) output.
    val actual = graph.outputAll(null).get(graph.outputs().get(0));

    val expected = Nd4j.createFromArray(new long[]{2, 2, 2});
    assertEquals(expected, actual);
}
 
Example 8
Source File: ByteOrderTests.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testVectorEncoding_1() {
    // A length-5 vector (the original local was misleadingly named "scalar").
    val vector = Nd4j.createFromArray(new float[]{1, 2, 3, 4, 5});

    // Serialize to a FlatBuffers FlatArray...
    val builder = new FlatBufferBuilder(0);
    val offset = vector.toFlatArray(builder);
    builder.finish(offset);

    // ...then decode it back and verify the round trip is lossless.
    val flat = FlatArray.getRootAsFlatArray(builder.dataBuffer());
    val restored = Nd4j.createFromFlatArray(flat);

    assertEquals(vector, restored);
}
 
Example 9
Source File: NDBaseTest.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testScatterMax() {
    NDBase base = new NDBase();

    // Based on testScatterOpGradients: x and the updates are both all 2.0,
    // so scatterMax must leave every element at 2.0.
    INDArray x = Nd4j.ones(DataType.DOUBLE, 20, 10).add(1.0);
    INDArray indices = Nd4j.createFromArray(3, 4, 5, 10, 18);
    INDArray updates = Nd4j.ones(DataType.DOUBLE, 5, 10).add(1.0);

    INDArray result = base.scatterMax(x, indices, updates);

    // Spot-check the first column: all 20 rows should still be 2.0.
    INDArray firstColumn = result.getColumn(0);
    INDArray expected = Nd4j.createFromArray(2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0,
            2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0);
    assertEquals(expected, firstColumn);
}
 
Example 10
Source File: SDLinalgTest.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testLogdet() {
    // Two 3x3 matrices stacked along dimension 0.
    INDArray x = Nd4j.createFromArray(new double[]{
            4,12,-16,12,37,-43,-16,-43,98, 4,1.2,-1.6,1.2,3.7,-4.3,-1.6,-4.3,9.8
    }).reshape(2, 3, 3);
    // One log-determinant per 3x3 matrix.
    INDArray expected = Nd4j.createFromArray(new double[]{3.5835189, 4.159008});

    // NOTE: "sameDiff" is a field of the enclosing test class.
    SDVariable input = sameDiff.var(x);
    SDVariable logdet = sameDiff.linalg().logdet(input);

    assertEquals(expected, logdet.eval());
}
 
Example 11
Source File: RandomTests.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testPoisson(){
    Nd4j.getRandom().setSeed(12345);
    // Output shape [1, 3]; alpha supplies the per-element rate parameters.
    INDArray shape = Nd4j.createFromArray(new int[]{1, 3});
    INDArray alpha = Nd4j.rand(1, 3);

    // Execute the same op twice with identical inputs; the test asserts that
    // both runs produce equal outputs.
    INDArray[] first = Nd4j.exec(new RandomPoisson(shape, alpha));
    INDArray[] second = Nd4j.exec(new RandomPoisson(shape, alpha));

    assertEquals(first[0], second[0]);
}
 
Example 12
Source File: RandomOpValidation.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testUniformRankSimple() {
    // Shape input for the random op: a single-element array holding 100.0.
    INDArray arr = Nd4j.createFromArray(new double[]{100.0});

    // Earlier revisions of this test exercised "randomuniform" / DistributionUniform;
    // the current version validates RandomBernoulli(p=0.5) with output shape [100].
    OpTestCase tc = new OpTestCase(new RandomBernoulli(arr, Nd4j.createUninitialized(new long[]{100}), 0.5));

    tc.expectedOutput(0, LongShapeDescriptor.fromShape(new long[]{100}, DataType.FLOAT), in -> {
        double min = in.minNumber().doubleValue();
        double max = in.maxNumber().doubleValue();
        double mean = in.meanNumber().doubleValue();
        // Values must lie in [0, 1] and the sample mean must be near p = 0.5.
        boolean ok = min >= 0 && max <= 1 && (in.length() == 1 || Math.abs(mean - 0.5) < 0.2);
        return ok ? null : "Failed: min = " + min + ", max = " + max + ", mean = " + mean;
    });

    assertNull(OpValidation.validate(tc));

    // The op must not have modified its shape input in place.
    assertEquals(100.0, arr.getDouble(0), 0.0);
}
 
Example 13
Source File: NDBaseTest.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testLt() {
    NDBase base = new NDBase();
    // zeros < ones holds at every position.
    INDArray lhs = Nd4j.zeros(DataType.DOUBLE, 3, 3);
    INDArray rhs = Nd4j.ones(DataType.DOUBLE, 3, 3);

    INDArray result = base.lt(lhs, rhs);

    INDArray expected = Nd4j.createFromArray(new boolean[][]{{true, true, true}, {true, true, true}, {true, true, true}});
    assertEquals(expected, result);
}
 
Example 14
Source File: BasicBroadcastTests.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void basicBroadcastTest_1() {
    // x starts as a 3x5 zero matrix; y is a length-5 row of ones to broadcast over it.
    val x = Nd4j.create(DataType.FLOAT, 3, 5);
    val y = Nd4j.createFromArray(new float[]{1.f, 1.f, 1.f, 1.f, 1.f});
    val expected = Nd4j.create(DataType.FLOAT, 3, 5).assign(1.f);

    // In-place add: the output array is x itself.
    Nd4j.exec(new AddOp(new INDArray[]{x, y}, new INDArray[]{x}));

    assertEquals(expected, x);
}
 
Example 15
Source File: ShapeOpValidation.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testTriuOp() {
    SameDiff sd = SameDiff.create();
    SDVariable input = sd.var(Nd4j.createFromArray(new double[][]{{1,2,3}, {4,5,6}, {7,8,9},{10,11,12}}));

    // Triu with diagonal = -1 keeps everything on or above the first sub-diagonal.
    SDVariable out = new Triu(sd, input, -1).outputVariable();
    out.markAsLoss();

    INDArray expected = Nd4j.createFromArray(new double[][]{{1,2,3}, {4,5,6}, {0,8,9},{0,0,12}});
    String err = OpValidation.validate(new TestCase(sd)
            .expectedOutput("triu", expected)
            .gradientCheck(true));
    assertNull(err);
}
 
Example 16
Source File: NDBaseTest.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
@Test
public void testSum() {
    NDBase base = new NDBase();
    // 3x3 matrix containing 1..9.
    INDArray x = Nd4j.linspace(DataType.DOUBLE, 1.0, 1.0, 9).reshape(3, 3);

    // Column sums: [1+4+7, 2+5+8, 3+6+9].
    INDArray expected = Nd4j.createFromArray(12.0, 15.0, 18.0);
    assertEquals(expected, base.sum(x, 0));

    // keepdims=true retains the reduced axis with size 1.
    assertEquals(expected.reshape(1, 3), base.sum(x, true, 0));
}
 
Example 17
Source File: LayerOpValidation.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Test
public void testDeConv3dBasic() {
    // Channel counts and a 2x2x2 kernel for the 3D deconvolution.
    int nIn = 4;
    int nOut = 3;
    int kH = 2;
    int kW = 2;
    int kD = 2;

    // Minibatch of 3, 5x5x5 spatial volume (imgT/imgH/imgW used for the expected shape).
    int mb = 3;
    int imgH = 5;
    int imgW = 5;
    int imgT = 5;

    SameDiff sd = SameDiff.create();
    INDArray inArr = Nd4j.rand(new long[]{mb, nIn, 5, 5, 5});
    // Weight layout used here: [kD, kH, kW, nOut, nIn].
    INDArray wArr = Nd4j.rand(kD, kH, kW, nOut, nIn);

    SDVariable in = sd.var("in", inArr);
    SDVariable w = sd.var("W", wArr);

    // Stride 1, dilation 1, SAME padding, NCDHW data layout.
    DeConv3DConfig conv3DConfig = DeConv3DConfig.builder()
            .kH(kH).kW(kW).kD(kD)
            .sD(1).sH(1).sW(1)
            .dH(1).dW(1).dD(1)
            .isSameMode(true)
            .dataFormat(DeConv3DConfig.NCDHW)
            .build();

    SDVariable out = sd.cnn().deconv3d(in, w, conv3DConfig);
    // NOTE: "out" is the SHAPE of the tanh activation (via .shape()), not the
    // activation values themselves; "loss" is the tanh output used for gradients.
    out = sd.nn().tanh("loss", out).shape().rename("out");

    sd.setLossVariables("loss");

    //Expected conv3d size, NOT same mode: out = (in - k)/d + 1 = (28-2+0)/1+1 = 27
    //Expected conv3d size, WITH same mode: out = in/stride
    // reversed this for deconv3d
    // Because "out" is a shape op, the expected value is the deconv output shape itself.
    INDArray outArr = Nd4j.createFromArray(new long[]{mb, nOut, imgT, imgH, imgW});

    TestCase tc = new TestCase(sd)
            .expectedOutput("out", outArr)
            .gradientCheck(true);
    String err = OpValidation.validate(tc);
    assertNull(err);
}
 
Example 18
Source File: ShapeOpValidation.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Test
public void testReverseSequence() {
    SameDiff sameDiff = SameDiff.create();
    // Two batches of 5 timesteps x 3 features; trailing timesteps are zero padding.
    float[] input_data = new float[]{
            1, 2, 3,
            4, 5, 6,
            7, 8, 9,
            0, 0, 0,
            0, 0, 0,

            1, 2, 3,
            4, 5, 6,
            0, 0, 0,
            0, 0, 0,
            0, 0, 0
    };
    // Expected: the first seqLen timesteps of each batch reversed, padding untouched.
    float[] expected_output = new float[]{
            7, 8, 9,
            4, 5, 6,
            1, 2, 3,
            0, 0, 0,
            0, 0, 0,

            4, 5, 6,
            1, 2, 3,
            0, 0, 0,
            0, 0, 0,
            0, 0, 0
    };
    INDArray arr1 = Nd4j.create(input_data, new long[]{2, 5, 3}).castTo(DataType.DOUBLE);
    INDArray seqLenArr = Nd4j.createFromArray(3, 2); // valid sequence length per batch
    SDVariable x = sameDiff.constant("x", arr1);
    SDVariable seq_lengths = sameDiff.constant("seq_lengths", seqLenArr);
    // seqDim = 1 (time axis), batchDim = 0.
    SDVariable result = sameDiff.reverseSequence(x, seq_lengths, 1, 0);
    INDArray expected = Nd4j.create(expected_output, new long[]{2, 5, 3}).castTo(DataType.DOUBLE);

    // Evaluate once and reuse: the original called result.eval() twice,
    // re-executing the whole graph for each assertion.
    INDArray resultEval = result.eval();
    assertArrayEquals(arr1.shape(), resultEval.shape());
    assertEquals(expected, resultEval);

    SDVariable loss = sameDiff.standardDeviation(result, true);
    String err = OpValidation.validate(new TestCase(sameDiff)
            .expected(result.name(), expected)
            .gradientCheck(false));
    assertNull(err);
}
 
Example 19
Source File: BroadcastTo.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
/**
 * Broadcasts {@code input} to the given target shape, writing into {@code output}.
 * Convenience overload: wraps the shape in an INDArray via Nd4j.createFromArray
 * and delegates to the INDArray-shape constructor.
 *
 * @param input  array to broadcast
 * @param shape  target shape to broadcast to
 * @param output pre-allocated result array
 */
public BroadcastTo(@NonNull INDArray input, @NonNull long[] shape, @NonNull INDArray output){
    this(input, Nd4j.createFromArray(shape), output);
}
 
Example 20
Source File: LayerOpValidation.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
@Test
public void testConv1dForward() {
    // Conv1D fixture: 2 input channels, 1 output channel, kernel width 3,
    // 10 sequences of length 5.
    int nIn = 2;
    int nOut = 1;
    int kernel = 3;
    int batchSize = 10;
    int sequenceSize = 5;

    SameDiff sd = SameDiff.create();

    // Input [batchSize, nIn, sequenceSize] filled with an ascending linspace
    // so the expected values below are fully deterministic.
    INDArray inArr = Nd4j.linspace(0, nIn * batchSize * sequenceSize, nIn * batchSize * sequenceSize)
            .reshape(batchSize, nIn, sequenceSize);

    // Weights [kernel, nIn, nOut], also an ascending linspace.
    INDArray wArr = Nd4j.linspace(0, kernel * nIn * nOut, kernel * nIn * nOut)
            .reshape(kernel, nIn, nOut);

    SDVariable in = sd.var("in", inArr);
    SDVariable w = sd.var("w", wArr);

    // VALID padding: output sequence length = 5 - 3 + 1 = 3.
    SDVariable res = sd.cnn.conv1d(in, w, Conv1DConfig.builder().k(kernel).paddingMode(PaddingMode.VALID).build());

    // Precomputed forward-pass values, one [1 x 3] row per batch element.
    INDArray expected = Nd4j.createFromArray(
            new double[][][]{
                    {{82.42424f, 100.60606f, 118.78788f}},
                    {{264.2424f, 282.4242f, 300.6061f}},
                    {{446.0606f, 464.2424f, 482.424f}},
                    {{627.8788f, 646.0606f, 664.2424f}},
                    {{809.6970f, 827.8788f, 846.0606f}},
                    {{991.5152f, 1009.69696f, 1027.8788f}},
                    {{1173.3333f, 1191.5152f, 1209.6970f}},
                    {{1355.1515f, 1373.3333f, 1391.5153f}},
                    {{1536.9697f, 1555.1515f, 1573.3333f}},
                    {{1718.7878f, 1736.9697f, 1755.1515f}}
            }
    );

    // Forward-only check; gradients are covered elsewhere.
    TestCase tc = new TestCase(sd).gradientCheck(false).expectedOutput(res.name(), expected);
    String err = OpValidation.validate(tc);

    assertNull(err);
}