Java Code Examples for org.nd4j.linalg.factory.Nd4j

The following examples show how to use org.nd4j.linalg.factory.Nd4j. These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source Project: deeplearning4j   Source File: BaseLevel2.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * spr performs a rank-1 update of an n-by-n packed symmetric matrix a:
 * a := alpha*x*x' + a.
 *
 * @param order matrix storage order
 * @param Uplo  whether the upper or lower triangle of Ap is stored
 * @param alpha scalar multiplier for the rank-1 term
 * @param X     input vector
 * @param Ap    packed symmetric matrix, updated in place
 */
@Override
public void spr(char order, char Uplo, double alpha, INDArray X, INDArray Ap) {
    // Record this BLAS call when full profiling is active.
    if (Nd4j.getExecutioner().getProfilingMode() == OpExecutioner.ProfilingMode.ALL)
        OpProfiler.getInstance().processBlasCall(false, Ap, X);

    // BLAS routines take 32-bit sizes only.
    if (X.length() > Integer.MAX_VALUE)
        throw new ND4JArraySizeException();

    int n = (int) X.length();
    int incX = X.stride(-1);

    // Dispatch to the double- or single-precision routine based on X's dtype.
    if (X.data().dataType() == DataType.DOUBLE) {
        DefaultOpExecutioner.validateDataType(DataType.DOUBLE, X);
        dspr(order, Uplo, n, alpha, X, incX, Ap);
    } else {
        DefaultOpExecutioner.validateDataType(DataType.FLOAT, X);
        sspr(order, Uplo, n, (float) alpha, X, incX, Ap);
    }

    OpExecutionerUtil.checkForAny(Ap);
}
 
Example 2
Source Project: nd4j   Source File: AggregatesTests.java    License: Apache License 2.0 6 votes vote down vote up
@Test
public void testBatchedAggregate1() throws Exception {
    // Two independent axpy ops (y := 1.0 * x + y) submitted as one batch.
    INDArray xA = Nd4j.ones(10);
    INDArray yA = Nd4j.zeros(10);
    INDArray xB = Nd4j.ones(10);
    INDArray yB = Nd4j.zeros(10);

    INDArray expectedA = Nd4j.create(10).assign(1f);
    INDArray expectedB = Nd4j.create(10).assign(1f);

    List<Aggregate> batch = new ArrayList<>();
    batch.add(new AggregateAxpy(xA, yA, 1.0f));
    batch.add(new AggregateAxpy(xB, yB, 1.0f));

    Nd4j.getExecutioner().exec(batch);

    // After execution both y vectors should equal ones.
    assertEquals(expectedA, yA);
    assertEquals(expectedB, yB);
}
 
Example 3
Source Project: nd4j   Source File: PreProcessor3D4DTest.java    License: Apache License 2.0 6 votes vote down vote up
public Construct4dDataSet(int nExamples, int nChannels, int height, int width) {
    // Random 4d batch in (examples, channels, height, width) layout.
    INDArray allImages = Nd4j.rand(new int[] {nExamples, nChannels, height, width});

    // Give channels 1 and 2 very different scales so normalization has work to do
    // (get() returns views, so the in-place ops mutate allImages).
    INDArray channelOne = allImages.get(NDArrayIndex.all(), NDArrayIndex.point(1),
            NDArrayIndex.all(), NDArrayIndex.all());
    channelOne.muli(100).addi(200);

    INDArray channelTwo = allImages.get(NDArrayIndex.all(), NDArrayIndex.point(2),
            NDArrayIndex.all(), NDArrayIndex.all());
    channelTwo.muli(0.001).subi(10);

    INDArray labels = Nd4j.linspace(1, nChannels, nChannels).reshape(nChannels, 1);
    sampleDataSet = new DataSet(allImages, labels);

    // Per-channel statistics over the example and spatial dimensions (0, 2, 3).
    expectedMean = allImages.mean(0, 2, 3);
    expectedStd = allImages.std(0, 2, 3);

    expectedLabelMean = labels.mean(0);
    expectedLabelStd = labels.std(0);

    expectedMin = allImages.min(0, 2, 3);
    expectedMax = allImages.max(0, 2, 3);
}
 
Example 4
Source Project: deeplearning4j   Source File: ReductionOpValidation.java    License: Apache License 2.0 6 votes vote down vote up
@Test
public void testNormMax() {
    SameDiff sameDiff = SameDiff.create();

    // 3x4 input holding 1..12; norm-max over dimension 0 keeps the largest
    // absolute value per column, i.e. the last row here.
    INDArray in = Nd4j.linspace(1, 12, 12).reshape(3, 4);
    SDVariable input = sameDiff.var(in);
    INDArray expected = Nd4j.createFromArray(new double[]{9.0000, 10.0000, 11.0000, 12.0000});

    SDVariable output = new NormMax(sameDiff, input, false, new int[]{0}).outputVariable();

    TestCase tc = new TestCase(sameDiff)
            .expectedOutput(output.name(), expected)
            .gradientCheck(true);

    // validate() returns null on success, otherwise an error description.
    String err = OpValidation.validate(tc);
    assertNull(err);
}
 
Example 5
Source Project: nd4j   Source File: ShufflesTests.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that {@code newData} is a genuine shuffle of the previously measured
 * data: its per-column state map must differ from the saved {@code map}, while
 * every fetched column must still hold one single repeated value.
 *
 * @param newData the array to compare against the previously measured state
 * @return true if the data was shuffled and each column is internally uniform
 */
public boolean compareColumn(INDArray newData) {
    float[] newMap = measureState(newData);

    // Shuffled data must describe the same number of entries as the original map.
    if (newMap.length != map.length) {
        System.out.println("Different map lengths");
        return false;
    }

    // An identical map means no shuffling actually happened.
    if (Arrays.equals(map, newMap)) {
        System.out.println("Maps are equal");
        return false;
    }

    // NOTE(review): the loop bound uses rows() while indexing with getColumn(x) —
    // this is only safe for square matrices; confirm the test data is always square.
    for (int x = 0; x < newData.rows(); x++) {
        INDArray column = newData.getColumn(x);
        double val = column.getDouble(0);
        // Every entry in the column must match its first element within epsilon.
        for (int y = 0; y < column.lengthLong(); y++ ) {
            if (Math.abs(column.getFloat(y) - val) > Nd4j.EPS_THRESHOLD) {
                System.out.print("Different data in a column: " + column.getFloat(y));
                return false;
            }
        }
    }

    return true;
}
 
Example 6
Source Project: nd4j   Source File: Float16.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Compresses the given source buffer into half-precision (fp16) form.
 *
 * @param srcType     element type of the source data
 * @param srcPointer  pointer to the source data
 * @param length      number of elements to compress
 * @param elementSize size in bytes of one source element
 * @return a CompressedDataBuffer holding the fp16 data and its descriptor
 */
@Override
protected CompressedDataBuffer compressPointer(DataBuffer.TypeEx srcType, Pointer srcPointer, int length,
                int elementSize) {
    // fp16 output: two bytes per element.
    BytePointer target = new BytePointer(length * 2);

    // Describe the compressed payload so it can be decompressed later.
    CompressionDescriptor descriptor = new CompressionDescriptor();
    descriptor.setCompressionAlgorithm(getDescriptor());
    descriptor.setCompressionType(getCompressionType());
    descriptor.setCompressedLength(length * 2);
    descriptor.setOriginalLength(length * elementSize);
    descriptor.setOriginalElementSize(elementSize);
    descriptor.setNumberOfElements(length);

    CompressedDataBuffer compressed = new CompressedDataBuffer(target, descriptor);

    // Convert the source elements into FLOAT16 directly into the target pointer.
    Nd4j.getNDArrayFactory().convertDataEx(srcType, srcPointer, DataBuffer.TypeEx.FLOAT16, target, length);

    return compressed;
}
 
Example 7
Source Project: deeplearning4j   Source File: CyclicWorkspaceTests.java    License: Apache License 2.0 6 votes vote down vote up
@Test
public void testBasicMechanics_1() {
    val featureShape = new long[]{128, 784};
    val labelShape = new long[]{128, 10};
    val prefetchSize = 24;
    val configuration = WorkspaceConfiguration.builder().minSize(10 * 1024L * 1024L)
            .overallocationLimit(prefetchSize + 1).policyReset(ResetPolicy.ENDOFBUFFER_REACHED)
            .policyLearning(LearningPolicy.FIRST_LOOP).policyAllocation(AllocationPolicy.OVERALLOCATE)
            .policySpill(SpillPolicy.REALLOCATE).build();

    // Repeatedly allocate inside a cyclic (end-of-buffer reset) workspace;
    // this should run cleanly without exhausting the workspace.
    for (int iteration = 0; iteration < 100; iteration++) {
        try (val ws = Nd4j.getWorkspaceManager().getAndActivateWorkspace(configuration, "randomNameHere" + 119)) {
            val features = Nd4j.create(featureShape).assign(iteration);
            val labels = Nd4j.create(labelShape).assign(iteration);
        }
    }
}
 
Example 8
Source Project: deeplearning4j   Source File: FailingSameDiffTests.java    License: Apache License 2.0 6 votes vote down vote up
@Test
public void testExecutionDifferentShapesTransform() {
    OpValidationSuite.ignoreFailing();
    SameDiff sd = SameDiff.create();
    SDVariable in = sd.var("in", Nd4j.linspace(1, 12, 12, DataType.DOUBLE).reshape(3, 4));

    SDVariable tanh = sd.math().tanh(in);

    // First pass: evaluate the graph on the 3x4 input.
    INDArray expected = Transforms.tanh(in.getArr(), true);
    INDArray out = tanh.eval();
    assertEquals(expected, out);

    // Second pass: swap in a 5x4 minibatch and re-evaluate the same graph.
    in.setArray(Nd4j.linspace(1, 20, 20, DataType.DOUBLE).reshape(5, 4));
    INDArray out2 = tanh.eval();
    assertArrayEquals(new long[]{5, 4}, out2.shape());

    expected = Transforms.tanh(in.getArr(), true);
    assertEquals(expected, out2);
}
 
Example 9
Source Project: nd4j   Source File: BaseComplexNDArray.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * In-place epsilon-equality against a complex scalar: each element becomes
 * 1 + 0i if its real component is within {@code Nd4j.EPS_THRESHOLD} of the
 * real component of {@code other}, and 0 + 0i otherwise.
 *
 * NOTE(review): only REAL components are compared — the imaginary parts of
 * both this array's elements and {@code other} are ignored. Confirm this is
 * the intended contract for complex epsilon-equality.
 *
 * @param other the number to compare
 * @return this array, mutated in place (not a copy)
 */
@Override
public IComplexNDArray epsi(IComplexNumber other) {
    IComplexNDArray linear = linearView();
    double otherVal = other.realComponent().doubleValue();
    for (int i = 0; i < linearView().length(); i++) {
        IComplexNumber n = linear.getComplex(i);
        double real = n.realComponent().doubleValue();
        double diff = Math.abs(real - otherVal);
        // Write 1+0i on a match, 0+0i otherwise.
        if (diff <= Nd4j.EPS_THRESHOLD)
            linear.putScalar(i, Nd4j.createDouble(1, 0));
        else
            linear.putScalar(i, Nd4j.createDouble(0, 0));
    }

    return this;
}
 
Example 10
Source Project: deeplearning4j   Source File: TransformOpValidation.java    License: Apache License 2.0 6 votes vote down vote up
@Test
public void testDepthToSpace() {
    Nd4j.getRandom().setSeed(1337);

    int miniBatch = 128;
    int blockSize = 4;
    int[] inputShape = new int[]{miniBatch, 2, 2, blockSize * blockSize};

    INDArray input = Nd4j.randn(inputShape);
    SameDiff sd = SameDiff.create();
    SDVariable sdInput = sd.var("in", inputShape);

    // Reference output computed directly via the raw custom op.
    INDArray expOut = Nd4j.create(miniBatch, 2 * blockSize, 2 * blockSize, 1);
    Nd4j.getExecutioner().exec(new DepthToSpace(input, expOut, blockSize, DataFormat.NHWC));

    sd.associateArrayWithVariable(input, sdInput);

    // Same op through the SameDiff API, plus a scalar loss for gradient checking.
    SDVariable t = sd.cnn().depthToSpace("dts", sdInput, blockSize, DataFormat.NHWC);
    SDVariable loss = sd.mean("loss", t);

    String err = OpValidation.validate(new TestCase(sd)
            .expectedOutput("dts", expOut)
            .gradientCheck(true));
    assertNull(err, err);
}
 
Example 11
Source Project: nd4j   Source File: CheckUtil.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Checks ND4J element-wise division against a manually computed expected
 * result (Apache Commons has no element-wise division, so expected values
 * are built entry by entry).
 *
 * @param first                 numerator matrix (2d; both dims iterated)
 * @param second                denominator matrix, same shape as {@code first}
 * @param maxRelativeDifference maximum allowed relative difference per entry
 * @param minAbsDifference      absolute difference below which entries count
 *                              as equal regardless of relative difference
 * @return true if shapes and all entries match within tolerance
 */
public static boolean checkDivManually(INDArray first, INDArray second, double maxRelativeDifference,
                double minAbsDifference) {
    //No apache commons element-wise division, but can do this manually

    INDArray result = first.div(second);
    long[] shape = first.shape();

    INDArray expected = Nd4j.zeros(first.shape());

    for (int i = 0; i < shape[0]; i++) {
        for (int j = 0; j < shape[1]; j++) {
            double v = first.getDouble(i, j) / second.getDouble(i, j);
            expected.putScalar(new int[] {i, j}, v);
        }
    }
    if (!checkShape(expected, result))
        return false;
    boolean ok = checkEntries(expected, result, maxRelativeDifference, minAbsDifference);
    if (!ok) {
        // BUGFIX: the failure diagnostics previously computed mul() here (a
        // copy-paste from the multiplication checker), printing a misleading
        // "on copies" result. Use div() to match the operation under test.
        INDArray onCopies = Shape.toOffsetZeroCopy(first).div(Shape.toOffsetZeroCopy(second));
        printFailureDetails(first, second, expected, result, onCopies, "div");
    }
    return ok;
}
 
Example 12
Source Project: deeplearning4j   Source File: BackTrackLineSearchTest.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that a single-step backtracking line search on the Iris logistic
 * layer returns a full step of 1.0.
 */
@Test
public void testSingleMaxLineSearch() throws Exception {
    // Build the layer and compute its initial gradient and score.
    OutputLayer layer = getIrisLogisticLayerConfig(Activation.SOFTMAX, 100,
                    LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD);
    int nParams = (int) layer.numParams();
    layer.setBackpropGradientsViewArray(Nd4j.create(1, nParams));
    layer.setInput(irisData.getFeatures(), LayerWorkspaceMgr.noWorkspaces());
    layer.setLabels(irisData.getLabels());
    layer.computeGradientAndScore(LayerWorkspaceMgr.noWorkspaces());
    // Cleanup: removed the never-used 'score2' local from the original declaration.
    double score1 = layer.score();

    BackTrackLineSearch lineSearch =
                    new BackTrackLineSearch(layer, new NegativeDefaultStepFunction(), layer.getOptimizer());
    double step = lineSearch.optimize(layer.params(), layer.gradient().gradient(), layer.gradient().gradient(), LayerWorkspaceMgr.noWorkspacesImmutable());

    assertEquals(1.0, step, 1e-3);
}
 
Example 13
Source Project: deeplearning4j   Source File: TestMultiLayerSpace.java    License: Apache License 2.0 6 votes vote down vote up
@Test
public void testILossFunctionGetsSet() {
    ILossFunction lossFunction = new LossMCXENT(Nd4j.create(new float[] {1f, 2f}, new long[]{1, 2}));

    // Hand-built configuration the hyperparameter space must reproduce exactly.
    MultiLayerConfiguration expected = new NeuralNetConfiguration.Builder()
            .updater(new Sgd(0.005)).seed(12345).list()
            .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
            .layer(1, new DenseLayer.Builder().nIn(10).nOut(10).build())
            .layer(2, new OutputLayer.Builder().lossFunction(lossFunction)
                    .activation(Activation.SOFTMAX).nIn(10).nOut(5).build())
            .build();

    MultiLayerSpace mls = new MultiLayerSpace.Builder().updater(new Sgd(0.005)).seed(12345)
            .addLayer(new DenseLayerSpace.Builder().nIn(10).nOut(10).build(), new FixedValue<>(2)) //2 identical layers
            .addLayer(new OutputLayerSpace.Builder().iLossFunction(lossFunction)
                    .activation(Activation.SOFTMAX).nIn(10).nOut(5).build())
            .build();

    // The space is fully fixed, so it exposes no tunable parameters.
    int nParams = mls.numParameters();
    assertEquals(0, nParams);

    MultiLayerConfiguration actual = mls.getValue(new double[0]).getMultiLayerConfiguration();
    assertEquals(expected, actual);
}
 
Example 14
Source Project: deeplearning4j   Source File: SameDiffTests.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * scatterDiv on rows 0 and 1 of a 3x3 ones matrix by 2 should halve those
 * rows and leave row 2 untouched.
 */
@Test
public void testScatterDiv() {
    INDArray arr1 = Nd4j.ones(3, 3);
    INDArray arr2 = Nd4j.createFromArray(0, 1);
    INDArray arr3 = Nd4j.ones(2, 3).assign(2);
    INDArray expected = Nd4j.create(new float[]{0.5f, 0.5f, 0.5f,
                    0.5f, 0.5f, 0.5f,
                    1.0f, 1.0f, 1.0f},
            new long[]{3, 3}).castTo(Nd4j.defaultFloatingPointType());

    SameDiff sd = SameDiff.create();
    SDVariable refs = sd.var("refs", arr1);
    SDVariable idxs = sd.constant("idxs", arr2);
    SDVariable upds = sd.placeHolder("upds", arr3.dataType(), arr3.shape());
    upds.setArray(arr3);

    SDVariable result = sd.scatterDiv(refs, idxs, upds);
    // Evaluate once and reuse: the original called eval() twice, re-executing
    // the graph for each assertion.
    INDArray out = result.eval();
    assertArrayEquals(new long[]{3, 3}, out.shape());
    assertEquals(expected, out);
}
 
Example 15
Source Project: nd4j   Source File: BasicWorkspaceTests.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Verifies basic allocation and arithmetic inside an mmap-backed workspace
 * using an explicit temp file path.
 */
@Test
public void testMmap2() throws Exception {
    // we don't support MMAP on cuda yet
    if (Nd4j.getExecutioner().getClass().getName().toLowerCase().contains("cuda"))
        return;

    File tmp = File.createTempFile("tmp", "fdsfdf");
    tmp.deleteOnExit();
    Nd4jWorkspace.fillFile(tmp, 100000);

    WorkspaceConfiguration mmap = WorkspaceConfiguration.builder()
            .policyLocation(LocationPolicy.MMAP)
            .tempFilePath(tmp.getAbsolutePath())
            .build();

    MemoryWorkspace ws = Nd4j.getWorkspaceManager().getAndActivateWorkspace(mmap, "M3");
    try {
        INDArray mArray = Nd4j.create(100);
        mArray.assign(10f);

        assertEquals(1000f, mArray.sumNumber().floatValue(), 1e-5);
    } finally {
        // ROBUSTNESS: leave the workspace scope even if the assertion fails,
        // so the mmap-backed workspace isn't left active for later tests.
        ws.notifyScopeLeft();
    }
}
 
Example 16
Source Project: deeplearning4j   Source File: MinMaxSerializerStrategy.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Restores a NormalizerMinMaxScaler from a stream, reading fields back in
 * exactly the order the serializer wrote them: fitLabels flag, target
 * min/max, feature stats, then (optionally) label stats.
 *
 * @param stream the stream to restore from
 * @return the restored scaler
 * @throws IOException if reading from the stream fails
 */
@Override
public NormalizerMinMaxScaler restore(@NonNull InputStream stream) throws IOException {
    DataInputStream dis = new DataInputStream(stream);

    boolean fitLabels = dis.readBoolean();
    double targetMin = dis.readDouble();
    double targetMax = dis.readDouble();

    NormalizerMinMaxScaler scaler = new NormalizerMinMaxScaler(targetMin, targetMax);
    scaler.fitLabel(fitLabels);

    // Read order matters: feature min then feature max.
    INDArray featureMin = Nd4j.read(dis);
    INDArray featureMax = Nd4j.read(dis);
    scaler.setFeatureStats(featureMin, featureMax);

    if (fitLabels) {
        INDArray labelMin = Nd4j.read(dis);
        INDArray labelMax = Nd4j.read(dis);
        scaler.setLabelStats(labelMin, labelMax);
    }

    return scaler;
}
 
Example 17
Source Project: nd4j   Source File: CudaAccumTests.java    License: Apache License 2.0 5 votes vote down vote up
@Test
public void testStdev1() {
    // Row-wise (dimension 1) sample standard deviation of a 3x3 matrix.
    INDArray in = Nd4j.create(new double[][] {{5.1, 3.5, 1.4}, {4.9, 3.0, 1.4}, {4.7, 3.2, 1.3}});

    INDArray expected = Nd4j.create(new double[]{1.8556220880, 1.7521415468, 1.7039170559});
    INDArray actual = in.std(1);

    assertEquals(expected, actual);
}
 
Example 18
Source Project: deeplearning4j   Source File: SpecialTests.java    License: Apache License 2.0 5 votes vote down vote up
@Test
public void testDimensionalThings1() {
    // Smoke test: exercise transform() on two random 3d tensors of equal shape.
    INDArray first = Nd4j.rand(new int[] {20, 30, 50});
    INDArray second = Nd4j.rand(first.shape());

    INDArray result = transform(first, second);
}
 
Example 19
Source Project: nd4j   Source File: LongShapeTests.java    License: Apache License 2.0 5 votes vote down vote up
@Test
public void testLongShape_1() {
    // Expected shape-info contents for an uninitialized 5x3 array
    // (rank 2, shape {5, 3}, plus stride/flag/order fields).
    val expected = new long[]{2, 5, 3, 3, 1, 0, 1, 99};

    val array = Nd4j.createUninitialized(5, 3);
    val shapeInfo = array.shapeInfoDataBuffer();

    assertArrayEquals(expected, shapeInfo.asLong());
    // Shape info must be stored as 8-byte (long) elements.
    assertEquals(8, shapeInfo.getElementSize());
}
 
Example 20
Source Project: nd4j   Source File: CudaBlasTests.java    License: Apache License 2.0 5 votes vote down vote up
@Test
public void testForAlex() throws Exception {
    // Benchmark large c-order matrix multiplies on pre-allocated device buffers.
    int[][] shape1s = new int[][]{{10240, 10240}};
    int[][] shape2s = new int[][]{{10240, 10240}};
    int[] nTestsArr = new int[]{5};

    for (int test = 0; test < shape1s.length; test++) {
        int[] shape1 = shape1s[test];
        int[] shape2 = shape2s[test];
        int nTests = nTestsArr[test];

        INDArray left = Nd4j.create(shape1, 'c');
        INDArray right = Nd4j.create(shape2, 'c');

        // Touch both arrays on the device so allocation/transfer cost is
        // excluded from the timed loop below.
        CudaContext context = (CudaContext) AtomicAllocator.getInstance().getDeviceContext().getContext();
        AtomicAllocator.getInstance().getPointer(left, context);
        AtomicAllocator.getInstance().getPointer(right, context);

        //CC
        long startCC = System.currentTimeMillis();
        for (int i = 0; i < nTests; i++) {
            left.mmul(right);
        }
        long endCC = System.currentTimeMillis();
        System.out.println("cc");


        System.out.println("mmul: " + Arrays.toString(shape1) + "x" + Arrays.toString(shape2) + ", " + nTests + " runs");
        System.out.println("cc: " + (endCC - startCC));
    }
}
 
Example 21
Source Project: deeplearning4j   Source File: OpExecutionerTests.java    License: Apache License 2.0 5 votes vote down vote up
@Test
public void testIMax() {
    INDArray arr = Nd4j.linspace(1, 10, 10, DataType.DOUBLE);

    // Max of 1..10 sits at the last index (9).
    ArgMax imax = new ArgMax(arr);
    int maxIdx = Nd4j.getExecutioner().exec(imax)[0].getInt(0);
    assertEquals(9, maxIdx);

    // Negating the array flips the ordering: argmax moves to index 0.
    arr.muli(-1);
    imax = new ArgMax(arr);
    maxIdx = Nd4j.getExecutioner().exec(imax)[0].getInt(0);
    assertEquals(0, maxIdx);
}
 
Example 22
Source Project: deeplearning4j   Source File: DM.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Flushes any pending CBOW batch: executes the queued ops and clears the batch.
 * No-op when CBOW is absent or its batch is null/empty.
 */
@Override
public void finish() {
    // Nothing to flush if CBOW isn't set up or holds no pending operations.
    if (cbow == null || cbow.getBatch() == null || cbow.getBatch().isEmpty())
        return;

    Nd4j.getExecutioner().exec(cbow.getBatch());
    cbow.getBatch().clear();
}
 
Example 23
Source Project: dl4j-tutorials   Source File: Gan11Exemple.java    License: MIT License 5 votes vote down vote up
public void trainGen(INDArray nInput, int n) {
    // Placeholder "real" input filled with -9999 — presumably a sentinel far
    // outside the data range; confirm against how the network consumes it.
    INDArray emptyRealInput = Nd4j.zeros(1, channel, height, width).addi(-9999);
    INDArray[] features = new INDArray[] { nInput, emptyRealInput };

    // Generator training target: label everything as "true".
    INDArray trueLabel = Nd4j.ones(new long[] { 1, 1 });
    INDArray[] labels = new INDArray[] { trueLabel };

    this.freeze(gRate, 0);
    net.fit(features, labels);
}
 
Example 24
Source Project: deeplearning4j   Source File: OpExecutionerTests.java    License: Apache License 2.0 5 votes vote down vote up
@Test
    public void testDropoutInverted() {
        INDArray array = Nd4j.linspace(1, 100, 100, DataType.DOUBLE);
        INDArray result = Nd4j.create(DataType.DOUBLE, 100);

        DropOutInverted dropOut = new DropOutInverted(array, result, 0.65);
        Nd4j.getExecutioner().exec(dropOut);

//        System.out.println("Src array: " + array);
//        System.out.println("Res array: " + result);

        assertNotEquals(array, result);
    }
 
Example 25
Source Project: ml-models   Source File: DeepGL.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Constructs a parallel DeepGL embedding solver.
 *
 * @param graph               the graph iface
 * @param executorService     the executor service
 * @param concurrency         desired number of threads to spawn
 * @param iterations          number of feature-generation iterations to run
 * @param pruningLambda       lambda used when pruning features
 * @param diffusionIterations number of diffusion iterations
 */
public DeepGL(HeavyGraph graph, ExecutorService executorService, int concurrency, int iterations, double pruningLambda, int diffusionIterations) {
    this.graph = graph;
    this.nodeCount = Math.toIntExact(graph.nodeCount());
    this.executorService = executorService;
    this.concurrency = concurrency;
    // Embedding width: 3 base columns plus one per available node property.
    this.embedding = Nd4j.create(nodeCount, 3 + graph.availableNodeProperties().size());
    this.numNeighbourhoods = 3;
    this.iterations = iterations;
    this.pruningLambda = pruningLambda;
    this.diffusionIterations = diffusionIterations;
}
 
Example 26
Source Project: nd4j   Source File: JaccardDistance.java    License: Apache License 2.0 5 votes vote down vote up
public JaccardDistance(INDArray x, INDArray y) {
    super(x, y);
    // Fall back to pass-through when the executioner runs in JAVA mode.
    passThrough = Nd4j.getExecutioner().executionMode() == OpExecutioner.ExecutionMode.JAVA;
    // Two extra-argument slots, both initialized to zero.
    extraArgs = new Object[] {0.0f, 0.0f};
}
 
Example 27
Source Project: nd4j   Source File: OnnxGraphMapper.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Materializes the ND4J array backing a named ONNX tensor by locating its
 * initializer in the graph and copying the raw bytes into a direct buffer.
 *
 * @param tensorName  name of the tensor whose initializer supplies the data
 * @param tensorProto type/shape description of the tensor
 * @param graph       the ONNX graph holding the initializers
 * @return the reshaped INDArray, or null if no initializer matches the name
 * @throws ND4JIllegalStateException if the tensor proto is uninitialized
 */
@Override
public INDArray getNDArrayFromTensor(String tensorName, OnnxProto3.TypeProto.Tensor tensorProto, OnnxProto3.GraphProto graph) {
    DataBuffer.Type type = dataTypeForTensor(tensorProto);
    if(!tensorProto.isInitialized()) {
        throw new ND4JIllegalStateException("Unable to retrieve ndarray. Tensor was not initialized");
    }

    // Linear scan for the initializer carrying this tensor's data.
    OnnxProto3.TensorProto tensor = null;
    for(int i = 0; i < graph.getInitializerCount(); i++) {
        val initializer = graph.getInitializer(i);
        if(initializer.getName().equals(tensorName)) {
            tensor = initializer;
            break;
        }
    }

    if(tensor == null)
        return null;

    // Copy the read-only raw data into a freshly allocated direct buffer in
    // native byte order — presumably because Nd4j.createBuffer wraps direct
    // memory; confirm against the createBuffer contract.
    ByteString bytes = tensor.getRawData();
    ByteBuffer byteBuffer = bytes.asReadOnlyByteBuffer().order(ByteOrder.nativeOrder());
    ByteBuffer directAlloc = ByteBuffer.allocateDirect(byteBuffer.capacity()).order(ByteOrder.nativeOrder());
    directAlloc.put(byteBuffer);
    directAlloc.rewind();
    long[] shape = getShapeFromTensor(tensorProto);
    DataBuffer buffer = Nd4j.createBuffer(directAlloc,type, ArrayUtil.prod(shape));
    INDArray arr = Nd4j.create(buffer).reshape(shape);
    return arr;
}
 
Example 28
Source Project: konduit-serving   Source File: YOLOOutputAdapter.java    License: Apache License 2.0 5 votes vote down vote up
public YOLOOutputAdapter(double threshold, Labels labels, int numLabels) {
    this.threshold = threshold;
    this.labels = labels;
    this.numLabels = numLabels;
    // Fixed YOLOv2 input geometry: 3 channels, 608x608.
    inputShape = new int[]{3, 608, 608};
    boundingBoxPriors = Nd4j.create(YOLO2.DEFAULT_PRIOR_BOXES).castTo(DataType.FLOAT);
    gridWidth = DarknetHelper.getGridWidth(inputShape);
    gridHeight = DarknetHelper.getGridHeight(inputShape);
}
 
Example 29
Source Project: StockPrediction   Source File: StockDataSetIterator.java    License: MIT License 5 votes vote down vote up
/**
 * Builds the next mini-batch of (input, label) time-series data.
 * Inputs carry 5 min-max-normalized features per step (open, close, low,
 * high, volume); labels are either the full next-step feature vector
 * (PriceCategory.ALL) or a single predicted value from feedLabel().
 *
 * @param num requested mini-batch size (capped by remaining example offsets)
 * @return the next DataSet
 * @throws NoSuchElementException when no example start offsets remain
 */
@Override
public DataSet next(int num) {
    if (exampleStartOffsets.size() == 0) throw new NoSuchElementException();
    int actualMiniBatchSize = Math.min(num, exampleStartOffsets.size());
    INDArray input = Nd4j.create(new int[] {actualMiniBatchSize, VECTOR_SIZE, exampleLength}, 'f');
    INDArray label;
    if (category.equals(PriceCategory.ALL)) label = Nd4j.create(new int[] {actualMiniBatchSize, VECTOR_SIZE, exampleLength}, 'f');
    else label = Nd4j.create(new int[] {actualMiniBatchSize, predictLength, exampleLength}, 'f');
    for (int index = 0; index < actualMiniBatchSize; index++) {
        int startIdx = exampleStartOffsets.removeFirst();
        int endIdx = startIdx + exampleLength;
        StockData curData = train.get(startIdx);
        StockData nextData;
        for (int i = startIdx; i < endIdx; i++) {
            int c = i - startIdx;
            // Feature index convention: 0=open, 1=close, 2=low, 3=high, 4=volume.
            input.putScalar(new int[] {index, 0, c}, (curData.getOpen() - minArray[0]) / (maxArray[0] - minArray[0]));
            input.putScalar(new int[] {index, 1, c}, (curData.getClose() - minArray[1]) / (maxArray[1] - minArray[1]));
            input.putScalar(new int[] {index, 2, c}, (curData.getLow() - minArray[2]) / (maxArray[2] - minArray[2]));
            input.putScalar(new int[] {index, 3, c}, (curData.getHigh() - minArray[3]) / (maxArray[3] - minArray[3]));
            input.putScalar(new int[] {index, 4, c}, (curData.getVolume() - minArray[4]) / (maxArray[4] - minArray[4]));
            nextData = train.get(i + 1);
            if (category.equals(PriceCategory.ALL)) {
                // BUGFIX: the open-price label previously normalized with
                // minArray[1]/maxArray[1] (the close-price bounds); open must
                // use index 0, matching the input normalization above.
                label.putScalar(new int[] {index, 0, c}, (nextData.getOpen() - minArray[0]) / (maxArray[0] - minArray[0]));
                label.putScalar(new int[] {index, 1, c}, (nextData.getClose() - minArray[1]) / (maxArray[1] - minArray[1]));
                label.putScalar(new int[] {index, 2, c}, (nextData.getLow() - minArray[2]) / (maxArray[2] - minArray[2]));
                label.putScalar(new int[] {index, 3, c}, (nextData.getHigh() - minArray[3]) / (maxArray[3] - minArray[3]));
                label.putScalar(new int[] {index, 4, c}, (nextData.getVolume() - minArray[4]) / (maxArray[4] - minArray[4]));
            } else {
                label.putScalar(new int[]{index, 0, c}, feedLabel(nextData));
            }
            curData = nextData;
        }
        if (exampleStartOffsets.size() == 0) break;
    }
    return new DataSet(input, label);
}
 
Example 30
Source Project: nd4j   Source File: ComplexNumberTests.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Checks the complex result of (1+i)^(1+i) against fixed expected components.
 *
 * NOTE(review): the expected values 0.3465736 and 0.7853982 are ln(sqrt(2))
 * and pi/4, i.e. the components of log(1+i), and the test is named
 * "Logarithm" while it calls pow() — confirm which operation was intended.
 */
@Test
public void testLogarithmFloat() {
    IComplexDouble base = Nd4j.createDouble(1, 1);
    IComplexDouble exponent = Nd4j.createDouble(1, 1);
    IComplexNumber result = base.pow(exponent);
    // FIX: use expected-first argument order and an explicit delta — exact
    // equality on computed floating-point values is brittle, and JUnit's
    // delta-less assertEquals(double, double) is deprecated.
    assertEquals(0.3465736, result.realComponent().doubleValue(), 1e-6);
    assertEquals(0.7853982, result.imaginaryComponent().doubleValue(), 1e-6);
}