Java Code Examples for org.nd4j.linalg.api.ndarray.INDArray#putRow()

The following examples show how to use org.nd4j.linalg.api.ndarray.INDArray#putRow(). They are taken from open source projects; the source file, project, and license are noted above each example.
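In brief, putRow(rowIndex, rowVector) copies the given vector into the specified row of the array in place, and the call also returns the array itself so it can be chained. A minimal sketch of that basic contract (not taken from any of the projects below; Nd4j is org.nd4j.linalg.factory.Nd4j):

INDArray matrix = Nd4j.create(new double[][] {{1, 2}, {3, 4}});
INDArray row = Nd4j.create(new double[] {5, 6});
matrix.putRow(1, row);                 // matrix is now [[1, 2], [5, 6]]
System.out.println(matrix.getRow(1));  // prints the updated row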
Example 1
Source File: MiscOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testScatterUpdate(){
    INDArray x = Nd4j.linspace(DataType.FLOAT, 1, 30, 1).reshape(10, 3);
    INDArray updates = Nd4j.create(new float[][]{
            {100, 101, 102},
            {200, 201, 202}});
    INDArray indices = Nd4j.createFromArray(2, 5);

    INDArray exp = x.dup();
    exp.putRow(2, updates.getRow(0));
    exp.putRow(5, updates.getRow(1));

    INDArray out = exp.ulike();
    Nd4j.exec(DynamicCustomOp.builder("scatter_upd")
            .addInputs(x, indices, updates)
            .addOutputs(out)
            .build());

    assertEquals(exp, out);
}
 
Example 2
Source File: NDArrayTestsFortran.java    From deeplearning4j with Apache License 2.0
@Test
public void testPutRowGetRowOrdering() {
    INDArray row1 = Nd4j.linspace(1, 4, 4, DataType.DOUBLE).reshape(2, 2);
    INDArray put = Nd4j.create(new double[] {5, 6});
    row1.putRow(1, put);

    row1.toString();    //Just checks that toString() works on the modified array

    INDArray row1Fortran = Nd4j.linspace(1, 4, 4, DataType.DOUBLE).reshape(2, 2);
    INDArray putFortran = Nd4j.create(new double[] {5, 6});
    row1Fortran.putRow(1, putFortran);
    assertEquals(row1, row1Fortran);
    INDArray row1CTest = row1.getRow(1);
    INDArray row1FortranTest = row1Fortran.getRow(1);
    assertEquals(row1CTest, row1FortranTest);
}
 
Example 3
Source File: UpdaterTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testNesterovs() {
    int rows = 10;
    int cols = 2;


    NesterovsUpdater grad = new NesterovsUpdater(new Nesterovs(0.5, 0.9));
    grad.setStateViewArray(Nd4j.zeros(1, rows * cols), new long[] {rows, cols}, 'c', true);
    INDArray W = Nd4j.zeros(rows, cols);
    Distribution dist = Nd4j.getDistributions().createNormal(1, 1);
    for (int i = 0; i < W.rows(); i++)
        W.putRow(i, Nd4j.create(dist.sample(W.columns())));

    for (int i = 0; i < 5; i++) {
        //            String learningRates = String.valueOf("\nAdagrad\n " + grad.applyUpdater(W, i)).replaceAll(";", "\n");
        //            System.out.println(learningRates);
        W.addi(Nd4j.randn(rows, cols));
    }
}
 
Example 4
Source File: NDArrayTestsFortran.java    From nd4j with Apache License 2.0
@Test
public void testPutRowGetRowOrdering() {
    INDArray row1 = Nd4j.linspace(1, 4, 4).reshape(2, 2);
    INDArray put = Nd4j.create(new double[] {5, 6});
    row1.putRow(1, put);

    System.out.println(row1);

    INDArray row1Fortran = Nd4j.linspace(1, 4, 4).reshape(2, 2);
    INDArray putFortran = Nd4j.create(new double[] {5, 6});
    row1Fortran.putRow(1, putFortran);
    assertEquals(row1, row1Fortran);
    INDArray row1CTest = row1.getRow(1);
    INDArray row1FortranTest = row1Fortran.getRow(1);
    assertEquals(row1CTest, row1FortranTest);



}
 
Example 5
Source File: NDArrayTestsFortran.java    From deeplearning4j with Apache License 2.0
@Test
public void testPutRowFortran() {
    INDArray row1 = Nd4j.linspace(1, 4, 4, DataType.DOUBLE).reshape(2, 2).castTo(DataType.DOUBLE);
    INDArray put = Nd4j.create(new double[] {5, 6});
    row1.putRow(1, put);

    INDArray row1Fortran = Nd4j.create(new double[][] {{1, 3}, {2, 4}});
    INDArray putFortran = Nd4j.create(new double[] {5, 6});
    row1Fortran.putRow(1, putFortran);
    assertEquals(row1, row1Fortran);
}
 
Example 6
Source File: NativeOpExecutionerTest.java    From nd4j with Apache License 2.0
@Test
public void testDebugEdgeCase2(){
    DataTypeUtil.setDTypeForContext(DataBuffer.Type.DOUBLE);
    INDArray l1 = Nd4j.create(new double[]{-0.2585039112684677,-0.005179485353710878,0.4348343401770497,0.020356532375728764,-0.1970793298488186});
    INDArray l2 = Nd4j.create(2,l1.size(1));

    INDArray p1 = Nd4j.create(new double[]{1.3979850406519119,0.6169451410155852,1.128993957530918,0.21000426084450596,0.3171215178932696});
    INDArray p2 = Nd4j.create(2, p1.size(1));

    for( int i=0; i<2; i++ ){
        l2.putRow(i, l1);
        p2.putRow(i, p1);
    }

    INDArray norm2_1 = l1.norm2(1);
    INDArray temp1 = p1.mul(l1);
    INDArray out1 = temp1.diviColumnVector(norm2_1);

    INDArray norm2_2 = l2.norm2(1);
    INDArray temp2 = p2.mul(l2);
    INDArray out2 = temp2.diviColumnVector(norm2_2);

    System.out.println("norm2_1: " + Arrays.toString(norm2_1.data().asDouble()));
    System.out.println("norm2_2: " + Arrays.toString(norm2_2.data().asDouble()));

    System.out.println("temp1: " + Arrays.toString(temp1.data().asDouble()));
    System.out.println("temp2: " + Arrays.toString(temp2.data().asDouble()));

    //Outputs here should be identical:
    System.out.println(Arrays.toString(out1.data().asDouble()));
    System.out.println(Arrays.toString(out2.getRow(0).dup().data().asDouble()));
}
 
Example 7
Source File: IrisUtils.java    From deeplearning4j with Apache License 2.0
private static void addRow(INDArray ret, int row, String[] line) {
    double[] vector = new double[4];
    for (int i = 0; i < 4; i++)
        vector[i] = Double.parseDouble(line[i]);

    ret.putRow(row, Nd4j.create(vector));
}
 
Example 8
Source File: BaseNDArrayFactory.java    From deeplearning4j with Apache License 2.0
/**
 * Rotates a matrix 90 degrees, modifying it in place.
 *
 * @param toRotate the matrix to rotate
 */
@Override
public void rot90(INDArray toRotate) {
    if (!toRotate.isMatrix())
        throw new IllegalArgumentException("Only rotating matrices");

    INDArray start = toRotate.transpose();
    for (int i = 0; i < start.rows(); i++)
        start.putRow(i, reverse(start.getRow(i)));

}
 
Example 9
Source File: WordConverter.java    From deeplearning4j with Apache License 2.0
public static INDArray toLabelMatrix(List<String> labels, List<Window> windows) {
    int columns = labels.size();
    INDArray ret = Nd4j.create(windows.size(), columns);
    for (int i = 0; i < ret.rows(); i++) {
        ret.putRow(i, FeatureUtil.toOutcomeVector(labels.indexOf(windows.get(i).getLabel()), labels.size()));
    }
    return ret;
}
 
Example 10
Source File: ShapeTestsC.java    From deeplearning4j with Apache License 2.0
@Test
public void testPutRow() {
    INDArray matrix = Nd4j.create(new double[][] {{1, 2}, {3, 4}});
    for (int i = 0; i < matrix.rows(); i++) {
        INDArray row = matrix.getRow(i);
        //System.out.println(matrix.getRow(i));
    }
    matrix.putRow(1, Nd4j.create(new double[] {1, 2}));
    assertEquals(matrix.getRow(0), matrix.getRow(1));
}
 
Example 11
Source File: ReductionBpOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testMeanAlongDim0BP() {
    //Reduction along dimension
    //Inputs/outputs as before - but note that the output is no longer a scalar

    //Note: when reducing [3,4] along dimension 0 -> 4 TADs of length 3 -> N=3 -> dL/dIn_i = dL/dOut * 1/3
    //We have one epsilon/gradient for each of the 4 TADs -> dL/dOut length is 4

    for (boolean keepDims : new boolean[]{false, true}) {
        long[] reducedShape_0 = (keepDims ? new long[]{1, 4} : new long[]{4});
        INDArray preReduceInput = Nd4j.linspace(1, 12, 12).reshape('c', 3, 4);
        INDArray dLdOut_0 = Nd4j.create(new double[]{1, 2, 3, 4}, reducedShape_0);
        INDArray dLdInExpected_0 = Nd4j.createUninitialized(preReduceInput.shape());
        for (int i = 0; i < 3; i++) {
            dLdInExpected_0.putRow(i, dLdOut_0.div(3));
        }

        String msg = "keepDims=" + keepDims;
        log.info("Starting test: " + msg);

        INDArray dLdIn = Nd4j.createUninitialized(3, 4);
        String err = OpValidation.validate(new OpTestCase(new MeanBp(preReduceInput, dLdOut_0, dLdIn, keepDims, 0))
                .expectedOutput(0, dLdInExpected_0));

        assertNull(err);
    }
}
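As a side note, the same expected-gradient array could be built without the putRow loop by broadcasting the scaled gradient across the three rows. A sketch, not part of the original test, assuming dLdOut_0 is first reshaped to [1, 4]:

INDArray dLdInAlt = dLdOut_0.div(3).reshape(1, 4).broadcast(3, 4);  // same values as dLdInExpected_0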
 
Example 12
Source File: Nd4jGetAndSetParts.java    From dl4j-tutorials with MIT License
public static void main(String[] args) {
    INDArray nd = Nd4j.create(new float[]{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}, new int[]{2, 6});
    System.out.println("原始数组");
    System.out.println(nd);

    /*
        Get a single row
     */
    System.out.println("Get one row of the array");
    INDArray singleRow = nd.getRow(0);
    System.out.println(singleRow);

    /*
        Get multiple rows
     */
    System.out.println("Get multiple rows of the array");
    INDArray multiRows = nd.getRows(0, 1);
    System.out.println(multiRows);

    /*
        Replace one of the rows
     */
    System.out.println("Replace one row of the original array");
    INDArray replaceRow = Nd4j.create(new float[]{1, 3, 5, 7, 9, 11});
    nd.putRow(0, replaceRow);
    System.out.println(nd);
}
 
Example 13
Source File: LabelLastTimeStepPreProcessor.java    From deeplearning4j with Apache License 2.0
@Override
public void preProcess(DataSet toPreProcess) {

    INDArray label3d = toPreProcess.getLabels();
    Preconditions.checkState(label3d.rank() == 3, "LabelLastTimeStepPreProcessor expects rank 3 labels, got rank %s labels with shape %ndShape", label3d.rank(), label3d);

    INDArray lMask = toPreProcess.getLabelsMaskArray();
    //If no mask: assume that examples for each minibatch are all same length
    INDArray labels2d;
    if(lMask == null){
        labels2d = label3d.get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.point(label3d.size(2)-1)).dup();
    } else {
        //Use the label mask to work out the last time step...
        INDArray lastIndex = BooleanIndexing.lastIndex(lMask, Conditions.greaterThan(0), 1);
        long[] idxs = lastIndex.data().asLong();

        //Now, extract out:
        labels2d = Nd4j.create(DataType.FLOAT, label3d.size(0), label3d.size(1));

        //Now, get and assign the corresponding subsets of 3d activations:
        for (int i = 0; i < idxs.length; i++) {
            long lastStepIdx = idxs[i];
            Preconditions.checkState(lastStepIdx >= 0, "Invalid last time step index: example %s in minibatch is entirely masked out" +
                    " (label mask is all 0s, meaning no label data is present for this example)", i);
            //TODO can optimize using reshape + pullRows
            labels2d.putRow(i, label3d.get(NDArrayIndex.point(i), NDArrayIndex.all(), NDArrayIndex.point(lastStepIdx)));
        }
    }

    toPreProcess.setLabels(labels2d);
    toPreProcess.setLabelsMaskArray(null);  //Remove label mask if present
}
 
Example 14
Source File: ManualTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testImage() throws Exception {
    INDArray array = Nd4j.create(11, 13);
    for (int i = 0; i < array.rows(); i++) {
        array.putRow(i, Nd4j.create(new double[] {0.0f, 0.1f, 0.2f, 0.3f, 0.4f, 0.5f, 0.6f, 0.7f, 0.8f, 0.9f, 1.0f,
                        1.2f, 1.3f}));
    }
    writeImage(array, new File("test.png"));
}
 
Example 15
Source File: BaseNDArrayFactory.java    From deeplearning4j with Apache License 2.0
/**
 * Produces a concatenated array consisting of the tensors fetched from the source array along the given dimension at the specified indexes.
 *
 * @param source source tensor
 * @param sourceDimension dimension along which tensors are taken from the source
 * @param indexes indexes of the tensors to pull from the source array
 * @param order ordering ('c' or 'f') of the returned array
 * @return a new array whose rows are the pulled tensors, in the order given by indexes
 */
@Override
public INDArray pullRows(INDArray source, int sourceDimension, int[] indexes, char order) {
    Shape.assertValidOrder(order);
    long vectorLength = source.shape()[sourceDimension];
    INDArray ret = Nd4j.createUninitialized(new long[] {indexes.length, vectorLength}, order);

    for (int cnt = 0; cnt < indexes.length; cnt++) {
        ret.putRow(cnt, source.tensorAlongDimension((int) indexes[cnt], sourceDimension));
    }

    return ret;
}
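For context, a typical call goes through the static Nd4j.pullRows(source, sourceDimension, indexes) helper with the same parameters; for a 2D array, the tensors along dimension 1 are its rows. A small hypothetical usage sketch:

INDArray source = Nd4j.linspace(1, 12, 12).reshape(4, 3);
INDArray pulled = Nd4j.pullRows(source, 1, new int[] {0, 2});  // rows 0 and 2, stacked into a new 2x3 array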
 
Example 16
Source File: FeatureUtil.java    From nd4j with Apache License 2.0
/**
 * Divides each row by its max
 *
 * @param toScale the matrix to divide by its row maxes
 */
public static void scaleByMax(INDArray toScale) {
    INDArray scale = toScale.max(1);
    for (int i = 0; i < toScale.rows(); i++) {
        double scaleBy = scale.getDouble(i);
        toScale.putRow(i, toScale.getRow(i).divi(scaleBy));
    }
}
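The putRow loop here divides each row by that row's own maximum. A roughly equivalent broadcast form (a sketch, not the library's implementation) would be:

toScale.diviColumnVector(toScale.max(1));  // divide every row, in place, by that row's max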
 
Example 17
Source File: GradientCheckTestsComputationGraph.java    From deeplearning4j with Apache License 2.0
@Test
public void testLSTMWithLastTimeStepVertex() {

    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                    .dataType(DataType.DOUBLE)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .dist(new NormalDistribution(0, 1))
                    .updater(new NoOp()).graphBuilder().addInputs("input").setOutputs("out")
                    .addLayer("lstm1", new LSTM.Builder().nIn(3).nOut(4).activation(Activation.TANH).build(),
                                    "input")
                    .addVertex("lastTS", new LastTimeStepVertex("input"), "lstm1")
                    .addLayer("out", new OutputLayer.Builder().nIn(4).nOut(2).activation(Activation.SOFTMAX)
                                    .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "lastTS")
                    .build();

    ComputationGraph graph = new ComputationGraph(conf);
    graph.init();

    Random r = new Random(12345);
    INDArray input = Nd4j.rand(new int[] {2, 3, 4});
    INDArray labels = TestUtils.randomOneHot(2, 2); //Here: labels are 2d (due to LastTimeStepVertex)

    if (PRINT_RESULTS) {
        System.out.println("testLSTMWithLastTimeStepVertex()");
        //for (int j = 0; j < graph.getNumLayers(); j++)
        //    System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams());
    }

    //First: test with no input mask array
    boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{input})
            .labels(new INDArray[]{labels}));

    String msg = "testLSTMWithLastTimeStepVertex()";
    assertTrue(msg, gradOK);

    //Second: test with input mask arrays.
    INDArray inMask = Nd4j.zeros(3, 4);
    inMask.putRow(0, Nd4j.create(new double[] {1, 1, 0, 0}));
    inMask.putRow(1, Nd4j.create(new double[] {1, 1, 1, 0}));
    inMask.putRow(2, Nd4j.create(new double[] {1, 1, 1, 1}));
    gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph).inputs(new INDArray[]{input})
            .labels(new INDArray[]{labels}).inputMask(new INDArray[]{inMask}));

    assertTrue(msg, gradOK);
    TestUtils.testModelSerialization(graph);
}
 
Example 18
Source File: LastTimeStepVertex.java    From deeplearning4j with Apache License 2.0
@Override
public INDArray doForward(boolean training, LayerWorkspaceMgr workspaceMgr) {
    //First: get the mask arrays for the given input, if any
    INDArray[] inputMaskArrays = graph.getInputMaskArrays();
    INDArray mask = (inputMaskArrays != null ? inputMaskArrays[inputIdx] : null);

    //Then: work out, from the mask array, which time step of activations we want, extract activations
    //Also: record where they came from (so the errors can be routed back to the right time steps during backprop)
    fwdPassShape = inputs[0].shape();

    INDArray out;
    if (mask == null) {
        //No mask array -> extract same (last) column for all
        long lastTS = inputs[0].size(2) - 1;
        out = inputs[0].get(NDArrayIndex.all(), NDArrayIndex.all(), NDArrayIndex.point(lastTS));
        out = workspaceMgr.dup(ArrayType.ACTIVATIONS, out);
        fwdPassTimeSteps = null; //Null -> last time step for all examples
    } else {
        val outShape = new long[] {inputs[0].size(0), inputs[0].size(1)};
        out = workspaceMgr.create(ArrayType.ACTIVATIONS, inputs[0].dataType(), outShape);

        //Want the index of the last non-zero entry in the mask array.
        //Trick: multiply by a row vector [0,1,2,3,...] and take the argmax along dimension 1 to find it
        long maxTsLength = fwdPassShape[2];
        INDArray row = Nd4j.linspace(0, maxTsLength - 1, maxTsLength, mask.dataType());
        INDArray temp = mask.mulRowVector(row);
        INDArray lastElementIdx = Nd4j.argMax(temp, 1);
        fwdPassTimeSteps = new int[(int)fwdPassShape[0]];
        for (int i = 0; i < fwdPassTimeSteps.length; i++) {
            fwdPassTimeSteps[i] = (int) lastElementIdx.getDouble(i);
        }

        //Now, get and assign the corresponding subsets of 3d activations:
        for (int i = 0; i < fwdPassTimeSteps.length; i++) {
            out.putRow(i, inputs[0].get(NDArrayIndex.point(i), NDArrayIndex.all(),
                            NDArrayIndex.point(fwdPassTimeSteps[i])));
        }
    }

    return out;
}
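The mask-to-last-index trick used above can be seen in isolation in the following sketch (hypothetical values, same Nd4j calls as the method above):

INDArray mask = Nd4j.create(new double[][] {{1, 1, 0, 0}, {1, 1, 1, 1}});  // [minibatch, timeSteps]
INDArray steps = Nd4j.linspace(0, 3, 4, mask.dataType());                  // [0, 1, 2, 3]
INDArray lastIdx = Nd4j.argMax(mask.mulRowVector(steps), 1);               // last non-zero step per example: [1, 3]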
 
Example 19
Source File: GanCnnInputPreProcessor.java    From dl4j-tutorials with MIT License
@Override
public INDArray preProcess(INDArray input, int miniBatchSize, LayerWorkspaceMgr workspaceMgr) {
	// [1 , numChannels * 2, inputHeight, inputWidth]
	this.shape = input.shape();
	// System.out.println("input = " + input);
	// System.out.println("input.sumNumber() = " + input.sumNumber());
	if (printLog) {
		System.out.println("this.shape = " + Arrays.toString(this.shape));
	}
	// Input: 4d activations (CNN)
	// Output: 4d activations (CNN)
	if (input.rank() != 4) {
		throw new IllegalArgumentException(
				"Invalid input: expect CNN activations with rank 4 (received input with shape " + Arrays.toString(input.shape()) + ")");
	}

	if (input.ordering() != 'c' || !Shape.hasDefaultStridesForShape(input)) {
		input = input.dup('c');
		// input = workspaceMgr.dup(ArrayType.ACTIVATIONS, input, 'c');
	}

	// Convert the two stacked CNN images into a single CNN image
	INDArray newInput = Nd4j.zeros(shape[0], shape[1] / 2, shape[2], shape[3]);
	for (int i = 0; i < shape[0]; i++) {
		// [numChannels * 2, inputHeight, inputWidth]: z + r
		INDArray multyImage = input.get(NDArrayIndex.point(i), NDArrayIndex.all());
		// System.out.println("multyImage.sumNumber() = " + multyImage.sumNumber());
		// [numChannels * 1, inputHeight, inputWidth]
		INDArray newMultyImage = newInput.getRow(i);

		int newRowIndex = 0;
		for (int j = 0; j < shape[1] / 2; j++) {
			// [inputHeight, inputWidth]
			INDArray rImageWH = null;
			if (j == 0) {
				// Step 1: read rImageWH and check whether it is empty
				// "z-input", "r-input"
				rImageWH = multyImage.get(NDArrayIndex.point(j + shape[1] / 2), NDArrayIndex.all());
				// System.out.println("rImageWH.sumNumber() = " + rImageWH.sumNumber());
				double firstPixelValue = rImageWH.getDouble(0, 0);
				if (firstPixelValue != -9999) {
					this.isRInputEmpty = false;
				} else {
					this.isRInputEmpty = true;
				}
			}

			if (!this.isRInputEmpty) {
				if (rImageWH == null) {
					rImageWH = multyImage.get(NDArrayIndex.point(j + shape[1] / 2), NDArrayIndex.all());
				}
				// System.out.println("newRowIndex = " + newRowIndex);
				newMultyImage.putRow(newRowIndex, rImageWH);
				// System.out.println("newMultyImage.sumNumber() = " + newMultyImage.sumNumber());
			} else {
				INDArray zImageWH = multyImage.get(NDArrayIndex.point(j), NDArrayIndex.all());
				newMultyImage.putRow(newRowIndex, zImageWH);
			}
			newRowIndex++;
		}

		newInput.putRow(i, newMultyImage);
	}
	// System.out.println("newInput = " + newInput);
	// System.out.println("newInput.sumNumber() = " + newInput.sumNumber());

	// return workspaceMgr.leverageTo(ArrayType.ACTIVATIONS, newInput);
	if (save) {
		ImageUtils.save("/myself/tmp/dl4j/gan/data/train/0/0.jpg", newInput.dup().mul(255));
	}
	return newInput;
}
 
Example 20
Source File: GridExecutionerTest.java    From nd4j with Apache License 2.0
@Test
public void testReverseFlow1() throws Exception {
    CudaGridExecutioner executioner = ((CudaGridExecutioner) Nd4j.getExecutioner());

    INDArray put = Nd4j.create(new double[]{5, 6});

    INDArray row1 = Nd4j.linspace(1, 4, 4);

    AllocationPoint point = AtomicAllocator.getInstance().getAllocationPoint(row1);

    assertEquals(0, executioner.getQueueLength());
    assertEquals(true, point.isActualOnHostSide());
    assertEquals(false, point.isActualOnDeviceSide());

    System.out.println("A: --------------------------");

    row1 = row1.reshape(2, 2);

    assertEquals(true, point.isActualOnHostSide());
    assertEquals(false, point.isActualOnDeviceSide());

    System.out.println("B: --------------------------");

    //((CudaGridExecutioner) Nd4j.getExecutioner()).flushQueueBlocking();

    row1.putRow(1, put);

    assertEquals(true, point.isActualOnHostSide());
    assertEquals(false, point.isActualOnDeviceSide());

    System.out.println("C: --------------------------");

    assertEquals(1, executioner.getQueueLength());

    executioner.flushQueueBlocking();

    assertEquals(0, executioner.getQueueLength());

    assertEquals(false, point.isActualOnHostSide());
    assertEquals(true, point.isActualOnDeviceSide());

    System.out.println("D: --------------------------");

    //((CudaGridExecutioner) Nd4j.getExecutioner()).flushQueueBlocking();

    //System.out.println(row1);
    assertArrayEquals(new float[]{1, 2, 5, 6}, row1.data().asFloat(), 0.1f);
}