Java Code Examples for org.nd4j.linalg.factory.Nd4j#zerosLike()

The following examples show how to use org.nd4j.linalg.factory.Nd4j#zerosLike(). The source file and project for each example are noted in the header above it.
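
Before the examples, here is a minimal sketch of the method's behavior (an illustration written for this page, not taken from the projects below): Nd4j.zerosLike(arr) returns a new, zero-filled INDArray with the same shape and data type as arr, leaving arr itself untouched.

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class ZerosLikeDemo {
    public static void main(String[] args) {
        // A 2x3 template array filled with ones.
        INDArray template = Nd4j.ones(2, 3);

        // zerosLike returns a fresh array with the template's shape, filled with zeros.
        INDArray zeros = Nd4j.zerosLike(template);

        System.out.println(zeros.shapeInfoToString()); // same 2x3 shape as the template
        System.out.println(zeros.sumNumber());         // 0.0 -- every element is zero
        System.out.println(template.sumNumber());      // 6.0 -- the template is not modified
    }
}
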
Example 1
Source File: CudaExecutionerTest.java    From nd4j with Apache License 2.0
@Test
public void testSoftmax1D_1() throws Exception {
    INDArray input1T = Nd4j.create(new double[]{ -0.75, 0.58, 0.42, 1.03, -0.61, 0.19, -0.37, -0.40, -1.42, -0.04});
    INDArray input1 = Nd4j.create(new double[]{ -0.75, 0.58, 0.42, 1.03, -0.61, 0.19, -0.37, -0.40, -1.42, -0.04});
    // zerosLike allocates a zero-filled array with input1's shape; copy then fills it with input1's values
    INDArray input2 = Nd4j.zerosLike(input1);
    Nd4j.copy(input1, input2);
    INDArray output1 = Nd4j.create(1, 10);
    INDArray output1T = Nd4j.create(1, 10);

    System.out.println("FA --------------------");
    Nd4j.getExecutioner().exec(new OldSoftMax(input1, output1));
    Nd4j.getExecutioner().exec(new OldSoftMax(input1T, output1T));
    System.out.println("FB --------------------");

    System.out.println("Softmax = " + output1);
    INDArray output2 = Nd4j.create(1, 10);
    Nd4j.getExecutioner().exec(new SoftMaxDerivative(input2, output2));
    System.out.println("Softmax Derivative = " + output2);

    INDArray assertion1 = Nd4j.create(new double[]{0.04, 0.16, 0.14, 0.26, 0.05, 0.11, 0.06, 0.06, 0.02, 0.09});

    assertArrayEquals(assertion1.data().asFloat(), output1.data().asFloat(), 0.01f);
    assertArrayEquals(assertion1.data().asFloat(), output1T.data().asFloat(), 0.01f);

}
 
Example 2
Source File: CudaTransformsTests.java    From nd4j with Apache License 2.0
@Test
public void testSoftmax1D_1() throws Exception {
    INDArray input1T = Nd4j.create(new double[]{ -0.75, 0.58, 0.42, 1.03, -0.61, 0.19, -0.37, -0.40, -1.42, -0.04}).transpose();
    INDArray input1 = Nd4j.create(new double[]{ -0.75, 0.58, 0.42, 1.03, -0.61, 0.19, -0.37, -0.40, -1.42, -0.04});
    INDArray input2 = Nd4j.zerosLike(input1);
    Nd4j.copy(input1, input2);
    INDArray output1 = Nd4j.create(1, 10);
    INDArray output1T = Nd4j.create(1, 10);

    System.out.println("FA --------------------");
    Nd4j.getExecutioner().exec(new OldSoftMax(input1, output1));
    Nd4j.getExecutioner().exec(new OldSoftMax(input1T, output1T));
    System.out.println("FB --------------------");

    System.out.println("Softmax = " + output1);
    INDArray output2 = Nd4j.create(1, 10);
    Nd4j.getExecutioner().exec(new SoftMaxDerivative(input2, output2));
    System.out.println("Softmax Derivative = " + output2);

    INDArray assertion1 = Nd4j.create(new double[]{0.04, 0.16, 0.14, 0.26, 0.05, 0.11, 0.06, 0.06, 0.02, 0.09});

    assertArrayEquals(assertion1.data().asFloat(), output1.data().asFloat(), 0.01f);
    assertArrayEquals(assertion1.data().asFloat(), output1T.data().asFloat(), 0.01f);

}
 
Example 3
Source File: NativeOpExecutionerTest.java    From nd4j with Apache License 2.0
@Test
public void testSoftmax1D_1() throws Exception {
    INDArray input1T = Nd4j.create(new double[]{ -0.75, 0.58, 0.42, 1.03, -0.61, 0.19, -0.37, -0.40, -1.42, -0.04});
    INDArray input1 = Nd4j.create(new double[]{ -0.75, 0.58, 0.42, 1.03, -0.61, 0.19, -0.37, -0.40, -1.42, -0.04});
    INDArray input2 = Nd4j.zerosLike(input1);
    Nd4j.copy(input1, input2);
    INDArray output1 = Nd4j.create(1, 10);
    INDArray output1T = Nd4j.create(1, 10);

    System.out.println("FA --------------------");
    Nd4j.getExecutioner().exec(new OldSoftMax(input1, output1));
    Nd4j.getExecutioner().exec(new OldSoftMax(input1T, output1T));
    System.out.println("FB --------------------");

    System.out.println("Softmax = " + output1);
    INDArray output2 = Nd4j.create(1, 10);
    Nd4j.getExecutioner().exec(new SoftMaxDerivative(input2, output2));
    System.out.println("Softmax Derivative = " + output2);

    INDArray assertion1 = Nd4j.create(new double[]{0.04, 0.16, 0.14, 0.26, 0.05, 0.11, 0.06, 0.06, 0.02, 0.09});

    assertArrayEquals(assertion1.data().asFloat(), output1.data().asFloat(), 0.01f);
    assertArrayEquals(assertion1.data().asFloat(), output1T.data().asFloat(), 0.01f);

}
 
Example 4
Source File: CudaTransformsTests.java    From nd4j with Apache License 2.0
@Test
public void testSoftmax2D_T() {
    INDArray input1 = Nd4j.create(1000).transpose();
    INDArray input2 = Nd4j.zerosLike(input1);
    Nd4j.copy(input1, input2);
    INDArray output1 = Nd4j.create(1, 1000);
    Nd4j.getExecutioner().exec(new OldSoftMax(input1, output1));
    System.out.println("Softmax = " + output1);
    INDArray output2 = Nd4j.create(1, 1000);
    Nd4j.getExecutioner().exec(new SoftMaxDerivative(input2, output2));
    System.out.println("Softmax Derivative = " + output2);

    assertEquals(1.0f, output1.sumNumber().floatValue(), 0.01f);
    assertEquals(1.0f, output2.sumNumber().floatValue(), 0.01f);
}
 
Example 5
Source File: LayerOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testLayerNormOP() {
    final INDArray random = Nd4j.rand(DataType.DOUBLE, 10, 4);
    // ulike() allocates a new, uninitialized array with the same shape and type as random
    final INDArray standardized = random.ulike();
    Nd4j.getExecutioner().exec(new Standardize(random, standardized, 1));

    final INDArray gain = Nd4j.rand(DataType.DOUBLE, 4);
    final INDArray bias = Nd4j.rand(DataType.DOUBLE, 4);
    final INDArray res = standardized.mulRowVector(gain).addRowVector(bias);

    final INDArray output = Nd4j.zerosLike(res);
    Nd4j.getExecutioner().exec(new LayerNorm(standardized, gain, bias, output, true, 1));

    assertEquals(res, output);
}
 
Example 6
Source File: LayerOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testLayerNormOPNoBias() {
    final INDArray random = Nd4j.rand(DataType.DOUBLE, 10, 4);
    final INDArray standardized = random.ulike();
    Nd4j.getExecutioner().exec(new Standardize(random, standardized, 1));

    final INDArray gain = Nd4j.rand(DataType.DOUBLE, 4);
    final INDArray res = standardized.mulRowVector(gain);

    final INDArray output = Nd4j.zerosLike(res);
    Nd4j.getExecutioner().exec(new LayerNorm(standardized, gain, output, true, 1));

    assertEquals(res, output);
}
 
Example 7
Source File: TransformOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testStandardizeOP() {
    final INDArray random = Nd4j.rand(new int[]{10, 4});

    final int[] axis = new int[]{1};
    final INDArray means = random.mean(axis);
    final INDArray std = random.std(false, axis);
    final INDArray res = random.subColumnVector(means).divColumnVector(std);

    final INDArray output = Nd4j.zerosLike(res);
    Nd4j.getExecutioner().exec(new Standardize(random, output, 1));

    assertEquals(res, output);
}
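
Examples 5 through 7 share a pattern worth calling out: Nd4j.zerosLike(res) preallocates an output buffer with the correct shape and data type, and the executioner then writes the op's result into that buffer. A minimal sketch of the same pattern with a simple element-wise transform (the Exp op is used purely for illustration; its package path varies across nd4j versions):

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.api.ops.impl.transforms.strict.Exp;
import org.nd4j.linalg.factory.Nd4j;

public class PreallocatedOutputDemo {
    public static void main(String[] args) {
        INDArray in = Nd4j.rand(3, 3);

        // Preallocate the output with a matching shape and type;
        // exec writes the result into it rather than allocating a new array.
        INDArray out = Nd4j.zerosLike(in);
        Nd4j.getExecutioner().exec(new Exp(in, out));

        System.out.println(out); // element-wise e^x of in
    }
}
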
 
Example 8
Source File: MtcnnUtil.java    From mtcnn-java with Apache License 2.0
/**
 * Non Maximum Suppression - greedily selects the boxes with high confidence, keeps the boxes whose overlap
 * area is below the threshold, and discards the others.
 *
 * original code:
 *  - https://github.com/kpzhang93/MTCNN_face_detection_alignment/blob/master/code/codes/MTCNNv2/nms.m
 *  - https://github.com/davidsandberg/facenet/blob/master/src/align/detect_face.py#L687
 *
 * @param boxes nd array with bounding boxes: [[x1, y1, x2, y2, score]]
 * @param threshold NMS threshold - retain boxes with overlap <= threshold
 * @param nmsType NMS method to apply. Available values ('Min', 'Union')
 * @return Returns the NMS result
 */
public static INDArray nonMaxSuppression(INDArray boxes, double threshold, NonMaxSuppressionType nmsType) {

	if (boxes.isEmpty()) {
		return Nd4j.empty();
	}

	// TODO Try to prevent following duplications!
	INDArray x1 = boxes.get(all(), point(0)).dup();
	INDArray y1 = boxes.get(all(), point(1)).dup();
	INDArray x2 = boxes.get(all(), point(2)).dup();
	INDArray y2 = boxes.get(all(), point(3)).dup();
	INDArray s = boxes.get(all(), point(4)).dup();

	//area = (x2 - x1 + 1) * (y2 - y1 + 1)
	INDArray area = (x2.sub(x1).add(1)).mul(y2.sub(y1).add(1));

	// sorted_s = np.argsort(s)
	INDArray sortedS = Nd4j.sortWithIndices(s, 0, SORT_ASCENDING)[0];

	INDArray pick = Nd4j.zerosLike(s);
	int counter = 0;

	while (sortedS.size(0) > 0) {

		if (sortedS.size(0) == 1) {
			pick.put(counter++, sortedS.dup());
			break;
		}

		long lastIndex = sortedS.size(0) - 1;
		INDArray i = sortedS.get(point(lastIndex), all()); // last element
		INDArray idx = sortedS.get(interval(0, lastIndex), all()).transpose(); // all elements except the last
		pick.put(counter++, i.dup());

		INDArray xx1 = Transforms.max(x1.get(idx), x1.get(i).getInt(0));
		INDArray yy1 = Transforms.max(y1.get(idx), y1.get(i).getInt(0));
		INDArray xx2 = Transforms.min(x2.get(idx), x2.get(i).getInt(0));
		INDArray yy2 = Transforms.min(y2.get(idx), y2.get(i).getInt(0));

		// w = np.maximum(0.0, xx2 - xx1 + 1)
		// h = np.maximum(0.0, yy2 - yy1 + 1)
		// inter = w * h
		INDArray w = Transforms.max(xx2.sub(xx1).add(1), 0.0f);
		INDArray h = Transforms.max(yy2.sub(yy1).add(1), 0.0f);
		INDArray inter = w.mul(h);

		// if method is 'Min':
		//   o = inter / np.minimum(area[i], area[idx])
		// else:
		//   o = inter / (area[i] + area[idx] - inter)
		int areaI = area.get(i).getInt(0);
		INDArray o = (nmsType == NonMaxSuppressionType.Min) ?
				inter.div(Transforms.min(area.get(idx), areaI)) :
				inter.div(area.get(idx).add(areaI).sub(inter));

		INDArray oIdx = MtcnnUtil.getIndexWhereVector(o, value -> value <= threshold);
		//INDArray oIdx = getIndexWhereVector2(o, Conditions.lessThanOrEqual(threshold));

		if (oIdx.isEmpty()) {
			break;
		}

		sortedS = Nd4j.expandDims(sortedS.get(oIdx), 0).transpose();
	}

	//pick = pick[0:counter]
	return (counter == 0) ? Nd4j.empty() : pick.get(interval(0, counter));
}
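
A hypothetical invocation of the method above, using made-up boxes (this assumes NonMaxSuppressionType is an enum nested in MtcnnUtil, with the 'Union' and 'Min' values the Javadoc mentions):

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class NmsDemo {
    public static void main(String[] args) {
        // Three candidate boxes as rows of [x1, y1, x2, y2, score]:
        // the first two overlap heavily, the third is disjoint.
        INDArray boxes = Nd4j.create(new double[][]{
                {10, 10, 50, 50, 0.9},
                {12, 12, 52, 52, 0.8},
                {100, 100, 140, 140, 0.7}
        });

        // With a 0.5 overlap threshold, the lower-scoring second box
        // should be suppressed in favor of the first.
        INDArray kept = MtcnnUtil.nonMaxSuppression(boxes, 0.5,
                MtcnnUtil.NonMaxSuppressionType.Union);

        System.out.println(kept); // indices of the boxes that survive suppression
    }
}
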
 
Example 9
Source File: Variance.java    From nd4j with Apache License 2.0
@Override
public INDArray noOp() {
    // noOp yields a zero array shaped like the op's input x.
    return Nd4j.zerosLike(x());
}
 
Example 10
Source File: Variance.java    From deeplearning4j with Apache License 2.0
@Override
public INDArray noOp() {
    return Nd4j.zerosLike(x());
}
 
Example 11
Source File: KerasBatchNormalization.java    From deeplearning4j with Apache License 2.0
/**
 * Set weights for layer.
 *
 * @param weights Map from parameter name to INDArray.
 * @throws InvalidKerasConfigurationException if a required parameter is missing from the weights map
 */
@Override
public void setWeights(Map<String, INDArray> weights) throws InvalidKerasConfigurationException {
    this.weights = new HashMap<>();
    if (center) {
        if (weights.containsKey(PARAM_NAME_BETA))
            this.weights.put(BatchNormalizationParamInitializer.BETA, weights.get(PARAM_NAME_BETA));
        else
            throw new InvalidKerasConfigurationException("Parameter " + PARAM_NAME_BETA + " does not exist in weights");
    } else {
        // Note: beta weights are still read from the map here, so PARAM_NAME_BETA is assumed
        // to be present even when center == false.
        INDArray dummyBeta = Nd4j.zerosLike(weights.get(PARAM_NAME_BETA));
        this.weights.put(BatchNormalizationParamInitializer.BETA, dummyBeta);
    }
    if (scale) {
        if (weights.containsKey(PARAM_NAME_GAMMA))
            this.weights.put(BatchNormalizationParamInitializer.GAMMA, weights.get(PARAM_NAME_GAMMA));
        else
            throw new InvalidKerasConfigurationException(
                    "Parameter " + PARAM_NAME_GAMMA + " does not exist in weights");
    } else {
        INDArray dummyGamma = weights.containsKey(PARAM_NAME_GAMMA)
                ? Nd4j.onesLike(weights.get(PARAM_NAME_GAMMA))
                : Nd4j.onesLike(weights.get(PARAM_NAME_BETA));
        this.weights.put(BatchNormalizationParamInitializer.GAMMA, dummyGamma);
    }
    if (weights.containsKey(conf.getLAYER_FIELD_BATCHNORMALIZATION_MOVING_MEAN()))
        this.weights.put(BatchNormalizationParamInitializer.GLOBAL_MEAN, weights.get(conf.getLAYER_FIELD_BATCHNORMALIZATION_MOVING_MEAN()));
    else
        throw new InvalidKerasConfigurationException(
                "Parameter " + conf.getLAYER_FIELD_BATCHNORMALIZATION_MOVING_MEAN() + " does not exist in weights");
    if (weights.containsKey(conf.getLAYER_FIELD_BATCHNORMALIZATION_MOVING_VARIANCE()))
        this.weights.put(BatchNormalizationParamInitializer.GLOBAL_VAR, weights.get(conf.getLAYER_FIELD_BATCHNORMALIZATION_MOVING_VARIANCE()));
    else
        throw new InvalidKerasConfigurationException(
                "Parameter " + conf.getLAYER_FIELD_BATCHNORMALIZATION_MOVING_VARIANCE() + " does not exist in weights");
    if (weights.size() > 4) {
        // keySet() is a live view, so these removals also mutate the caller's weights map
        Set<String> paramNames = weights.keySet();
        paramNames.remove(PARAM_NAME_BETA);
        paramNames.remove(PARAM_NAME_GAMMA);
        paramNames.remove(conf.getLAYER_FIELD_BATCHNORMALIZATION_MOVING_MEAN());
        paramNames.remove(conf.getLAYER_FIELD_BATCHNORMALIZATION_MOVING_VARIANCE());
        String unknownParamNames = paramNames.toString();
        log.warn("Attempting to set weights for unknown parameters: "
                + unknownParamNames.substring(1, unknownParamNames.length() - 1));
    }
}