Java Code Examples for org.nd4j.linalg.factory.Nd4j#empty()

The following examples show how to use org.nd4j.linalg.factory.Nd4j#empty(). They are taken from open-source projects; the source file, project, and license are noted above each example.
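For orientation, here is a minimal, self-contained sketch of the method before the project examples (assuming an ND4J backend such as nd4j-native is on the classpath; the class and variable names are illustrative only):

import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class EmptyArraySketch {
    public static void main(String[] args) {
        // Nd4j.empty() creates a zero-element array, used throughout the examples
        // below as a typed "no result" placeholder instead of null.
        INDArray untyped = Nd4j.empty();               // element type is Nd4j.dataType()
        INDArray typed = Nd4j.empty(DataType.FLOAT);   // explicit element type

        System.out.println(untyped.isEmpty());         // true
        System.out.println(typed.dataType());          // FLOAT
    }
}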
Example 1
Source File: KDTreeTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testNoDuplicates() {
    int N = 100;
    KDTree bigTree = new KDTree(2);

    List<INDArray> points = new ArrayList<>();
    for (int i = 0; i < N; ++i) {
        double[] data = new double[]{i, i};
        points.add(Nd4j.createFromArray(data));
    }

    for (int i = 0; i < N; ++i) {
        bigTree.insert(points.get(i));
    }

    assertEquals(N, bigTree.size());

    INDArray node = Nd4j.empty(DataType.DOUBLE);
    for (int i = 0; i < N; ++i) {
        node = bigTree.delete(node.isEmpty() ? points.get(i) : node);
    }

    assertEquals(0, bigTree.size());
}
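The test above uses Nd4j.empty(DataType.DOUBLE) as a "no node selected yet" sentinel. The idiom in isolation (a sketch; the variable names are not from the project):

// On the first iteration node is still empty, so the real point is used instead.
INDArray node = Nd4j.empty(DataType.DOUBLE);
INDArray firstPoint = Nd4j.createFromArray(new double[]{0.0, 0.0});
INDArray toDelete = node.isEmpty() ? firstPoint : node;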
 
Example 2
Source File: MtcnnUtil.java    From mtcnn-java with Apache License 2.0
/**
 * Manual (inefficient) implementation of numpy's where (https://github.com/deeplearning4j/deeplearning4j/issues/6184) for matrix input.
 *
 * @param input matrix to scan
 * @param predicate condition tested against every element
 * @return the [row, column] index pairs of all matching elements, or an empty array if nothing matches
 */
public static INDArray getIndexWhereMatrix(INDArray input, Predicate<Double> predicate) {

	Assert.isTrue(input.isMatrix(), "Expected matrix but found: " + input.rank());

	List<Float> yxIndexList = new ArrayList<>();
	for (int y = 0; y < input.rows(); y++) {
		for (int x = 0; x < input.columns(); x++) {
			double v = input.getDouble(y, x);
			if (predicate.test(v)) {
				yxIndexList.add((float) y);
				yxIndexList.add((float) x);
			}
		}
	}

	if (CollectionUtils.isEmpty(yxIndexList)) {
		return Nd4j.empty();
	}

	return Nd4j.create(yxIndexList).reshape(new int[] { yxIndexList.size() / 2, 2 });
}
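A hypothetical call of the helper above (the matrix is made up for illustration, and the usual ND4J imports are assumed); the empty return value is how a caller detects that nothing matched:

INDArray scores = Nd4j.create(new double[][]{{0.1, 0.9}, {0.7, 0.2}});
INDArray idx = MtcnnUtil.getIndexWhereMatrix(scores, v -> v > 0.5);
if (!idx.isEmpty()) {
    // idx has shape [nMatches, 2]; here the matching positions are [0, 1] and [1, 0]
    System.out.println(idx);
}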
 
Example 3
Source File: WordVectorsImpl.java    From deeplearning4j with Apache License 2.0
/**
 * This method returns a 2D array in which each row is the vector of the corresponding label.
 *
 * @param labels labels (words) to look up
 * @return a 2D array with one row per resolvable label, or an empty array if none of the labels is known
 */
@Override
public INDArray getWordVectors(@NonNull Collection<String> labels) {
    int indexes[] = new int[labels.size()];
    int cnt = 0;
    boolean useIndexUnknown = useUnknown && vocab.containsWord(getUNK());

    for (String label : labels) {
        if (vocab.containsWord(label)) {
            indexes[cnt] = vocab.indexOf(label);
        } else
            indexes[cnt] = useIndexUnknown ? vocab.indexOf(getUNK()) : -1;
        cnt++;
    }

    while (ArrayUtils.contains(indexes, -1)) {
        indexes = ArrayUtils.removeElement(indexes, -1);
    }
    if (indexes.length == 0) {
        return Nd4j.empty(((InMemoryLookupTable)lookupTable).getSyn0().dataType());
    }

    INDArray result = Nd4j.pullRows(lookupTable.getWeights(), 1, indexes);
    return result;
}
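A hypothetical caller-side check (the wordVectors instance and the labels are placeholders, and java.util.Arrays is assumed to be imported). Because the method returns an empty array rather than null when no label resolves, callers can simply branch on isEmpty():

INDArray vectors = wordVectors.getWordVectors(Arrays.asList("day", "night"));
if (vectors.isEmpty()) {
    // none of the requested labels (nor the UNK token) is in the vocabulary
}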
 
Example 4
Source File: EmptyTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testEmpyArray_1() {
    val array = Nd4j.empty();

    assertNotNull(array);
    assertTrue(array.isEmpty());

    assertFalse(array.isScalar());
    assertFalse(array.isVector());
    assertFalse(array.isRowVector());
    assertFalse(array.isColumnVector());
    assertFalse(array.isCompressed());
    assertFalse(array.isSparse());

    assertFalse(array.isAttached());

    assertEquals(Nd4j.dataType(), array.dataType());
}
 
Example 5
Source File: EmptyTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testEmptyDtype_1() {
    val array = Nd4j.empty(DataType.INT);

    assertTrue(array.isEmpty());
    assertEquals(DataType.INT, array.dataType());
}
 
Example 6
Source File: BaseNDArray.java    From deeplearning4j with Apache License 2.0
@Override
public INDArray cond(Condition condition) {
    if(isEmpty())
        return Nd4j.empty(DataType.BOOL);
    INDArray ret = Nd4j.createUninitialized(DataType.BOOL, this.shape());
    Nd4j.getExecutioner().exec(new MatchConditionTransform(this,ret, condition));
    return ret;
}
 
Example 7
Source File: BaseNDArray.java    From deeplearning4j with Apache License 2.0
@Override
public INDArray isNaN(){
    validateNumericalArray("isNaN", true);
    if(isEmpty())
        return Nd4j.empty(DataType.BOOL);
    return Nd4j.getExecutioner().exec(new MatchConditionTransform(this, Nd4j.createUninitialized(DataType.BOOL, this.shape(), this.ordering()), Conditions.isNan()));
}
 
Example 8
Source File: BaseNDArray.java    From deeplearning4j with Apache License 2.0
@Override
public INDArray isInfinite(){
    validateNumericalArray("isInfinite", true);
    if(isEmpty())
        return Nd4j.empty(DataType.BOOL);
    return Nd4j.getExecutioner().exec(new MatchConditionTransform(this, Nd4j.createUninitialized(DataType.BOOL, this.shape(), this.ordering()), Conditions.isInfinite()));
}
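Examples 6 through 8 follow the same convention: an element-wise predicate applied to an empty array returns an empty BOOL array instead of throwing, so empty inputs propagate cleanly through a pipeline. A minimal check of that behaviour (assuming the usual ND4J imports):

INDArray empty = Nd4j.empty(DataType.FLOAT);
INDArray mask = empty.isNaN();
System.out.println(mask.isEmpty());    // true
System.out.println(mask.dataType());   // BOOL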
 
Example 9
Source File: EmptyTests.java    From deeplearning4j with Apache License 2.0
@Test
public void testEmptyNoop() {
    val output = Nd4j.empty(DataType.LONG);

    val op = DynamicCustomOp.builder("noop")
            .addOutputs(output)
            .build();

    Nd4j.exec(op);
}
 
Example 10
Source File: ShapeOpValidation.java    From deeplearning4j with Apache License 2.0
@Test
public void testEmptyGather(){
    /*
    tf.reset_default_graph()
    inputFloat = tf.constant([], shape=[0,2,3], dtype=tf.float32)
    emptyInt = tf.constant([], shape=[0], dtype=tf.int32)

    gather = tf.gather(params=inputFloat, indices=emptyInt)

    sess = tf.Session()
    out = sess.run([gather])
    print(out[0].shape)
    print(out[0]);

    > (0, 2, 3)
    > []
     */
    INDArray emptyFloat = Nd4j.create(DataType.FLOAT, 0, 2, 3);
    INDArray emptyInt = Nd4j.create(DataType.INT, 0);
    DynamicCustomOp op = DynamicCustomOp.builder("gather")
            .addInputs(emptyFloat, emptyInt)
            .build();

    List<LongShapeDescriptor> l = op.calculateOutputShape();
    assertEquals(1, l.size());
    assertTrue(l.get(0).isEmpty());
    assertArrayEquals(new long[]{0,2,3}, l.get(0).getShape());

    INDArray out = Nd4j.empty(DataType.FLOAT);
    op.addOutputArgument(out);
}
 
Example 11
Source File: Shape.java    From deeplearning4j with Apache License 2.0
/**
 * Create an INDArray to represent the (possibly null) int[] dimensions.
 * If null or length 0, returns an empty INT array. Otherwise, returns a 1d INT NDArray
 * @param dimensions Dimensions to convert
 * @return Dimensions as an INDArray
 */
public static INDArray ndArrayDimFromInt(int... dimensions){
    if (dimensions == null || dimensions.length == 0)
        return Nd4j.empty(DataType.INT);
    else
        return Nd4j.createFromArray(dimensions);
}
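Usage sketch for the helper above (Shape here refers to org.nd4j.linalg.api.shape.Shape, the class this example is taken from; the usual ND4J imports are assumed):

INDArray none = Shape.ndArrayDimFromInt();       // no dimensions: empty INT array
INDArray dims = Shape.ndArrayDimFromInt(0, 2);   // 1d INT vector holding [0, 2]
System.out.println(none.isEmpty());              // true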
 
Example 12
Source File: BaseNDArray.java    From deeplearning4j with Apache License 2.0
@Override
public INDArray castTo(DataType dataType) {
    if(dataType == dataType())  //No-op if correct datatype
        return this;
    if(isEmpty() && rank() == 0){
        return Nd4j.empty(dataType);
    }
    val result = Nd4j.createUninitialized(dataType, this.shape(), this.ordering());
    result.assign(this);
    return result;
}
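A short illustration of the empty branch above, assuming (as the implementation suggests) that arrays produced by Nd4j.empty() are empty and rank 0:

INDArray f = Nd4j.empty(DataType.FLOAT);
INDArray i = f.castTo(DataType.INT);
// i is still empty, but now reports DataType.INT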
 
Example 13
Source File: SkipGram.java    From deeplearning4j with Apache License 2.0
public double iterateSample(T w1, T lastWord, AtomicLong nextRandom, double alpha, boolean isInference,
                INDArray inferenceVector) {
    if (w1 == null || lastWord == null || (lastWord.getIndex() < 0 && !isInference)
                    || w1.getIndex() == lastWord.getIndex() || w1.getLabel().equals("STOP")
                    || lastWord.getLabel().equals("STOP") || w1.getLabel().equals("UNK")
                    || lastWord.getLabel().equals("UNK")) {
        return 0.0;
    }


    double score = 0.0;

    int[] idxSyn1 = null;
    byte[] codes = null;
    if (configuration.isUseHierarchicSoftmax()) {
        idxSyn1 = new int[w1.getCodeLength()];
        codes = new byte[w1.getCodeLength()];
        for (int i = 0; i < w1.getCodeLength(); i++) {
            int code = w1.getCodes().get(i);
            int point = w1.getPoints().get(i);
            if (point >= vocabCache.numWords() || point < 0)
                continue;

            codes[i] = (byte)code;
            idxSyn1[i] = point;
        }
    } else {
        idxSyn1 = new int[0];
        codes = new byte[0];
    }


    int target = w1.getIndex();
    //negative sampling
    if (negative > 0) {
        if (syn1Neg == null) {
            ((InMemoryLookupTable<T>) lookupTable).initNegative();
            syn1Neg = new DeviceLocalNDArray(((InMemoryLookupTable<T>) lookupTable).getSyn1Neg());
        }
    }

    if (batches.get() == null) {
        batches.set(new ArrayList<Aggregate>());
    }

    //log.info("VocabWords: {}; lastWordIndex: {}; syn1neg: {}", vocabCache.numWords(), lastWord.getIndex(), syn1Neg.get().rows());

    /*AggregateSkipGram sg = new AggregateSkipGram(syn0.get(), syn1.get(), syn1Neg.get(), expTable.get(), table.get(),
                    lastWord.getIndex(), idxSyn1, codes, (int) negative, target, vectorLength, alpha,
                    nextRandom.get(), vocabCache.numWords(), inferenceVector);
    if (!isInference) {
        batches.get().add(sg);
        if (batches.get().size() > 4096) {
            Nd4j.getExecutioner().exec(batches.get());
            batches.get().clear();
        }
    } else {
        Nd4j.getExecutioner().exec(sg);
    }*/

    nextRandom.set(Math.abs(nextRandom.get() * 25214903917L + 11));

    SkipGramRound sg = null;
    boolean useHS = configuration.isUseHierarchicSoftmax();
    boolean useNegative = configuration.getNegative() > 0;

    int[] intCodes = new int[codes.length];
    for (int i = 0; i < codes.length; ++i) {
        intCodes[i] = codes[i];
    }

    if (useHS && useNegative) {
        sg = new SkipGramRound(Nd4j.scalar(lastWord.getIndex()), Nd4j.scalar(target),
                syn0.get(), syn1.get(), syn1Neg.get(), expTable.get(),
                table.get(), (int) negative, Nd4j.create(idxSyn1), Nd4j.create(intCodes),
                Nd4j.scalar(alpha), Nd4j.scalar(nextRandom.get()),
                inferenceVector != null ? inferenceVector : Nd4j.empty(syn0.get().dataType()),
                configuration.isPreciseMode(), workers);
    }
    else if (useHS) {
        sg = new SkipGramRound(lastWord.getIndex(), syn0.get(), syn1.get(), expTable.get(),
                idxSyn1, codes,
                alpha, nextRandom.get(),
                inferenceVector != null ? inferenceVector : Nd4j.empty(syn0.get().dataType()));
    }
    else if (useNegative) {
        sg = new SkipGramRound(lastWord.getIndex(), target, syn0.get(), syn1Neg.get(), expTable.get(),
                table.get(), (int) negative,
                alpha, nextRandom.get(),
                inferenceVector != null ? inferenceVector : Nd4j.empty(syn0.get().dataType()));
    }

    Nd4j.getExecutioner().exec(sg);


    return score;
}
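The recurring expression inferenceVector != null ? inferenceVector : Nd4j.empty(syn0.get().dataType()) is the main role Nd4j.empty() plays here: when no inference vector is supplied, a typed empty array is passed to the native op instead of null. The idiom in isolation (the arrays are stand-ins, not the real lookup table):

INDArray inferenceVector = null;                     // training mode: no inference vector
INDArray syn0 = Nd4j.create(DataType.FLOAT, 10, 5);  // stand-in for the lookup-table weights
INDArray arg = inferenceVector != null ? inferenceVector : Nd4j.empty(syn0.dataType());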
 
Example 14
Source File: MtcnnService.java    From mtcnn-java with Apache License 2.0
/**
 * STAGE 2 (refinement network).
 *
 * @param image input image
 * @param totalBoxes candidate bounding boxes from stage 1
 * @param padResult padding information for the candidate boxes
 * @return the refined bounding boxes, or an empty array if no candidate survives the score threshold
 * @throws IOException if running the refine-net graph fails
 */
private INDArray refinementStage(INDArray image, INDArray totalBoxes, MtcnnUtil.PadResult padResult) throws IOException {

	// num_boxes = total_boxes.shape[0]
	int numBoxes = totalBoxes.isEmpty() ? 0 : (int) totalBoxes.shape()[0];
	// if num_boxes == 0:
	//   return total_boxes, stage_status
	if (numBoxes == 0) {
		return totalBoxes;
	}

	INDArray tempImg1 = computeTempImage(image, numBoxes, padResult, 24);

	//this.refineNetGraph.associateArrayWithVariable(tempImg1, this.refineNetGraph.variableMap().get("rnet/input"));
	//List<DifferentialFunction> refineNetResults = this.refineNetGraph.exec().getRight();
	//INDArray out0 = refineNetResults.stream().filter(df -> df.getOwnName().equalsIgnoreCase("rnet/fc2-2/fc2-2"))
	//		.findFirst().get().outputVariable().getArr();
	//INDArray out1 = refineNetResults.stream().filter(df -> df.getOwnName().equalsIgnoreCase("rnet/prob1"))
	//		.findFirst().get().outputVariable().getArr();

	Map<String, INDArray> resultMap = this.refineNetGraphRunner.run(Collections.singletonMap("rnet/input", tempImg1));
	//INDArray out0 = resultMap.get("rnet/fc2-2/fc2-2");  // for ipazc/mtcnn model
	INDArray out0 = resultMap.get("rnet/conv5-2/conv5-2");
	INDArray out1 = resultMap.get("rnet/prob1");

	//  score = out1[1, :]
	INDArray score = out1.get(all(), point(1)).transposei();

	// ipass = np.where(score > self.__steps_threshold[1])
	INDArray ipass = MtcnnUtil.getIndexWhereVector(score.transpose(), s -> s > stepsThreshold[1]);
	//INDArray ipass = MtcnnUtil.getIndexWhereVector2(score.transpose(), Conditions.greaterThan(stepsThreshold[1]));

	if (ipass.isEmpty()) {
		totalBoxes = Nd4j.empty();
		return totalBoxes;
	}
	// total_boxes = np.hstack([total_boxes[ipass[0], 0:4].copy(), np.expand_dims(score[ipass].copy(), 1)])
	INDArray b1 = totalBoxes.get(new SpecifiedIndex(ipass.toLongVector()), interval(0, 4));
	INDArray b2 = ipass.isScalar() ? score.get(ipass).reshape(1, 1)
			: Nd4j.expandDims(score.get(ipass), 1);
	totalBoxes = Nd4j.hstack(b1, b2);

	// mv = out0[:, ipass[0]]
	INDArray mv = out0.get(new SpecifiedIndex(ipass.toLongVector()), all()).transposei();

	// if total_boxes.shape[0] > 0:
	if (!totalBoxes.isEmpty() && totalBoxes.shape()[0] > 0) {
		// pick = self.__nms(total_boxes, 0.7, 'Union')
		INDArray pick = MtcnnUtil.nonMaxSuppression(totalBoxes.dup(), 0.7, MtcnnUtil.NonMaxSuppressionType.Union).transpose();

		// total_boxes = total_boxes[pick, :]
		totalBoxes = totalBoxes.get(new SpecifiedIndex(pick.toLongVector()), all());

		// total_boxes = self.__bbreg(total_boxes.copy(), np.transpose(mv[:, pick]))
		totalBoxes = MtcnnUtil.bbreg(totalBoxes, mv.get(all(), new SpecifiedIndex(pick.toLongVector())).transpose());

		// total_boxes = self.__rerec(total_boxes.copy())
		totalBoxes = MtcnnUtil.rerec(totalBoxes, false);
	}

	return totalBoxes;
}
 
Example 15
Source File: CBOW.java    From deeplearning4j with Apache License 2.0
public void iterateSample(T currentWord, int[] windowWords, boolean[] wordStatuses, AtomicLong nextRandom, double alpha,
                          boolean isInference, int numLabels, boolean trainWords, INDArray inferenceVector) {
    int[] idxSyn1 = null;
    byte[] codes = null;

    if (configuration.isUseHierarchicSoftmax()) {
        idxSyn1 = new int[currentWord.getCodeLength()];
        codes = new byte[currentWord.getCodeLength()];
        for (int p = 0; p < currentWord.getCodeLength(); p++) {
            if (currentWord.getPoints().get(p) < 0)
                continue;

            codes[p] = currentWord.getCodes().get(p);
            idxSyn1[p] = currentWord.getPoints().get(p);
        }
    } else {
        idxSyn1 = new int[0];
        codes = new byte[0];
    }


    if (negative > 0) {
        if (syn1Neg == null) {
            ((InMemoryLookupTable<T>) lookupTable).initNegative();
            syn1Neg = new DeviceLocalNDArray(((InMemoryLookupTable<T>) lookupTable).getSyn1Neg());
        }
    }

    if (batches.get() == null)
        batches.set(new ArrayList<Aggregate>());

    /*AggregateCBOW(syn0.get(), syn1.get(), syn1Neg.get(), expTable.get(), table.get(),
            currentWord.getIndex(), windowWords, idxSyn1, codes, (int) negative, currentWord.getIndex(),
            lookupTable.layerSize(), alpha, nextRandom.get(), vocabCache.numWords(), numLabels, trainWords,
            inferenceVector);*/

    boolean useHS = configuration.isUseHierarchicSoftmax();
    boolean useNegative = configuration.getNegative() > 0;

    int[] inputStatuses = new int[windowWords.length];
    for (int i = 0; i < windowWords.length; ++i) {
        if (i < wordStatuses.length)
            inputStatuses[i] = wordStatuses[i] ? 1 : 0;
        else
            inputStatuses[i] = -1;
    }
    INDArray wordsStatuses = Nd4j.createFromArray(inputStatuses);

    CbowRound cbow = null;

    if (useHS && useNegative) {
        cbow = new CbowRound(Nd4j.scalar(currentWord.getIndex()), Nd4j.createFromArray(windowWords),
                wordsStatuses,
                Nd4j.scalar(currentWord.getIndex()),
                syn0.get(), syn1.get(), syn1Neg.get(),
                expTable.get(), table.get(), Nd4j.createFromArray(idxSyn1), Nd4j.createFromArray(codes),
                (int)negative, Nd4j.scalar(alpha), Nd4j.scalar(nextRandom.get()),
                inferenceVector != null ? inferenceVector : Nd4j.empty(syn0.get().dataType()),
                Nd4j.empty(DataType.INT),
                trainWords,
                workers);
    }
    else if (useHS) {
        cbow = new CbowRound(currentWord.getIndex(), windowWords, wordsStatuses.toIntVector(),
                syn0.get(), syn1.get(),
                expTable.get(), idxSyn1, codes, alpha, nextRandom.get(),
                inferenceVector != null ? inferenceVector : Nd4j.empty(syn0.get().dataType()), 0);
    }
    else if (useNegative) {
        cbow = new CbowRound(currentWord.getIndex(), windowWords, wordsStatuses.toIntVector(), currentWord.getIndex(),
                syn0.get(), syn1Neg.get(),
                expTable.get(), table.get(), (int)negative, alpha, nextRandom.get(),
                inferenceVector != null ? inferenceVector : Nd4j.empty(syn0.get().dataType()), 0);
    }

    nextRandom.set(Math.abs(nextRandom.get() * 25214903917L + 11));
    Nd4j.getExecutioner().exec(cbow);

    /*if (!isInference) {
        batches.get().add(cbow);
        if (batches.get().size() > 4096) {
            Nd4j.getExecutioner().exec(batches.get());
            batches.get().clear();
        }
    } else
        Nd4j.getExecutioner().exec(cbow);*/

}
 
Example 16
Source File: MtcnnUtil.java    From mtcnn-java with Apache License 2.0
/**
 * Use the heatmap to generate bounding boxes.
 *
 * Original code:
 *  - https://github.com/kpzhang93/MTCNN_face_detection_alignment/blob/master/code/codes/MTCNNv2/generateBoundingBox.m
 *  - https://github.com/davidsandberg/facenet/blob/master/src/align/detect_face.py#L660
 *
 * @param imap score heatmap
 * @param reg bounding-box regression map
 * @param scale scale at which the heatmap was computed
 * @param stepThreshold minimum score for a cell to produce a box
 * @return the generated bounding boxes and the regression values (both empty when no cell passes the threshold)
 */
public static INDArray[] generateBoundingBox(INDArray imap, INDArray reg, double scale, double stepThreshold) {

	int stride = 2;
	int cellSize = 12;

	// imap = np.transpose(imap)
	// y, x = np.where(imap >= t)
	// imap = imap.transpose();
	INDArray bb = MtcnnUtil.getIndexWhereMatrix(imap, v -> v >= stepThreshold);
	//INDArray bb = MtcnnUtil.getIndexWhere3(imap, Conditions.greaterThanOrEqual(stepThreshold));

	if (bb.isEmpty()) {
		return new INDArray[] { Nd4j.empty(), Nd4j.empty() };
	}

	INDArray yx = bb.transpose();

	// TODO : implement the following code fragment
	//  if y.shape[0] == 1:
	//    dx1 = np.flipud(dx1)
	//    dy1 = np.flipud(dy1)
	//    dx2 = np.flipud(dx2)
	//    dy2 = np.flipud(dy2)
	if (yx.size(0) == 1) {
		throw new IllegalStateException("TODO");
	}

	//    q1 = np.fix((stride*bb+1)/scale)
	//    q2 = np.fix((stride*bb+cellsize-1+1)/scale)
	INDArray q1 = Transforms.floor(bb.mul(stride).add(1).div(scale));
	INDArray q2 = Transforms.floor(bb.mul(stride).add(cellSize).div(scale));

	//    dx1 = np.transpose(reg[:,:,0])
	//    dy1 = np.transpose(reg[:,:,1])
	//    dx2 = np.transpose(reg[:,:,2])
	//    dy2 = np.transpose(reg[:,:,3])
	INDArray dx1 = reg.get(all(), all(), point(0));
	INDArray dy1 = reg.get(all(), all(), point(1));
	INDArray dx2 = reg.get(all(), all(), point(2));
	INDArray dy2 = reg.get(all(), all(), point(3));

	// reg = np.transpose(np.vstack([ dx1[(y,x)], dy1[(y,x)], dx2[(y,x)], dy2[(y,x)] ]))
	INDArray outReg = Nd4j.vstack(dx1.get(yx), dy1.get(yx), dx2.get(yx), dy2.get(yx)).transpose();

	//  if reg.size == 0:
	//    reg = np.empty(shape=(0, 3))
	if (outReg.isEmpty()) {
		outReg = Nd4j.empty();
	}

	INDArray score = imap.get(yx).transpose();

	INDArray boundingBox = Nd4j.hstack(q1, q2, score, outReg);

	return new INDArray[] { boundingBox, outReg };
}
 
Example 17
Source File: MtcnnService.java    From mtcnn-java with Apache License 2.0
private INDArray computeTempImage(INDArray image, int numBoxes, MtcnnUtil.PadResult padResult, int size) throws IOException {

		//  tempimg = np.zeros(shape=(size, size, 3, num_boxes))
		INDArray tempImg = Nd4j.zeros(new int[] { size, size, CHANNEL_COUNT, numBoxes }, C_ORDERING);

		opencv_core.Size newSize = new opencv_core.Size(size, size);

		for (int k = 0; k < numBoxes; k++) {
			//tmp = np.zeros((int(stage_status.tmph[k]), int(stage_status.tmpw[k]), 3))
			INDArray tmp = Nd4j.zeros(new int[] { padResult.getTmph().getInt(k), padResult.getTmpw().getInt(k), CHANNEL_COUNT }, C_ORDERING);

			// tmp[stage_status.dy[k] - 1:stage_status.edy[k], stage_status.dx[k] - 1:stage_status.edx[k], :] = \
			//   img[stage_status.y[k] - 1:stage_status.ey[k], stage_status.x[k] - 1:stage_status.ex[k], :]
			tmp.put(new INDArrayIndex[] {
							interval(padResult.getDy().getInt(k) - 1, padResult.getEdy().getInt(k)),
							interval(padResult.getDx().getInt(k) - 1, padResult.getEdx().getInt(k)),
							all() },
					image.get(
							interval(padResult.getY().getInt(k) - 1, padResult.getEy().getInt(k)),
							interval(padResult.getX().getInt(k) - 1, padResult.getEx().getInt(k)),
							all()));

			// if tmp.shape[0] > 0 and tmp.shape[1] > 0 or tmp.shape[0] == 0 and tmp.shape[1] == 0:
			//    tempimg[:, :, :, k] = cv2.resize(tmp, (size, size), interpolation=cv2.INTER_AREA)
			if ((tmp.shape()[0] > 0 && tmp.shape()[1] > 0) || (tmp.shape()[0] == 0 && tmp.shape()[1] == 0)) {

				INDArray resizedImage = resize(tmp.permutei(2, 0, 1).dup(), newSize)
						.get(point(0), all(), all(), all()).permutei(1, 2, 0).dup();

				tempImg.put(new INDArrayIndex[] { all(), all(), all(), point(k) }, resizedImage);
			}
			else {
				return Nd4j.empty();
			}
		}

		// tempimg = (tempimg - 127.5) * 0.0078125
		tempImg = tempImg.subi(127.5).muli(0.0078125);

		// tempimg1 = np.transpose(tempimg, (3, 1, 0, 2))
		INDArray tempImg1 = tempImg.permutei(3, 1, 0, 2).dup();

		return tempImg1;
	}
 
Example 18
Source File: CbowRound.java    From deeplearning4j with Apache License 2.0
/**
 * Negative-sampling (NS) CBOW round.
 *
 * @param target index of the target word
 * @param context indices of the context words
 * @param lockedWords lock status flags for the context words
 * @param ngStarter starting index for negative sampling
 * @param syn0 input-to-hidden weight matrix
 * @param syn1Neg negative-sampling weight matrix
 * @param expTable precomputed exp() lookup table
 * @param negTable negative-sampling distribution table
 * @param nsRounds number of negative-sampling rounds
 * @param alpha learning rate
 * @param nextRandom random seed
 * @param inferenceVector inference vector, or an empty array during training
 * @param numLabels number of labels
 */
public CbowRound(int target, @NonNull int[] context, @NonNull int[] lockedWords, int ngStarter, @NonNull INDArray syn0, @NonNull INDArray syn1Neg, @NonNull INDArray expTable, @NonNull INDArray negTable, int nsRounds, double alpha, long nextRandom, @NonNull INDArray inferenceVector, int numLabels) {
    this(Nd4j.scalar(target), Nd4j.createFromArray(context), Nd4j.createFromArray(lockedWords), Nd4j.scalar(ngStarter), syn0, Nd4j.empty(syn0.dataType()), syn1Neg, expTable, negTable, Nd4j.empty(DataType.INT), Nd4j.empty(DataType.BYTE), nsRounds, Nd4j.scalar(alpha), Nd4j.scalar(nextRandom), inferenceVector, Nd4j.scalar(numLabels), inferenceVector.isEmpty(), 1);
}
 
Example 19
Source File: SkipGramRound.java    From deeplearning4j with Apache License 2.0
/**
 * Skip-gram hierarchical-softmax (HS) round.
 *
 * @param target index of the target word
 * @param syn0 input-to-hidden weight matrix
 * @param syn1 hierarchical-softmax weight matrix
 * @param expTable precomputed exp() lookup table
 * @param indices hierarchical-softmax point indices
 * @param codes hierarchical-softmax codes
 * @param alpha learning rate
 * @param randomValue random seed
 * @param inferenceVector inference vector, or an empty array during training
 */
public SkipGramRound(int target, @NonNull INDArray syn0, @NonNull INDArray syn1, @NonNull INDArray expTable, int[] indices, byte[] codes, double alpha, long randomValue, INDArray inferenceVector) {
    this(Nd4j.scalar(target), Nd4j.scalar(-1), syn0, syn1, Nd4j.empty(syn1.dataType()), expTable, Nd4j.empty(syn1.dataType()), 0, Nd4j.createFromArray(indices), Nd4j.createFromArray(codes), Nd4j.scalar(alpha), Nd4j.scalar(randomValue), inferenceVector, false, 1);
}
 
Example 20
Source File: SkipGramRound.java    From deeplearning4j with Apache License 2.0
/**
 * Skip-gram negative-sampling (NS) round.
 *
 * @param target index of the target word
 * @param ngStarter starting index for negative sampling
 * @param syn0 input-to-hidden weight matrix
 * @param syn1Neg negative-sampling weight matrix
 * @param expTable precomputed exp() lookup table
 * @param negTable negative-sampling distribution table
 * @param nsRounds number of negative-sampling rounds
 * @param alpha learning rate
 * @param randomValue random seed
 * @param inferenceVector inference vector, or an empty array during training
 */
public SkipGramRound(int target, int ngStarter, @NonNull INDArray syn0, @NonNull INDArray syn1Neg, @NonNull INDArray expTable, @NonNull INDArray negTable, int nsRounds, double alpha, long randomValue, INDArray inferenceVector) {
    this(Nd4j.scalar(target), Nd4j.scalar(ngStarter), syn0, Nd4j.empty(syn0.dataType()), syn1Neg, expTable, negTable, nsRounds, Nd4j.empty(DataType.INT), Nd4j.empty(DataType.BYTE), Nd4j.scalar(alpha), Nd4j.scalar(randomValue), inferenceVector, false, 1);
}
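In Examples 18 through 20, Nd4j.empty(...) plays the same placeholder role inside constructor delegation: typed empty arrays such as Nd4j.empty(syn0.dataType()), Nd4j.empty(DataType.INT) and Nd4j.empty(DataType.BYTE) fill the argument slots (unused weight matrices, indices and codes) that a given training configuration does not need, so the fully-parameterised constructor can keep a single signature.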