Java Code Examples for org.bytedeco.javacpp.IntPointer#get()

The following examples show how to use org.bytedeco.javacpp.IntPointer#get(). Each example is taken from an open-source project; the source file, project, and license are listed above every snippet so you can trace it back to the original code.
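Before the project examples, here is a minimal, self-contained sketch of the method itself. It is not taken from any of the projects below, and it assumes JavaCPP 1.3 or later, where Pointer implements AutoCloseable, so try-with-resources releases the native memory.

import org.bytedeco.javacpp.IntPointer;

import java.util.Arrays;

public class IntPointerGetDemo {
    public static void main(String[] args) {
        // Allocate native memory for three ints and initialize it from a Java array.
        try (IntPointer p = new IntPointer(new int[] {10, 20, 30})) {
            int atPosition = p.get();      // value at the current position (index 0)
            int third = p.get(2);          // value at index 2
            int[] copy = new int[(int) p.limit()];
            p.get(copy);                   // bulk copy into a Java array
            System.out.println(atPosition + " " + third + " " + Arrays.toString(copy));
        }
    }
}

The bulk get(int[]) form appears again in Examples 2 and 5, and the indexed get(long) form in Examples 6 through 10.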
Example 1
Source File: EagerOperationBuilder.java    From java with Apache License 2.0
private static TFE_TensorHandle[] execute(TFE_Op opHandle, EagerSession session) {
  requireOp(opHandle);
  try (PointerScope scope = new PointerScope()) {
    IntPointer numRetvals = new IntPointer(1).put(MAX_OUTPUTS_PER_OP);
    PointerPointer<TFE_TensorHandle> retvals = new PointerPointer<TFE_TensorHandle>(MAX_OUTPUTS_PER_OP);
    TF_Status status = TF_Status.newStatus();
    TFE_Execute(opHandle, retvals, numRetvals, status);
    status.throwExceptionIfNotOK();

    TFE_TensorHandle[] rethandles = new TFE_TensorHandle[numRetvals.get()];
    for (int i = 0; i < rethandles.length; ++i) {
      rethandles[i] = retvals.get(TFE_TensorHandle.class, i).withDeallocator();
      session.attach(rethandles[i]);
    }
    return rethandles;
  }
}
 
Example 2
Source File: IntIndexer.java    From tapir with MIT License
/**
 * Creates an int indexer to efficiently access the data of a pointer.
 *
 * @param pointer data to access via a buffer or to copy to an array
 * @param sizes the sizes of the dimensions of the indexer
 * @param strides the strides between elements of each dimension
 * @param direct {@code true} to use a direct buffer, see {@link Indexer} for details
 * @return the new int indexer backed by a buffer or an array
 */
public static IntIndexer create(final IntPointer pointer, int[] sizes, int[] strides, boolean direct) {
    if (direct) {
        return new IntBufferIndexer(pointer.asBuffer(), sizes, strides);
    } else {
        final int position = pointer.position();
        int[] array = new int[pointer.limit() - position];
        pointer.get(array);
        return new IntArrayIndexer(array, sizes, strides) {
            @Override public void release() {
                pointer.position(position).put(array);
                super.release();
            }
        };
    }
}
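As a usage sketch of the factory above (illustrative only, not taken from the tapir project, and assuming the int[]-based create overload shown in Example 2): with direct set to false the indexer works on a Java array copy of the pointer data, and release() writes that copy back.

IntPointer data = new IntPointer(8);    // 8 native ints; position 0, limit 8
IntIndexer idx = IntIndexer.create(data, new int[] {8}, new int[] {1}, /* direct = */ false);
idx.put(0, 42);                         // updates the Java array copy, not the native memory yet
idx.release();                          // copies the array back into the pointer, so data.get(0) now returns 42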
 
Example 3
Source File: JcublasLevel1.java    From nd4j with Apache License 2.0
@Override
protected int isamax(long N, INDArray X, int incX) {
    if (Nd4j.dataType() != DataBuffer.Type.FLOAT)
        logger.warn("FLOAT iamax called");

    Nd4j.getExecutioner().push();

    CudaContext ctx = allocator.getFlowController().prepareAction(null, X);
    int ret2;

    CublasPointer xCPointer = new CublasPointer(X, ctx);

    cublasHandle_t handle = ctx.getHandle();
    synchronized (handle) {
        cublasSetStream_v2(new cublasContext(handle), new CUstream_st(ctx.getOldStream()));

        IntPointer resultPointer = new IntPointer(new int[] {0});
        cublasIsamax_v2(new cublasContext(handle), (int) N, (FloatPointer) xCPointer.getDevicePointer(), incX,
                        resultPointer);
        ret2 = resultPointer.get();
    }
    allocator.registerAction(ctx, null, X);

    return ret2 - 1;
}
 
Example 4
Source File: JcublasLevel1.java    From nd4j with Apache License 2.0
@Override
protected int idamax(long N, INDArray X, int incX) {
    if (Nd4j.dataType() != DataBuffer.Type.DOUBLE)
        logger.warn("DOUBLE imax called");

    Nd4j.getExecutioner().push();

    CudaContext ctx = allocator.getFlowController().prepareAction(null, X);
    int ret2;

    CublasPointer xCPointer = new CublasPointer(X, ctx);

    cublasHandle_t handle = ctx.getHandle();
    synchronized (handle) {
        cublasSetStream_v2(new cublasContext(handle), new CUstream_st(ctx.getOldStream()));

        IntPointer resultPointer = new IntPointer(new int[] {0});
        cublasIdamax_v2(new cublasContext(handle), (int) N, (DoublePointer) xCPointer.getDevicePointer(), incX,
                        resultPointer);
        ret2 = resultPointer.get();
    }

    allocator.registerAction(ctx, null, X);

    return ret2 - 1;
}
 
Example 5
Source File: ALEMDP.java    From deeplearning4j with Apache License 2.0
public ALEMDP(String romFile, boolean render, Configuration configuration) {
    this.romFile = romFile;
    this.configuration = configuration;
    this.render = render;
    ale = new ALEInterface();
    setupGame();

    // Get the vector of minimal or legal actions
    IntPointer a = (getConfiguration().minimalActionSet ? ale.getMinimalActionSet()
                    : ale.getLegalActionSet());
    actions = new int[(int)a.limit()];
    a.get(actions);

    int height = (int)ale.getScreen().height();
    int width = (int)ale.getScreen().width();

    discreteSpace = new DiscreteSpace(actions.length);
    int[] shape = {3, height, width};
    observationSpace = new ArrayObservationSpace<>(shape);
    screenBuffer = new byte[shape[0] * shape[1] * shape[2]];
}
 
Example 6
Source File: Codec.java    From JavaAV with GNU General Public License v2.0
/**
 * Gets all sample formats supported by this {@code Codec}. If this {@code Codec}
 * is not an audio codec, then {@code null} is returned.
 *
 * @return all sample formats supported by this {@code Codec}.
 */
public SampleFormat[] getSupportedSampleFormats() {
	IntPointer sampleFormatsPointer = avCodec.sample_fmts();

	if (getType() != MediaType.AUDIO || sampleFormatsPointer == null)
		return null;

	List<SampleFormat> sampleFormats = new ArrayList<SampleFormat>();

	int format;
	int index = 0;
	while ((format = sampleFormatsPointer.get(index++)) != -1)
		sampleFormats.add(SampleFormat.byId(format));

	return sampleFormats.toArray(new SampleFormat[0]);
}
 
Example 7
Source File: Codec.java    From JavaAV with GNU General Public License v2.0
/**
 * Gets all sample rates supported by this {@code Codec}. If this {@code Codec}
 * is not an audio codec, then {@code null} is returned. The sample rates are
 * sorted in ascending order.
 *
 * @return all sample rates supported by this {@code Codec}.
 */
public Integer[] getSupportedSampleRates() {
	IntPointer sampleRatesPointer = avCodec.supported_samplerates();

	if (getType() != MediaType.AUDIO || sampleRatesPointer == null)
		return null;

	List<Integer> sampleRates = new ArrayList<Integer>();

	int sampleRate;
	int index = 0;
	while ((sampleRate = sampleRatesPointer.get(index++)) != 0)
		sampleRates.add(sampleRate);

	// ascending order
	Collections.sort(sampleRates);

	return sampleRates.toArray(new Integer[0]);
}
 
Example 8
Source File: Codec.java    From JavaAV with GNU General Public License v2.0
/**
 * Gets all pixel formats supported by this {@code Codec}. If this {@code Codec}
 * is not a video codec, then {@code null} is returned. The pixel formats are
 * sorted in ascending order.
 *
 * @return all pixel formats supported by this {@code Codec}.
 */
public PixelFormat[] getSupportedPixelFormats() {
	IntPointer formatsPointer = avCodec.pix_fmts();

	if (getType() != MediaType.VIDEO || formatsPointer == null)
		return null;

	List<PixelFormat> pixelFormats = new ArrayList<PixelFormat>();

	int format;
	int index = 0;
	while ((format = formatsPointer.get(index++)) != -1)
		pixelFormats.add(PixelFormat.byId(format));

	// ascending order
	Collections.sort(pixelFormats);

	return pixelFormats.toArray(new PixelFormat[0]);
}
 
Example 9
Source File: DetectedMarker.java    From PapARt with GNU Lesser General Public License v3.0
public static DetectedMarker[] detect(ARToolKitPlus.TrackerMultiMarker tracker, opencv_core.IplImage image) {

        int cameraWidth = image.width();
        int cameraHeight = image.height();
        // TODO: check imgWith and init width.

        CvPoint2D32f corners = new CvPoint2D32f(4);
        CvMemStorage memory = CvMemStorage.create();
//        CvMat points = CvMat.create(1, 4, CV_32F, 2);
        Mat points = new Mat(1, 4, CV_32F, 2);

        CvSize subPixelSize = null, subPixelZeroZone = null;
        CvTermCriteria subPixelTermCriteria = null;
        int subPixelWindow = 11;

        subPixelSize = cvSize(subPixelWindow / 2, subPixelWindow / 2);
        subPixelZeroZone = cvSize(-1, -1);
        subPixelTermCriteria = cvTermCriteria(CV_TERMCRIT_EPS, 100, 0.001);

//        tracker.setThreshold(128);
        int n = 0;
        IntPointer markerNum = new IntPointer(1);
        ARToolKitPlus.ARMarkerInfo markers = new ARToolKitPlus.ARMarkerInfo(null);
//        tracker.arDetectMarkerLite(image.imageData(), tracker.getThreshold() /* 100 */, markers, markerNum);
        tracker.arDetectMarker(image.imageData(), tracker.getThreshold() /* 100 */, markers, markerNum);
        DetectedMarker[] markers2 = new DetectedMarker[markerNum.get(0)];

        for (int i = 0; i < markers2.length && !markers.isNull(); i++) {

            markers.position(i);
            int id = markers.id();
            if (id < 0) {
                // no detected ID...
                continue;
            }
            int dir = markers.dir();
            float confidence = markers.cf();
            float[] vertex = new float[8];
            markers.vertex().get(vertex);

            int w = subPixelWindow / 2 + 1;
            if (vertex[0] - w < 0 || vertex[0] + w >= cameraWidth || vertex[1] - w < 0 || vertex[1] + w >= cameraHeight
                    || vertex[2] - w < 0 || vertex[2] + w >= cameraWidth || vertex[3] - w < 0 || vertex[3] + w >= cameraHeight
                    || vertex[4] - w < 0 || vertex[4] + w >= cameraWidth || vertex[5] - w < 0 || vertex[5] + w >= cameraHeight
                    || vertex[6] - w < 0 || vertex[6] + w >= cameraWidth || vertex[7] - w < 0 || vertex[7] + w >= cameraHeight) {
                // too tight for cvFindCornerSubPix...

                continue;
            }

            // TODO: major bug here -> free error...
//            opencv_core.CvMat points = opencv_core.CvMat.create(1, 4, CV_32F, 2);
//            points.getFloatBuffer().put(vertex);
//            opencv_core.CvBox2D box = cvMinAreaRect2(points, memory);
//
//            float bw = box.size().width();
//            float bh = box.size().height();
//            cvClearMemStorage(memory);
//            if (bw <= 0 || bh <= 0 || bw / bh < 0.1 || bw / bh > 10) {
//                // marker is too "flat" to have been IDed correctly...
//                continue;
//            }
            for (int j = 0; j < 4; j++) {
                corners.position(j).put(vertex[2 * j], vertex[2 * j + 1]);
            }

            cvFindCornerSubPix(image, corners.position(0), 4, subPixelSize, subPixelZeroZone, subPixelTermCriteria);
            double[] d = {corners.position((4 - dir) % 4).x(), corners.position((4 - dir) % 4).y(),
                corners.position((5 - dir) % 4).x(), corners.position((5 - dir) % 4).y(),
                corners.position((6 - dir) % 4).x(), corners.position((6 - dir) % 4).y(),
                corners.position((7 - dir) % 4).x(), corners.position((7 - dir) % 4).y()};

            markers2[n++] = new DetectedMarker(id, d, confidence);
        }
        return Arrays.copyOf(markers2, n);
    }
 
Example 10
Source File: FaceRecognizer.java    From ExoVisix with MIT License
public int recognize(IplImage faceData) {

    Mat faces = cvarrToMat(faceData);

    cvtColor(faces, faces, CV_BGR2GRAY);

    // One slot each for the predicted label and its confidence (distance) value.
    IntPointer label = new IntPointer(1);
    DoublePointer confidence = new DoublePointer(1);

    this.faceRecognizer.predict(faces, label, confidence);

    int predictedLabel = label.get(0);

    //System.out.println(confidence.get(0));

    // A confidence value below 60 means the face is known;
    // a value above 60 means the face is unknown.
    if (confidence.get(0) > 60) {
        //System.out.println("-1");
        return -1;
    }

    return predictedLabel;
}