org.opencv.video.Video Java Examples

The following examples show how to use org.opencv.video.Video. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: BackgroundSubtractor.java    From go-bees with GNU General Public License v3.0 (5 votes)
/**
 * Builds a {@link BackgroundSubtractorMOG2} configured with the class constants
 * plus the two caller-supplied tuning values.
 *
 * @param history         number of past frames the background model considers.
 * @param shadowThreshold threshold used to decide whether a pixel is a shadow.
 * @return a fully configured {@code BackgroundSubtractorMOG2} instance.
 */
private BackgroundSubtractorMOG2 getMogInstance(int history, double shadowThreshold) {
    // Create the subtractor with the fixed variance threshold and shadow-detection flag.
    final BackgroundSubtractorMOG2 subtractor =
            Video.createBackgroundSubtractorMOG2(history, VAR_THRESHOLD, DETECT_SHADOWS);
    // Apply the remaining class-level defaults and the per-call shadow threshold.
    subtractor.setBackgroundRatio(BACKGROUND_RATIO);
    subtractor.setVarInit(VAR_INIT);
    subtractor.setShadowThreshold(shadowThreshold);
    return subtractor;
}
 
Example #2
Source File: LKTracker.java    From OpenTLDAndroid with Apache License 2.0 (5 votes)
/**
 * Tracks {@code lastPoints} from {@code lastImg} into {@code currentImg} using
 * forward-backward pyramidal Lucas-Kanade optical flow, then filters the matches.
 *
 * @param lastImg    previous frame (tracking source).
 * @param currentImg current frame (tracking destination).
 * @param lastPoints points in {@code lastImg} to track.
 * @return Pair of new, FILTERED, last and current POINTS, or null if it hasn't managed to track anything.
 */
Pair<Point[], Point[]> track(final Mat lastImg, final Mat currentImg, Point[] lastPoints){
	final int size = lastPoints.length;
	final MatOfPoint2f currentPointsMat = new MatOfPoint2f();
	final MatOfPoint2f pointsFBMat = new MatOfPoint2f();
	final MatOfByte statusMat = new MatOfByte();
	final MatOfFloat errSimilarityMat = new MatOfFloat();
	final MatOfByte statusFBMat = new MatOfByte();
	final MatOfFloat errSimilarityFBMat = new MatOfFloat();
	
	// Forward pass: last -> current.
	Video.calcOpticalFlowPyrLK(lastImg, currentImg, new MatOfPoint2f(lastPoints), currentPointsMat, 
			statusMat, errSimilarityMat, WINDOW_SIZE, MAX_LEVEL, termCriteria, 0, LAMBDA);
	// Backward pass: current -> last, used to measure forward-backward (FB) error.
	Video.calcOpticalFlowPyrLK(currentImg, lastImg, currentPointsMat, pointsFBMat, 
			statusFBMat, errSimilarityFBMat, WINDOW_SIZE, MAX_LEVEL, termCriteria, 0, LAMBDA);
	
	final byte[] status = statusMat.toArray();
	final float[] errSimilarityFB = errSimilarityFBMat.toArray();	
	
	// compute the real FB error (relative to LAST points not the current ones...):
	// distance between each original point and its backward-tracked counterpart.
	final Point[] pointsFB = pointsFBMat.toArray();
	for(int i = 0; i < size; i++){
		errSimilarityFB[i] = Util.norm(pointsFB[i], lastPoints[i]);
	}
	
	final Point[] currPoints = currentPointsMat.toArray();
	// compute real similarity error (NCC between patches around matched points).
	// Single assignment: the former `new float[size]` pre-allocation was a dead
	// store, immediately discarded by this call.
	final float[] errSimilarity = normCrossCorrelation(lastImg, currentImg, lastPoints, currPoints, status);
	
	//TODO  errSimilarityFB has problem != from C++
	// filter out points with fwd-back error > the median AND points with similarity error > median
	return filterPts(lastPoints, currPoints, errSimilarity, errSimilarityFB, status);
}
 
Example #3
Source File: MainActivity.java    From MOAAP with MIT License (4 votes)
/**
 * Per-frame camera callback: runs either a dense-grid optical-flow visualization
 * or a KLT (goodFeaturesToTrack + LK) tracker, depending on {@code mViewMode},
 * and returns the grayscale frame with the result drawn on it.
 *
 * @param inputFrame current camera frame wrapper.
 * @return the grayscale frame with flow lines / tracked points drawn in.
 */
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
        final int viewMode = mViewMode;
        switch (viewMode) {
            case VIEW_MODE_OPTICAL_FLOW:
                mGray = inputFrame.gray();
                // First frame in this mode: seed a regular grid of points to track.
                if(features.toArray().length==0){
                    int rowStep = 50, colStep = 100;
                    int nRows = mGray.rows()/rowStep, nCols = mGray.cols()/colStep;

                    // Idiomatic Java array declaration (was C-style `Point points[]`).
                    Point[] points = new Point[nRows*nCols];
                    for(int i=0; i<nRows; i++){
                        for(int j=0; j<nCols; j++){
                            points[i*nCols+j]=new Point(j*colStep, i*rowStep);
                        }
                    }

                    features.fromArray(points);

                    prevFeatures.fromList(features.toList());
                    mPrevGray = mGray.clone();
                    // Nothing to track yet; wait for the next frame.
                    break;
                }

                nextFeatures.fromArray(prevFeatures.toArray());
                Video.calcOpticalFlowPyrLK(mPrevGray, mGray, prevFeatures, nextFeatures, status, err);

                // Draw a line from each grid point to its tracked position.
                // NOTE(review): `status` is not consulted, so points that failed to
                // track are drawn too — presumably acceptable for a demo; verify.
                List<Point> prevList=features.toList(), nextList=nextFeatures.toList();
                Scalar color = new Scalar(255);

                for(int i = 0; i<prevList.size(); i++){
                    Imgproc.line(mGray, prevList.get(i), nextList.get(i), color);
                }

                mPrevGray = mGray.clone();
                break;
            case VIEW_MODE_KLT_TRACKER:
                mGray = inputFrame.gray();

                // First frame in this mode: detect up to 10 strong corners to track.
                if(features.toArray().length==0){
                    Imgproc.goodFeaturesToTrack(mGray, features, 10, 0.01, 10);
                    Log.d(TAG, features.toList().size()+"");
                    prevFeatures.fromList(features.toList());
                    mPrevGray = mGray.clone();
                    break;
                }

                Video.calcOpticalFlowPyrLK(mPrevGray, mGray, prevFeatures, nextFeatures, status, err);
                // Draw each tracked corner at its new location.
                List<Point> drawFeature = nextFeatures.toList();
                for(int i = 0; i<drawFeature.size(); i++){
                    Point p = drawFeature.get(i);
                    Imgproc.circle(mGray, p, 5, new Scalar(255));
                }
                mPrevGray = mGray.clone();
                // Tracked points become the starting points for the next frame.
                prevFeatures.fromList(nextFeatures.toList());
                break;
            default: mViewMode = VIEW_MODE_KLT_TRACKER;
        }

        return mGray;
    }
 
Example #4
Source File: OpticalFlowOp.java    From StormCV with Apache License 2.0 (4 votes)
/**
 * Computes dense Farneback optical flow between two consecutive frames and
 * emits it as a {@code Feature}, either attached to the first frame
 * ({@code outputFrame == true}) or as a standalone particle.
 *
 * @param input exactly two {@link Frame} particles (earlier frame first).
 * @return list containing the frame-with-feature or the feature itself; empty
 *         when the input is not a pair of frames.
 * @throws Exception propagated from decoding/OpenCV calls.
 */
@Override
public List<CVParticle> execute(List<CVParticle> input) throws Exception {
	List<CVParticle> result = new ArrayList<CVParticle>();
	// This operation only makes sense for exactly two Frame particles.
	if(input.size() != 2 || !(input.get(0) instanceof Frame) || !(input.get(1) instanceof Frame))
		return result;
	
	Frame frame1 = (Frame)input.get(0);
	Frame frame2 = (Frame)input.get(1);
	
	// Decode each frame's bytes and convert to single-channel grayscale,
	// as required by calcOpticalFlowFarneback.
	MatOfByte mob1 = new MatOfByte(frame1.getImageBytes());
	Mat image1 = Highgui.imdecode(mob1, Highgui.CV_LOAD_IMAGE_ANYCOLOR);
	Mat image1Gray = new Mat( image1.size(), CvType.CV_8UC1 );
	Imgproc.cvtColor( image1, image1Gray, Imgproc.COLOR_RGB2GRAY );
	
	MatOfByte mob2 = new MatOfByte(frame2.getImageBytes());
	Mat image2 = Highgui.imdecode(mob2, Highgui.CV_LOAD_IMAGE_ANYCOLOR);
	Mat image2Gray = new Mat( image2.size(), CvType.CV_8UC1 );
	Imgproc.cvtColor( image2, image2Gray, Imgproc.COLOR_RGB2GRAY );
	
	// Two-channel float output: per-pixel (dx, dy) displacement.
	Mat opticalFlow = new Mat( image1Gray.size(), CvType.CV_32FC2 );
	Video.calcOpticalFlowFarneback( image1Gray, image2Gray, opticalFlow, 0.5, 1, 1, 1, 7, 1.5, 1 );
	
	int cols = opticalFlow.cols();
	int rows = opticalFlow.rows();
	int maxz = opticalFlow.get(0,0).length;
	float[] tmp = new float[maxz];
	// dense[x][y] = (dx, dy); only channels 0 and 1 are filled even if
	// maxz were larger — for CV_32FC2 maxz is 2, so this covers everything.
	float[][][] dense = new float[cols][rows][maxz];
	// Use the hoisted rows/cols: the previous code re-invoked the native
	// Mat.rows()/cols() JNI calls on every loop iteration for no benefit.
	for(int y=0; y<rows; y++){
		for(int x=0; x<cols; x++){
			opticalFlow.get(y,x, tmp);
			dense[x][y][0] = tmp[0];
			dense[x][y][1] = tmp[1];
		}
	}
	
	// Duration = sequence-number gap between the two frames.
	Feature feature = new Feature(frame1.getStreamId(), frame1.getSequenceNr(), name, frame2.getSequenceNr()-frame1.getSequenceNr(), null, dense);
	if(outputFrame){
		frame1.getFeatures().add(feature);
		result.add(frame1);
	}else{
		result.add(feature);
	}

	return result;
}