org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame Java Examples

The following examples show how to use org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame. The examples are extracted from open-source Android projects; each one lists its source file, originating project, and license.
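Every snippet below is an implementation of the same callback, CvCameraViewListener2.onCameraFrame(CvCameraViewFrame), which OpenCV's CameraBridgeViewBase invokes once per preview frame; whatever Mat the method returns is rendered to the screen. For orientation, here is a minimal activity skeleton hosting that callback (a sketch only; the layout and view IDs are illustrative, not taken from any example below):

import android.app.Activity;
import android.os.Bundle;

import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.core.Mat;

public class MinimalCameraActivity extends Activity implements CvCameraViewListener2 {

    private CameraBridgeViewBase mOpenCvCameraView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_camera);  // illustrative layout name
        mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.camera_view); // illustrative ID
        mOpenCvCameraView.setCvCameraViewListener(this);
        // In the OpenCV samples, mOpenCvCameraView.enableView() is called
        // once the OpenCV library has finished loading.
    }

    @Override
    public void onCameraViewStarted(int width, int height) {
        // Allocate per-frame Mats here; called once when the preview starts.
    }

    @Override
    public void onCameraViewStopped() {
        // Release Mats here.
    }

    @Override
    public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
        return inputFrame.rgba(); // the returned Mat is what gets displayed
    }
}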
Example #1
Source File: ColorBlobDetectionActivity.java    From OpenCV-AndroidSamples with MIT License
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();

    if (mIsColorSelected) {
        mDetector.process(mRgba);
        List<MatOfPoint> contours = mDetector.getContours();
        Log.e(TAG, "Contours count: " + contours.size());
        Imgproc.drawContours(mRgba, contours, -1, CONTOUR_COLOR);

        Mat colorLabel = mRgba.submat(4, 68, 4, 68);
        colorLabel.setTo(mBlobColorRgba);

        Mat spectrumLabel = mRgba.submat(4, 4 + mSpectrum.rows(), 70, 70 + mSpectrum.cols());
        mSpectrum.copyTo(spectrumLabel);
    }

    return mRgba;
}
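A note on this example: Mat.submat returns a view into mRgba rather than a copy, so the setTo and copyTo calls paint the color and spectrum swatches directly onto the frame that is returned for display.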
 
Example #2
Source File: MainActivity.java    From open-quartz with Apache License 2.0
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    mGray = inputFrame.gray();

    switch (MainActivity.viewMode) {
        case MainActivity.VIEW_MODE_RGBA:
            return mRgba;

        case MainActivity.VIEW_MODE_HIST:
            return mRgba;

        case MainActivity.VIEW_MODE_CANNY:
            Imgproc.Canny(mGray, mIntermediateMat, 80, 100);
            Imgproc.cvtColor(mIntermediateMat, mGray, Imgproc.COLOR_GRAY2BGRA, 4);
            return mGray;

        case MainActivity.VIEW_MODE_SOBEL:
            Imgproc.Sobel(mGray, mGray, CvType.CV_8U, 1, 1);
            // Core.convertScaleAbs(mIntermediateMat, mIntermediateMat, 10, 0);
            Imgproc.cvtColor(mGray, mGray, Imgproc.COLOR_GRAY2BGRA, 4);
            return mGray;

        case MainActivity.VIEW_MODE_PIXELIZE:
            Imgproc.resize(mGray, mIntermediateMat, mSize0, 0.1, 0.1,
                Imgproc.INTER_NEAREST);
            Imgproc.resize(mIntermediateMat, mRgba, mRgba.size(), 0.0, 0.0,
                Imgproc.INTER_NEAREST);
            return mRgba;

        case MainActivity.VIEW_MODE_GRAY:
            return mGray;

        case MainActivity.VIEW_MODE_FEATURES:
            FindFeatures(mGray.getNativeObjAddr(), mRgba.getNativeObjAddr());
            return mRgba;

        default:
            return mRgba;
    }
}
 
Example #3
Source File: MainActivity.java    From Android_OCV_Movement_Detection with MIT License
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    contours.clear();
    // use the gray frame because it requires fewer resources to process
    mGray = inputFrame.gray();

    // convert the gray frame into the RGB format expected by BackgroundSubtractorMOG's apply function
    Imgproc.cvtColor(mGray, mRgb, Imgproc.COLOR_GRAY2RGB);

    // apply detects moving objects and produces a foreground mask;
    // lRate is the learning rate, updated dynamically from seekbar changes
    sub.apply(mRgb, mFGMask, lRate);

    // erode and dilate remove noise from the foreground mask
    Imgproc.erode(mFGMask, mFGMask, new Mat());
    Imgproc.dilate(mFGMask, mFGMask, new Mat());

    // draw contours around the objects by first calling findContours and then drawContours;
    // RETR_EXTERNAL retrieves only external contours,
    // CHAIN_APPROX_NONE keeps all pixels of each contour
    Imgproc.findContours(mFGMask, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_NONE);

    // draw all the contours in red with a thickness of 2
    Imgproc.drawContours(mRgb, contours, -1, new Scalar(255, 0, 0), 2);

    return mRgb;
}
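This example relies on fields initialized elsewhere in the activity. A minimal sketch of that setup, assuming the OpenCV 2.4-era API in which BackgroundSubtractorMOG has a public constructor (on OpenCV 3.x and later you would use Video.createBackgroundSubtractorMOG2() instead); the lRate value here is illustrative:

private BackgroundSubtractorMOG sub;
private Mat mGray, mRgb, mFGMask;
private List<MatOfPoint> contours;
private double lRate = 0.5; // learning rate, driven by a SeekBar in the original app

public void onCameraViewStarted(int width, int height) {
    sub = new BackgroundSubtractorMOG();
    mGray = new Mat();
    mRgb = new Mat();
    mFGMask = new Mat();
    contours = new ArrayList<MatOfPoint>();
}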
 
Example #4
Source File: Tutorial2Activity.java    From OpenCV-AndroidSamples with MIT License
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    final int viewMode = mViewMode;
    switch (viewMode) {
        case VIEW_MODE_GRAY:
            // input frame has gray scale format
            Imgproc.cvtColor(inputFrame.gray(), mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
            break;
        case VIEW_MODE_RGBA:
            // input frame has RGBA format
            mRgba = inputFrame.rgba();
            break;
        case VIEW_MODE_CANNY:
            // input frame has gray scale format
            mRgba = inputFrame.rgba();
            Imgproc.Canny(inputFrame.gray(), mIntermediateMat, 80, 100);
            Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
            break;
        case VIEW_MODE_FEATURES:
            // input frame has RGBA format
            mRgba = inputFrame.rgba();
            mGray = inputFrame.gray();
            FindFeatures(mGray.getNativeObjAddr(), mRgba.getNativeObjAddr());
            break;
    }

    return mRgba;
}
 
Example #5
Source File: FdActivity.java    From open-quartz with Apache License 2.0
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    mGray = inputFrame.gray();

    if (mAbsoluteFaceSize == 0) {
        int height = mGray.rows();
        if (Math.round(height * mRelativeFaceSize) > 0) {
            mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
        }
    }

    MatOfRect faces = new MatOfRect();

    if (mJavaDetector != null) {
        mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2,
            // TODO: objdetect.CV_HAAR_SCALE_IMAGE
            new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
    }

    // Draw rectangles
    Rect[] facesArray = faces.toArray();
    for (int i = 0; i < facesArray.length; i++) {
        Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);
    }

    return mRgba;
}
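Examples #5, #7, and #19 assume mJavaDetector, a CascadeClassifier loaded before the preview starts. A sketch of the usual setup from the OpenCV face-detection sample, assuming a cascade bundled as a raw resource (the resource name is illustrative):

try {
    // copy the cascade out of res/raw to a file path CascadeClassifier can read
    InputStream is = getResources().openRawResource(R.raw.lbpcascade_frontalface);
    File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
    File cascadeFile = new File(cascadeDir, "lbpcascade_frontalface.xml");
    FileOutputStream os = new FileOutputStream(cascadeFile);
    byte[] buffer = new byte[4096];
    int bytesRead;
    while ((bytesRead = is.read(buffer)) != -1) {
        os.write(buffer, 0, bytesRead);
    }
    is.close();
    os.close();

    mJavaDetector = new CascadeClassifier(cascadeFile.getAbsolutePath());
    if (mJavaDetector.empty()) {
        Log.e(TAG, "Failed to load cascade classifier");
        mJavaDetector = null;
    }
} catch (IOException e) {
    Log.e(TAG, "Failed to load cascade. Exception thrown: " + e);
}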
 
Example #6
Source File: FaceDetectionActivity.java    From AndroidFaceRecognizer with MIT License
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    mGray = inputFrame.gray();

    if (!detectionInProgress) {
        Mat image = new Mat(mGray.rows(), mGray.cols(), mGray.type());
        mGray.copyTo(image);
        detectFaceOnFrame(image);
    }

    return mRgba;
}
 
Example #7
Source File: FaceDetectionActivity.java    From OpenCV-AndroidSamples with MIT License
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    mGray = inputFrame.gray();

    if (mAbsoluteFaceSize == 0) {
        int height = mGray.rows();
        if (Math.round(height * mRelativeFaceSize) > 0) {
            mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
        }
        mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
    }

    MatOfRect faces = new MatOfRect();

    if (mDetectorType == JAVA_DETECTOR) {
        if (mJavaDetector != null)
            mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                    new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
    } else if (mDetectorType == NATIVE_DETECTOR) {
        if (mNativeDetector != null)
            mNativeDetector.detect(mGray, faces);
    } else {
        Log.e(TAG, "Detection method is not selected!");
    }

    Rect[] facesArray = faces.toArray();
    for (int i = 0; i < facesArray.length; i++)
        Imgproc.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);

    return mRgba;
}
 
Example #8
Source File: MainActivity.java    From Form-N-Fun with MIT License
@Override
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    if (Build.MODEL.equalsIgnoreCase("Nexus 5X")) // flip the frame on the Nexus 5X
        Core.flip(mRgba, mRgba, -1);
    findmazesandballs.apply(mRgba); // process frames
    return mRgba;
}
 
Example #9
Source File: MonitoringPresenter.java    From go-bees with GNU General Public License v3.0
@Override
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    if(showAlgoOutput) {
        int numBees = bc.countBees(inputFrame.gray());
        view.setNumBees(numBees);
        bc.getProcessedFrame().copyTo(processedFrame);
        bc.getProcessedFrame().release();
        return processedFrame;
    }
    // If show algorithm output is false -> show original frame
    return inputFrame.rgba();
}
 
Example #10
Source File: MainActivity.java    From MOAAP with MIT License
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {

    // Get image size and draw a rectangle on the image for reference
    Mat temp = inputFrame.rgba();
    Imgproc.rectangle(temp, new Point(temp.cols() / 2 - 200, temp.rows() / 2 - 200),
            new Point(temp.cols() / 2 + 200, temp.rows() / 2 + 200), new Scalar(255, 255, 255), 1);
    Mat digit = temp.submat(temp.rows() / 2 - 180, temp.rows() / 2 + 180,
            temp.cols() / 2 - 180, temp.cols() / 2 + 180).clone();
    Core.transpose(digit, digit);
    int predict_result = mnist.FindMatch(digit);
    Imgproc.putText(temp, Integer.toString(predict_result), new Point(50, 150),
            FONT_HERSHEY_SIMPLEX, 3.0, new Scalar(0, 0, 255), 5);

    return temp;
}
 
Example #11
Source File: CameraActivity.java    From Android-Car-duino with GNU General Public License v2.0
@Override
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    return driver.processImage(inputFrame.rgba());
}
 
Example #12
Source File: MainActivity.java    From ResistorScanner with MIT License
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    return _resistorProcessor.processFrame(inputFrame);
}
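Example #13 below shows the ResistorImageProcessor.processFrame implementation that this callback delegates to.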
 
Example #13
Source File: ResistorImageProcessor.java    From ResistorScanner with MIT License
public Mat processFrame(CvCameraViewFrame frame)
{
    Mat imageMat = frame.rgba();
    int cols = imageMat.cols();
    int rows = imageMat.rows();

    Mat subMat = imageMat.submat(rows/2, rows/2+30, cols/2 - 50, cols/2 + 50);
    Mat filteredMat = new Mat();
    Imgproc.cvtColor(subMat, subMat, Imgproc.COLOR_RGBA2BGR);
    Imgproc.bilateralFilter(subMat, filteredMat, 5, 80, 80);
    Imgproc.cvtColor(filteredMat, filteredMat, Imgproc.COLOR_BGR2HSV);

    findLocations(filteredMat);

    if(_locationValues.size() >= 3)
    {
        // recover the resistor value by iterating through the centroid locations
        // in an ascending manner and using their associated colour values
        int k_tens = _locationValues.keyAt(0);
        int k_units = _locationValues.keyAt(1);
        int k_power = _locationValues.keyAt(2);

        int value = 10*_locationValues.get(k_tens) + _locationValues.get(k_units);
        value *= Math.pow(10, _locationValues.get(k_power));

        String valueStr;
        if(value >= 1e3 && value < 1e6)
            valueStr = String.valueOf(value/1e3) + " KOhm";
        else if(value >= 1e6)
            valueStr = String.valueOf(value/1e6) + " MOhm";
        else
            valueStr = String.valueOf(value) + " Ohm";

        if(value <= 1e9)
            Core.putText(imageMat, valueStr, new Point(10, 100), Core.FONT_HERSHEY_COMPLEX,
                         2, new Scalar(255, 0, 0, 255), 3);
    }

    Scalar color = new Scalar(255, 0, 0, 255);
    Core.line(imageMat, new Point(cols/2 - 50, rows/2), new Point(cols/2 + 50, rows/2 ), color, 2);
    return imageMat;
}
 
Example #14
Source File: OnCameraFrameRender.java    From OpenCV-AndroidSamples with MIT License
public Mat render(CvCameraViewFrame inputFrame) {
    return mFrameRender.render(inputFrame);
}
 
Example #15
Source File: CameraActivity.java    From AndroidObjectDetection-OpenCV with MIT License
@Override
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {

    Mat frame = inputFrame.rgba();
    Imgproc.cvtColor(frame, frame, Imgproc.COLOR_RGBA2RGB);
    Size frame_size = new Size(416, 416);
    Scalar mean = new Scalar(127.5);

    Mat blob = Dnn.blobFromImage(frame, 1.0 / 255.0, frame_size, mean, true, false);
    //save_mat(blob);
    net.setInput(blob);

    List<Mat> result = new ArrayList<>();
    List<String> outBlobNames = net.getUnconnectedOutLayersNames();

    net.forward(result, outBlobNames);
    float confThreshold = 0.5f;

    for (int i = 0; i < result.size(); ++i) {
        // each row is a candidate detection: the first 4 numbers are
        // [center_x, center_y, width, height], the 5th is the box objectness
        // score, and the remaining (N-5) entries are class probabilities
        Mat level = result.get(i);
        for (int j = 0; j < level.rows(); ++j) {
            Mat row = level.row(j);
            Mat scores = row.colRange(5, level.cols());
            Core.MinMaxLocResult mm = Core.minMaxLoc(scores);
            float confidence = (float) mm.maxVal;
            Point classIdPoint = mm.maxLoc;
            if (confidence > confThreshold) {

                int centerX = (int) (row.get(0, 0)[0] * frame.cols());
                int centerY = (int) (row.get(0, 1)[0] * frame.rows());
                int width = (int) (row.get(0, 2)[0] * frame.cols());
                int height = (int) (row.get(0, 3)[0] * frame.rows());

                int left = (int) (centerX - width * 0.5);
                int top = (int) (centerY - height * 0.5);
                int right = (int) (centerX + width * 0.5);
                int bottom = (int) (centerY + height * 0.5);

                Point left_top = new Point(left, top);
                Point right_bottom = new Point(right, bottom);
                Point label_left_top = new Point(left, top - 5);
                DecimalFormat df = new DecimalFormat("#.##");

                int class_id = (int) classIdPoint.x;
                String label = classNames.get(class_id) + ": " + df.format(confidence);
                Scalar color = colors.get(class_id);

                Imgproc.rectangle(frame, left_top, right_bottom, color, 3, 2);
                Imgproc.putText(frame, label, label_left_top, Imgproc.FONT_HERSHEY_SIMPLEX, 1, new Scalar(0, 0, 0), 4);
                Imgproc.putText(frame, label, label_left_top, Imgproc.FONT_HERSHEY_SIMPLEX, 1, new Scalar(255, 255, 255), 2);
            }
        }
    }
    return frame;
}
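The net, classNames, and colors fields are set up before frames arrive. A sketch of that initialization, assuming YOLO config and weights files already copied to local storage (all file names here are illustrative, and exception handling is omitted):

// assumed fields: Net net; List<String> classNames; List<Scalar> colors;
String cfgPath = getFilesDir() + "/yolov3-tiny.cfg";
String weightsPath = getFilesDir() + "/yolov3-tiny.weights";
net = Dnn.readNetFromDarknet(cfgPath, weightsPath);

// one label per class index (e.g. the 80 COCO labels) and one box color per class
classNames = new ArrayList<>();
colors = new ArrayList<>();
BufferedReader reader = new BufferedReader(new FileReader(getFilesDir() + "/coco.names"));
Random rng = new Random(12345); // fixed seed so colors are stable across runs
String line;
while ((line = reader.readLine()) != null) {
    classNames.add(line.trim());
    colors.add(new Scalar(rng.nextInt(256), rng.nextInt(256), rng.nextInt(256)));
}
reader.close();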
 
Example #16
Source File: CameraCalibrationActivity.java    From OpenCV-AndroidSamples with MIT License
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    return mOnCameraFrameRender.render(inputFrame);
}
 
Example #17
Source File: Tutorial1Activity.java    From OpenCV-AndroidSamples with MIT License
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    return inputFrame.rgba();
}
 
Example #18
Source File: Tutorial3Activity.java    From OpenCV-AndroidSamples with MIT License
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    return inputFrame.rgba();
}
 
Example #19
Source File: ShowCameraViewActivity.java    From FaceT with Mozilla Public License 2.0
@Override
public Mat onCameraFrame(CvCameraViewFrame cvCameraViewFrame) {

    mRgba = cvCameraViewFrame.rgba();
    mGray = cvCameraViewFrame.gray();

    if (mAbsoluteFaceSize == 0) {
        int height = mGray.rows();
        if (Math.round(height * mRelativeFaceSize) > 0) {
            mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
        }
    }

    MatOfRect faces = new MatOfRect();

    if (mDetectorType == JAVA_DETECTOR) {
        if (mJavaDetector != null) {
            mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                    new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
        }
    } else {
        Log.e(TAG, "Detection method is not selected!");
    }

    Rect[] facesArray = faces.toArray();
    for (int i = 0; i < facesArray.length; i++) {
        Imgproc.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);
        face_middle_x = (int) facesArray[i].tl().x + facesArray[i].width / 2;
        face_middle_y = (int) facesArray[i].tl().y + facesArray[i].height / 2;
        Log.d("face middle : ", face_middle_x + "," + face_middle_y);
        Log.d(TAG, "faces array " + String.valueOf(i));
    }

//    if (facesArray.length > 0) {
//        // auto save face when it detects the face
//        // Thread t = getBaseContext().getMainLooper().getThread();
//        Thread t = new Thread(new Runnable() {
//            @Override
//            public void run() {
//                SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss_S");
//                String currentDateandTime = sdf.format(new Date());
//                String saveDir = Environment.getExternalStorageDirectory().getPath() + "/DCIM/OCV/FDSave";
//                File dirCheck = new File(saveDir);
//                if (!dirCheck.exists()) {
//                    dirCheck.mkdirs();
//                }
//                String fileName = saveDir + "/" + currentDateandTime + ".jpg";
//                try {
//                    mOpenCvCameraView.takePicture(fileName);
//                } catch (Exception ex) {
//                    ex.printStackTrace();
//                }
//            }
//        });
//        t.start();
//    }
//
//    if (mIsColorSelected) {
//        mDetector.process(mRgba);
//        List<MatOfPoint> contours = mDetector.getContours();
//        Log.e(TAG, "Contours count: " + contours.size());
//        Imgproc.drawContours(mRgba, contours, -1, CONTOUR_COLOR);
//
//        Mat colorLabel = mRgba.submat(4, 68, 4, 68);
//        colorLabel.setTo(mBlobColorRgba);
//
//        Mat spectrumLabel = mRgba.submat(4, 4 + mSpectrum.rows(), 70, 70 + mSpectrum.cols());
//        mSpectrum.copyTo(spectrumLabel);
//    }

    return mRgba;
}
 
Example #20
Source File: AAVActivity.java    From AAV with GNU General Public License v2.0
@Override
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
	synchronized (inputFrame) {

		_rgbaImage = inputFrame.rgba();

		if (android.os.Build.MODEL.equalsIgnoreCase("Nexus 5X")) {
			Core.flip(_rgbaImage, _rgbaImage, -1);
		}

		double current_contour;

		// In contrast to the C++ interface, Android API captures images in the RGBA format.
		// Also, in HSV space, only the hue determines which color it is. Saturation determines
		// how 'white' the color is, and Value determines how 'dark' the color is.
		Imgproc.cvtColor(_rgbaImage, _hsvMat, Imgproc.COLOR_RGB2HSV_FULL);

		Core.inRange(_hsvMat, _lowerThreshold, _upperThreshold, _processedMat);

		// Imgproc.dilate(_processedMat, _dilatedMat, new Mat());
		Imgproc.erode(_processedMat, _dilatedMat, new Mat());
		Imgproc.findContours(_dilatedMat, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
		MatOfPoint2f points = new MatOfPoint2f();
		_contourArea = 7;
		for (int i = 0, n = contours.size(); i < n; i++) {
			current_contour = Imgproc.contourArea(contours.get(i));
			if (current_contour > _contourArea) {
				_contourArea = current_contour;
				// contours.get(i) is a single MatOfPoint, but minEnclosingCircle
				// needs a MatOfPoint2f, so do a conversion
				contours.get(i).convertTo(points, CvType.CV_32FC2);
			}
		}
		}
		if (!points.empty() && _contourArea > MIN_CONTOUR_AREA) {
			Imgproc.minEnclosingCircle(points, _centerPoint, null);
			// Core.circle(_rgbaImage, _centerPoint, 3, new Scalar(255, 0, 0), Core.FILLED);
			if (_showContourEnable)
				Core.circle(_rgbaImage, _centerPoint, (int) Math.round(Math.sqrt(_contourArea / Math.PI)), new Scalar(255, 0, 0), 3, 8, 0);// Core.FILLED);
		}
		contours.clear();
	}
	return _rgbaImage;
}
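One detail worth noting in this example: COLOR_RGB2HSV_FULL maps hue to the full 0-255 range rather than the 0-179 range used by COLOR_RGB2HSV, so the _lowerThreshold and _upperThreshold Scalars must be expressed on that scale.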
 
Example #21
Source File: MainActivity.java    From MOAAP with MIT License
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    final int viewMode = mViewMode;
    switch (viewMode) {
        case VIEW_MODE_OPTICAL_FLOW:
            mGray = inputFrame.gray();
            if (features.toArray().length == 0) {
                int rowStep = 50, colStep = 100;
                int nRows = mGray.rows() / rowStep, nCols = mGray.cols() / colStep;

//                Log.d(TAG, "\nRows: " + nRows + "\nCols: " + nCols + "\n");

                Point[] points = new Point[nRows * nCols];
                for (int i = 0; i < nRows; i++) {
                    for (int j = 0; j < nCols; j++) {
                        points[i * nCols + j] = new Point(j * colStep, i * rowStep);
//                        Log.d(TAG, "\nRow: " + i * rowStep + "\nCol: " + j * colStep + "\n: ");
                    }
                }

                features.fromArray(points);

                prevFeatures.fromList(features.toList());
                mPrevGray = mGray.clone();
                break;
            }

            nextFeatures.fromArray(prevFeatures.toArray());
            Video.calcOpticalFlowPyrLK(mPrevGray, mGray, prevFeatures, nextFeatures, status, err);

            List<Point> prevList = features.toList(), nextList = nextFeatures.toList();
            Scalar color = new Scalar(255);

            for (int i = 0; i < prevList.size(); i++) {
//                Core.circle(mGray, prevList.get(i), 5, color);
                Imgproc.line(mGray, prevList.get(i), nextList.get(i), color);
            }

            mPrevGray = mGray.clone();
            break;
        case VIEW_MODE_KLT_TRACKER:
            mGray = inputFrame.gray();

            if (features.toArray().length == 0) {
                Imgproc.goodFeaturesToTrack(mGray, features, 10, 0.01, 10);
                Log.d(TAG, features.toList().size() + "");
                prevFeatures.fromList(features.toList());
                mPrevGray = mGray.clone();
//                prevFeatures.fromList(nextFeatures.toList());
                break;
            }

//            OpticalFlow(mPrevGray.getNativeObjAddr(), mGray.getNativeObjAddr(), prevFeatures.getNativeObjAddr(), nextFeatures.getNativeObjAddr());
            Video.calcOpticalFlowPyrLK(mPrevGray, mGray, prevFeatures, nextFeatures, status, err);
            List<Point> drawFeature = nextFeatures.toList();
//            Log.d(TAG, drawFeature.size() + "");
            for (int i = 0; i < drawFeature.size(); i++) {
                Point p = drawFeature.get(i);
                Imgproc.circle(mGray, p, 5, new Scalar(255));
            }
            mPrevGray = mGray.clone();
            prevFeatures.fromList(nextFeatures.toList());
            break;
        default:
            mViewMode = VIEW_MODE_KLT_TRACKER;
    }

    return mGray;
}
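The tracking state in this example lives in fields of the activity. A plausible set of declarations matching the types OpenCV's Java API expects (a sketch; the original source may differ):

private Mat mGray, mPrevGray;
private MatOfPoint features = new MatOfPoint();         // corners from goodFeaturesToTrack
private MatOfPoint2f prevFeatures = new MatOfPoint2f(); // points tracked from the previous frame
private MatOfPoint2f nextFeatures = new MatOfPoint2f(); // where calcOpticalFlowPyrLK found them
private MatOfByte status = new MatOfByte();             // 1 where a point was successfully tracked
private MatOfFloat err = new MatOfFloat();              // per-point tracking error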
 
Example #22
Source File: FaceRecognitionActivity.java    From AndroidFaceRecognizer with MIT License
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {

    mRgba = inputFrame.rgba();
    mGray = inputFrame.gray();

    if (!detectionInProgress) {
        Mat image = new Mat(mGray.rows(), mGray.cols(), mGray.type());
        mGray.copyTo(image);
        detectFaceOnFrame(image);
    }

    return mRgba;
}
 
Example #23
Source File: MainActivity.java    From OpenCV-Android-Object-Detection with MIT License
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    return recognize(inputFrame.rgba());
}