Java Code Examples for org.opencv.core.Core#flip()

The following examples show how to use org.opencv.core.Core#flip(). You can vote up the examples you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: MainActivity.java    From MOAAP with MIT License 7 votes vote down vote up
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    // Grab both the grayscale and color views of the current frame.
    Mat gray = inputFrame.gray();
    mRgba = inputFrame.rgba();

    // Mirror both frames horizontally when the front camera is active,
    // so the preview behaves like a selfie mirror.
    if (mIsFrontCamera) {
        Core.flip(mRgba, mRgba, 1);
        Core.flip(gray, gray, 1);
    }

    // Run the Haar cascade on the grayscale frame (if it loaded).
    MatOfRect detectedFaces = new MatOfRect();
    if (haarCascade != null) {
        haarCascade.detectMultiScale(gray, detectedFaces, 1.1, 2, 2, new Size(200,200), new Size());
    }

    // Outline every detected face on the color preview.
    for (Rect face : detectedFaces.toArray()) {
        Imgproc.rectangle(mRgba, face.tl(), face.br(), new Scalar(100), 3);
    }
    return mRgba;
}
 
Example 2
Source File: MatOperation.java    From Android-Face-Recognition-with-Deep-Learning-Library with Apache License 2.0 6 votes vote down vote up
/***************************************************************************************
 *    Title: Rotate image by 90, 180 or 270 degrees
 *    Author: StereoMatching
 *    Date: 29.04.2013
 *    Code version: -
 *    Availability: http://stackoverflow.com
 *
 ***************************************************************************************/

/**
 * Rotates the image in place by a multiple of 90 degrees (clockwise for
 * positive angles). The angle is normalized first, so any equivalent
 * angle (e.g. 450, -90, -450) maps onto the same rotation; angles that
 * are not a multiple of 90 (and 0 itself) leave the image unchanged,
 * matching the original behavior.
 *
 * @param img   image to rotate (modified in place).
 * @param angle rotation angle in degrees.
 */
public static void rotate_90n(Mat img, int angle)
{
    // Normalize into [0, 360) so e.g. -90 and 270 take the same branch.
    int normalized = ((angle % 360) + 360) % 360;
    if(normalized == 270){
        // Rotate clockwise 270 degrees: transpose then flip around x-axis.
        Core.transpose(img, img);
        Core.flip(img, img, 0);
    }else if(normalized == 180){
        // Rotate clockwise 180 degrees: flip around both axes.
        Core.flip(img, img, -1);
    }else if(normalized == 90){
        // Rotate clockwise 90 degrees: transpose then flip around y-axis.
        Core.transpose(img, img);
        Core.flip(img, img, 1);
    }
}
 
Example 3
Source File: ImageProcessor.java    From Document-Scanner with GNU General Public License v3.0 6 votes vote down vote up
/**
 * Decodes a captured picture, optionally corrects a known device rotation
 * bug, runs document detection on it and hands the result to the main
 * activity for saving.
 *
 * Fix: the original called {@code picture.release()} a second time at the
 * end of the method, after the buffer had already been released right
 * after decoding — the redundant call is removed.
 *
 * @param picture encoded image bytes wrapped in a Mat; released here.
 */
public void processPicture( Mat picture ) {

    // Decode the raw (encoded) bytes into a pixel Mat.
    Mat img = Imgcodecs.imdecode(picture, Imgcodecs.CV_LOAD_IMAGE_UNCHANGED);
    // The encoded buffer is no longer needed once decoded.
    picture.release();

    Log.d(TAG, "processPicture - imported image " + img.size().width + "x" + img.size().height);

    // Flipping around both axes is equivalent to a 180-degree rotation,
    // compensating for devices that deliver the capture upside down.
    if (mBugRotate) {
        Core.flip(img, img, 1 );
        Core.flip(img, img, 0 );
    }

    ScannedDocument doc = detectDocument(img);
    mMainActivity.saveDocument(doc);

    doc.release();

    mMainActivity.setImageProcessorBusy(false);
    mMainActivity.waitSpinnerInvisible();
}
 
Example 4
Source File: DetectionActivity.java    From Android-Face-Recognition-with-Deep-Learning-Test-Framework with Apache License 2.0 6 votes vote down vote up
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    // Preprocess a copy so the preview frame itself is left untouched.
    Mat preview = inputFrame.rgba();
    Mat workingCopy = new Mat();
    preview.copyTo(workingCopy);
    List<Mat> croppedFaces = ppF.getCroppedImage(workingCopy);
    Rect[] faceRects = ppF.getFacesForRecognition();

    // Selfie / Mirror mode
    if (front_camera) {
        Core.flip(preview, preview, 1);
    }

    // Skip drawing unless detection produced a consistent set of faces.
    boolean unusable = croppedFaces == null || croppedFaces.size() == 0
            || faceRects == null || faceRects.length == 0
            || croppedFaces.size() != faceRects.length;
    if (unusable) {
        return preview;
    }

    faceRects = MatOperation.rotateFaces(preview, faceRects, ppF.getAngleForRecognition());
    for (Rect faceRect : faceRects) {
        MatOperation.drawRectangleAndLabelOnPreview(preview, faceRect, "", front_camera);
    }
    return preview;
}
 
Example 5
Source File: FaceFragment.java    From OpenCV-android with Apache License 2.0 5 votes vote down vote up
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    // The preview arrives rotated 90 degrees counter-clockwise;
    // transpose + horizontal flip rotates it back upright.
    Core.transpose(mRgba, mRgba);
    Core.flip(mRgba, mRgba, 1);

    // Outline every detected face on the preview.
    for (Rect detected : face.detectObject(mRgba, matOfRect)) {
        Imgproc.rectangle(mRgba, detected.tl(), detected.br(), face.getRectColor(), 3);
    }

    return mRgba;
}
 
Example 6
Source File: MainActivity.java    From Form-N-Fun with MIT License 5 votes vote down vote up
@Override
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    // The Nexus 5X delivers frames upside down; flipping around both
    // axes performs the required 180-degree rotation on that device only.
    boolean needsFlip = Build.MODEL.equalsIgnoreCase("Nexus 5X");
    if (needsFlip) {
        Core.flip(mRgba, mRgba, -1);
    }
    findmazesandballs.apply(mRgba); // process frames
    return mRgba;
}
 
Example 7
Source File: FtcTestOpenCv.java    From FtcSamples with MIT License 5 votes vote down vote up
/**
 * Rotates an image by the given angle and writes the result into dst.
 * Exact multiples of 90 degrees are handled with cheap transpose/flip
 * operations; any other angle falls back to an affine warp about the
 * image center (output keeps the source size, so corners may be clipped).
 *
 * @param src specifies the image to be rotated.
 * @param dst specifies the destination to put the rotated image.
 * @param angle specifies the rotation angle.
 */
private void rotateImage(Mat src, Mat dst, double angle)
{
    angle %= 360.0;

    if (angle == 0.0)
    {
        // No rotation requested: copy the pixels straight across.
        src.copyTo(dst);
        return;
    }

    if (angle == 90.0 || angle == -270.0)
    {
        // Clockwise 90: transpose then flip around the y-axis.
        Core.transpose(src, dst);
        Core.flip(dst, dst, 1);
        return;
    }

    if (angle == 180.0 || angle == -180.0)
    {
        // 180 degrees: flip around both axes.
        Core.flip(src, dst, -1);
        return;
    }

    if (angle == 270.0 || angle == -90.0)
    {
        // Clockwise 270: transpose then flip around the x-axis.
        Core.transpose(src, dst);
        Core.flip(dst, dst, 0);
        return;
    }

    // Arbitrary angle: warp with a rotation matrix about the center.
    Mat rotMat = Imgproc.getRotationMatrix2D(
            new Point(src.cols()/2.0, src.rows()/2.0), angle, 1.0);
    Imgproc.warpAffine(src, dst, rotMat, src.size());
}
 
Example 8
Source File: OldMainActivity.java    From pasm-yolov3-Android with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Background task: prepares a classifier-sized bitmap either from a
 * user-loaded still image (myBitmap) or from the current camera frame,
 * publishes the full-size frame for display, then runs recognition.
 */
@Override
protected List<Classifier.Recognition> doInBackground(Mat... mats) {
    // Frame to analyze; mats[0] is supplied by the caller.
    Mat mRgbaTemp = mats[0];
    if (myBitmap != null){
        // A still image was loaded by the user: just scale it down.
        smallBitmap = Bitmap.createScaledBitmap(myBitmap, INPUT_WIDTH, INPUT_HEIGHT, false);
    }else{

        smallBitmap = Bitmap.createBitmap(INPUT_WIDTH, INPUT_HEIGHT, Bitmap.Config.RGB_565);
        Bitmap bigBitmap = Bitmap.createBitmap(mRgbaF.width(), mRgbaF.height(), Bitmap.Config.RGB_565);
        Mat mRgbaFixedSize = new Mat(INPUT_WIDTH, INPUT_HEIGHT, CvType.CV_8UC4);

        // Rotate the camera frame: transpose into mRgbaT, resize into
        // mRgbaF, then horizontal flip back into mRgbaTemp.
        Core.transpose(mRgbaTemp, mRgbaT);
        Imgproc.resize(mRgbaT, mRgbaF, mRgbaF.size(), 0,0, 0);
        Core.flip(mRgbaF, mRgbaTemp, 1 );

        // Classifier input must be exactly INPUT_WIDTH x INPUT_HEIGHT.
        Imgproc.resize(mRgbaTemp, mRgbaFixedSize, new Size(INPUT_WIDTH, INPUT_HEIGHT), 0,0, 0);

        Utils.matToBitmap(mRgbaFixedSize, smallBitmap);
        Utils.matToBitmap(mRgbaTemp, bigBitmap);

        // Show the full-size rotated frame in the UI while recognition runs.
        this.publishProgress(bigBitmap);


        //OLD Toast.makeText(getApplicationContext(), "Nessuna immagine caricata", Toast.LENGTH_SHORT).show();
    }

    List<Classifier.Recognition> recognitions = classifier.recognizeImage(smallBitmap);
    return  recognitions;
}
 
Example 9
Source File: CameraView.java    From FaceT with Mozilla Public License 2.0 5 votes vote down vote up
/**
 * Returns the grayscale plane of the current YUV frame rotated into
 * portrait orientation (transpose then flip around the x-axis).
 * The returned Mat is owned by this view and released on the next call.
 */
public Mat gray() {
    if (mRotatedGray != null) mRotatedGray.release();
    mRotatedGray = mYuvFrameData.submat(0, mWidth, 0, mHeight); //submat with reversed width and height because it's taken from the landscape frame
    mRotatedGray = mRotatedGray.t();
    Core.flip(mRotatedGray, mRotatedGray, 0);
    return mRotatedGray;
}
 
Example 10
Source File: CameraView.java    From FaceT with Mozilla Public License 2.0 5 votes vote down vote up
/**
 * Converts the current YUV frame to a 4-channel color Mat and rotates it
 * into portrait orientation (transpose then flip around the x-axis).
 * The returned Mat is owned by this view and released on the next call.
 * NOTE(review): the conversion constant is COLOR_YUV2BGR_NV12 although
 * the method is named rgba() — confirm the channel order expected by
 * downstream consumers.
 */
public Mat rgba() {
    Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2BGR_NV12, 4);
    if (mRotated != null) mRotated.release();
    mRotated = mRgba.submat(0, mWidth, 0, mHeight); // width/height swapped: the source is the landscape frame
    mRotated = mRotated.t();
    Core.flip(mRotated, mRotated, 0);
    return mRotated;
}
 
Example 11
Source File: OpenCvCameraView.java    From PixaToon with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Returns the grayscale plane of the current YUV frame rotated for
 * portrait display (transpose then flip around the y-axis). The returned
 * Mat is owned by this view and released on the next call.
 */
public Mat gray() {
    if (mRotated != null) mRotated.release();
    mRotated = mYuvFrameData.submat(0, mWidth, 0, mHeight); //submat with reversed width and height because it's taken from the landscape frame
    mRotated = mRotated.t();
    Core.flip(mRotated, mRotated, 1);
    return mRotated;
}
 
Example 12
Source File: OpenCvCameraView.java    From PixaToon with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Converts the current YUV frame to a 4-channel color Mat, mirrors it
 * first when the front camera is active (selfie view), then rotates it
 * for portrait display. The returned Mat is owned by this view and
 * released on the next call.
 * NOTE(review): the conversion constant is COLOR_YUV2BGR_NV12 despite
 * the rgba() name — confirm the expected channel order downstream.
 */
public Mat rgba() {
    Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2BGR_NV12, 4);
    if (mRotated != null) mRotated.release();
    // Mirror before rotation so the front-camera preview is selfie-style.
    if(mCameraIndex == CAMERA_ID_FRONT)
        Core.flip(mRgba, mRgba, 1);
    mRotated = mRgba.t();
    Core.flip(mRotated, mRotated, 1);
    return mRotated;
}
 
Example 13
Source File: FaceRecognitionAppActivity.java    From FaceRecognitionApp with GNU General Public License v2.0 4 votes vote down vote up
/**
 * Per-frame callback that flips/rotates the color and grayscale frames
 * so they are upright and mirrored correctly for the active camera
 * (front/back) and current screen orientation, then stores them in the
 * mGray/mRgba fields and returns the color frame for display.
 * Note the two switches differ: the RGB frame is only flipped, while the
 * grayscale frame is additionally transposed in portrait orientations.
 */
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat mGrayTmp = inputFrame.gray();
    Mat mRgbaTmp = inputFrame.rgba();

    // Flip image to get mirror effect
    int orientation = mOpenCvCameraView.getScreenOrientation();
    if (mOpenCvCameraView.isEmulator()) // Treat emulators as a special case
        Core.flip(mRgbaTmp, mRgbaTmp, 1); // Flip along y-axis
    else {
        switch (orientation) { // RGB image
            case ActivityInfo.SCREEN_ORIENTATION_PORTRAIT:
            case ActivityInfo.SCREEN_ORIENTATION_REVERSE_PORTRAIT:
                if (mOpenCvCameraView.mCameraIndex == CameraBridgeViewBase.CAMERA_ID_FRONT)
                    Core.flip(mRgbaTmp, mRgbaTmp, 0); // Flip along x-axis
                else
                    Core.flip(mRgbaTmp, mRgbaTmp, -1); // Flip along both axis
                break;
            case ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE:
            case ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE:
                if (mOpenCvCameraView.mCameraIndex == CameraBridgeViewBase.CAMERA_ID_FRONT)
                    Core.flip(mRgbaTmp, mRgbaTmp, 1); // Flip along y-axis
                break;
        }
        switch (orientation) { // Grayscale image
            case ActivityInfo.SCREEN_ORIENTATION_PORTRAIT:
                Core.transpose(mGrayTmp, mGrayTmp); // Rotate image
                if (mOpenCvCameraView.mCameraIndex == CameraBridgeViewBase.CAMERA_ID_FRONT)
                    Core.flip(mGrayTmp, mGrayTmp, -1); // Flip along both axis
                else
                    Core.flip(mGrayTmp, mGrayTmp, 1); // Flip along y-axis
                break;
            case ActivityInfo.SCREEN_ORIENTATION_REVERSE_PORTRAIT:
                Core.transpose(mGrayTmp, mGrayTmp); // Rotate image
                if (mOpenCvCameraView.mCameraIndex == CameraBridgeViewBase.CAMERA_ID_BACK)
                    Core.flip(mGrayTmp, mGrayTmp, 0); // Flip along x-axis
                break;
            case ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE:
                if (mOpenCvCameraView.mCameraIndex == CameraBridgeViewBase.CAMERA_ID_FRONT)
                    Core.flip(mGrayTmp, mGrayTmp, 1); // Flip along y-axis
                break;
            case ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE:
                Core.flip(mGrayTmp, mGrayTmp, 0); // Flip along x-axis
                if (mOpenCvCameraView.mCameraIndex == CameraBridgeViewBase.CAMERA_ID_BACK)
                    Core.flip(mGrayTmp, mGrayTmp, 1); // Flip along y-axis
                break;
        }
    }

    // Publish the corrected frames to the fields used by the rest of the app.
    mGray = mGrayTmp;
    mRgba = mRgbaTmp;

    return mRgba;
}
 
Example 14
Source File: Transform.java    From FTCVision with MIT License 4 votes vote down vote up
/**
 * Flips the image in place along the axis described by flipType.
 *
 * @param img      image to flip; modified in place.
 * @param flipType axis selector whose {@code val} is passed directly as
 *                 OpenCV's flip code (0 = x-axis, positive = y-axis,
 *                 negative = both axes).
 */
public static void flip(Mat img, FlipType flipType) {
    Core.flip(img, img, flipType.val);
}
 
Example 15
Source File: AAVActivity.java    From AAV with GNU General Public License v2.0 4 votes vote down vote up
/**
 * Per-frame callback: thresholds the frame in HSV space, keeps the
 * largest contour above MIN_CONTOUR_AREA and (optionally) draws its
 * minimum enclosing circle on the preview.
 */
@Override
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
	// Guard against the frame being recycled while we process it.
	synchronized (inputFrame) {

		_rgbaImage = inputFrame.rgba();

		// The Nexus 5X delivers frames upside down; flip both axes (180°).
		if (android.os.Build.MODEL.equalsIgnoreCase("Nexus 5X")) {
			Core.flip(_rgbaImage, _rgbaImage, -1);
		}

		double current_contour;

		// In contrast to the C++ interface, Android API captures images in the RGBA format.
		// Also, in HSV space, only the hue determines which color it is. Saturation determines
		// how 'white' the color is, and Value determines how 'dark' the color is.
		Imgproc.cvtColor(_rgbaImage, _hsvMat, Imgproc.COLOR_RGB2HSV_FULL);

		// Binary mask of pixels inside the configured HSV color range.
		Core.inRange(_hsvMat, _lowerThreshold, _upperThreshold, _processedMat);

		// Imgproc.dilate(_processedMat, _dilatedMat, new Mat());
		Imgproc.erode(_processedMat, _dilatedMat, new Mat());
		Imgproc.findContours(_dilatedMat, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
		MatOfPoint2f points = new MatOfPoint2f();
		// Track the largest contour seen so far; 7 is the initial floor.
		_contourArea = 7;
		for (int i = 0, n = contours.size(); i < n; i++) {
			current_contour = Imgproc.contourArea(contours.get(i));
			if (current_contour > _contourArea) {
				_contourArea = current_contour;
				contours.get(i).convertTo(points, CvType.CV_32FC2); // contours.get(x) is a single MatOfPoint, but to use minEnclosingCircle we need to pass a MatOfPoint2f so we need to do a
				// conversion
			}
		}
		if (!points.empty() && _contourArea > MIN_CONTOUR_AREA) {
			// Circle radius derived from the area: r = sqrt(area / pi).
			Imgproc.minEnclosingCircle(points, _centerPoint, null);
			// Core.circle(_rgbaImage, _centerPoint, 3, new Scalar(255, 0, 0), Core.FILLED);
			if (_showContourEnable)
				Core.circle(_rgbaImage, _centerPoint, (int) Math.round(Math.sqrt(_contourArea / Math.PI)), new Scalar(255, 0, 0), 3, 8, 0);// Core.FILLED);
		}
		contours.clear();
	}
	return _rgbaImage;
}
 
Example 16
Source File: StudentImageCollectionActivity.java    From ml-authentication with Apache License 2.0 4 votes vote down vote up
/**
 * Per-frame callback: after the intro animation, mirrors the preview,
 * detects a single face on an unmirrored copy and, while the face is
 * inside the overlay frame, collects cropped face images for the current
 * student until NUMBER_OF_IMAGES are stored; falls back to another
 * activity if no face is seen for too long.
 */
@Override
    public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
        final Mat imgRgba = inputFrame.rgba();

//        Do not change screen brightness manually during test phase, due to the unknown location of the different test users.
//        M.Schälchli 20170129
//        if (isDeviceRooted){
//            DetectionHelper.setIncreasedScreenBrightness(getApplicationContext(), imgRgba);
//        }

        long currentTime = new Date().getTime();

        if (authenticationAnimationAlreadyPlayed || ((startTimeAuthenticationAnimation + AuthenticationActivity.AUTHENTICATION_ANIMATION_TIME) < currentTime)){
            prepareForAuthentication();

            // NOTE(review): imgCopy is never explicitly released in this
            // method — verify native memory is reclaimed elsewhere.
            Mat imgCopy = new Mat();

            // Store original image for face recognition
            imgRgba.copyTo(imgCopy);

            // Mirror front camera image
            Core.flip(imgRgba,imgRgba,1);

            Rect face = new Rect();
            boolean isFaceInsideFrame = false;
            boolean faceDetected = false;

            // Rate-limit detection to once per TIMER_DIFF milliseconds.
            if((lastTime + TIMER_DIFF) < currentTime){
                lastTime = currentTime;
                List<Mat> images = ppF.getCroppedImage(imgCopy);
                if((images != null) && (images.size() == 1)){
                    Mat img = images.get(0);
                    if(img != null) {
                        Rect[] faces = ppF.getFacesForRecognition();
                        if ((faces != null) && (faces.length == 1)) {
                            faces = MatOperation.rotateFaces(imgRgba, faces, ppF.getAngleForRecognition());
                            face = faces[0];
                            faceDetected = true;
                            // Reset startTimeFallback for fallback timeout, because at least one face has been detected
                            startTimeFallback = currentTime;
                            isFaceInsideFrame = DetectionHelper.isFaceInsideFrame(animalOverlay, imgRgba, face);

                            if (isFaceInsideFrame){
                                if (!activityStopped){
                                    mediaPlayerAnimalSound.start();

                                    studentImages.add(img);

                                    // Stop after NUMBER_OF_IMAGES (settings option)
                                    // NOTE(review): the limit is checked before the
                                    // increment below, and after the add above, so one
                                    // extra image is collected once the counter reaches
                                    // NUMBER_OF_IMAGES — confirm this is intended.
                                    if(imagesProcessed == NUMBER_OF_IMAGES){
                                        storeStudentImages();
                                        finish();
                                    }

                                    imagesProcessed++;
                                }
                            }
                        }
                    }
                }
            }

            if (DetectionHelper.shouldFallbackActivityBeStarted(startTimeFallback, currentTime)){
                // Prevent from second execution of fallback activity because of threading
                startTimeFallback = currentTime;
                DetectionHelper.startFallbackActivity(getApplicationContext(), getClass().getName());
                finish();
            }

            // Guide the user toward the frame when a face is visible but outside it.
            if (faceDetected && !isFaceInsideFrame && !activityStopped){
                DetectionHelper.drawArrowFromFaceToFrame(animalOverlay, imgRgba, face);
                AuthenticationInstructionHelper.playTabletPlacementOverlay(mediaPlayerTabletPlacement, mediaPlayerTabletPlacementOverlay, mediaPlayerAnimalSound);
            }

            EnvironmentSettings.freeMemory();
        }

        return imgRgba;
    }
 
Example 17
Source File: AuthenticationActivity.java    From ml-authentication with Apache License 2.0 4 votes vote down vote up
/**
 * Per-frame callback: once TensorFlow has loaded and the intro animation
 * has finished, harvests the result of a completed recognition thread,
 * mirrors the preview, detects a single face on an unmirrored copy and —
 * when the face sits inside the overlay frame — launches a new
 * recognition thread on the cropped face; falls back to the image
 * collection activity after too many tries or a detection timeout.
 */
@Override
    public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
        Mat imgRgba = inputFrame.rgba();

//        Do not change screen brightness manually during test phase, due to the unknown location of the different test users.
//        M.Schälchli 20170129
//        if (isDeviceRooted){
//            DetectionHelper.setIncreasedScreenBrightness(getApplicationContext(), imgRgba);
//        }

        long currentTime = new Date().getTime();

        if ((!tensorFlowLoadingThread.isAlive()) && ((startTimeAuthenticationAnimation + AUTHENTICATION_ANIMATION_TIME) < currentTime)){
            prepareForAuthentication();

            // A finished recognition thread: consume its result exactly once.
            if (!recognitionThread.isAlive() && recognitionThreadStarted) {
                List<Student> students = recognitionThread.getRecognizedStudent();
                // NOTE(review): student is initialized with new Student(), so
                // the (student != null) check below is always true; the real
                // guard is students.size() == 1 — confirm intended.
                Student student = new Student();
                if (students.size() == 1){
                    student = students.get(0);
                }
                numberOfTries++;
                Log.i(getClass().getName(), "Number of authentication/recognition tries: " + numberOfTries);
                if ((student != null) && (students.size() == 1)) {
                    AuthenticationHelper.updateCurrentStudent(student, getApplicationContext(), false);
                    finish();
                } else if (numberOfTries >= NUMBER_OF_MAXIMUM_TRIES) {
                    startStudentImageCollectionActivity(true);
                }
                recognitionThreadStarted = false;
            }

            Mat imgCopy = new Mat();

            // Store original image for face recognition
            imgRgba.copyTo(imgCopy);

            // Mirror front camera image
            Core.flip(imgRgba,imgRgba,1);

            Rect face = new Rect();
            boolean isFaceInsideFrame = false;
            boolean faceDetected = false;

            List<Mat> images = ppF.getCroppedImage(imgCopy);
            if (images != null && images.size() == 1){
                Mat img = images.get(0);
                if (img != null){
                    Rect[] faces = ppF.getFacesForRecognition();
                    if (faces != null && faces.length == 1){
                        faces = MatOperation.rotateFaces(imgRgba, faces, ppF.getAngleForRecognition());
                        face = faces[0];
                        faceDetected = true;
                        // Reset startTimeFallback for fallback timeout, because at least one face has been detected
                        startTimeFallback = currentTime;
                        isFaceInsideFrame = DetectionHelper.isFaceInsideFrame(animalOverlay, imgRgba, face);

                        if (isFaceInsideFrame){
                            // Only one recognition thread runs at a time.
                            if (!recognitionThread.isAlive() && !recognitionThreadStarted){
                                if (!activityStopped){
                                    mediaPlayerAnimalSound.start();

                                    recognitionThread = new RecognitionThread(tensorFlow, studentImageCollectionEventDao);
                                    recognitionThread.setImg(img);
                                    recognitionThread.start();
                                    recognitionThreadStarted = true;
                                }
                            }
                        }
                    }
                }
            }

            // Guide the user toward the frame when a face is visible but outside it.
            if (faceDetected && !isFaceInsideFrame && !activityStopped){
                DetectionHelper.drawArrowFromFaceToFrame(animalOverlay, imgRgba, face);
                AuthenticationInstructionHelper.playTabletPlacementOverlay(mediaPlayerTabletPlacement, mediaPlayerTabletPlacementOverlay, mediaPlayerAnimalSound);
            }

            if (DetectionHelper.shouldFallbackActivityBeStarted(startTimeFallback, currentTime)){
                // Prevent from second execution of fallback activity because of threading
                startTimeFallback = currentTime;
                DetectionHelper.startFallbackActivity(getApplicationContext(), getClass().getName());
                finish();
            }

            EnvironmentSettings.freeMemory();
        }

        return imgRgba;
    }
 
Example 18
Source File: Webcam.java    From ResCan with GNU General Public License v2.0 4 votes vote down vote up
/**
 * Entry point: loads the native OpenCV library, sets up a FreeTTS voice,
 * opens the default webcam and loops forever reading frames, rotating
 * each one 180 degrees, processing it (doMagic) and displaying the result.
 * NOTE(review): the native library name "opencv_java411" is hard-coded to
 * OpenCV 4.1.1 — must match the installed OpenCV build.
 */
public static void main(final String args[]) {

		System.out.println("Hello, OpenCV");
		// Load the native library.
		System.loadLibrary("opencv_java411");

		// Configure text-to-speech output via FreeTTS.
		listAllVoices();
		System.setProperty("freetts.voices", "com.sun.speech.freetts.en.us.cmu_us_kal.KevinVoiceDirectory");
		VoiceManager voiceManager = VoiceManager.getInstance();
		voice = voiceManager.getVoice("kevin16");
		voice.allocate();

		// Device 0 is the default system webcam.
		camera = new VideoCapture(0);

		if (!camera.isOpened()) {
			System.out.println("Camera Error");
			}
		else {
			System.out.println("Camera OK?");
		}


		// Capture loop: runs until the camera is closed externally.
		while (camera.isOpened()) {
			Mat frame = new Mat();



			// camera.grab();
			// System.out.println("Frame Grabbed");
			// camera.retrieve(frame);
			// System.out.println("Frame Decoded");

			camera.read(frame);
			// Rotate the frame 180 degrees (flip around both axes).
			Core.flip(frame, frame, -1);

			/*
			 * No difference camera.release();
			 */

			// System.out.println("Captured Frame Width " + frame.width());

			doMagic(frame);
			showResult(toBufferedImage(frame));

			/*
			 * try { Thread.sleep(10); } catch (InterruptedException e) { //
			 * TODO Auto-generated catch block e.printStackTrace(); }
			 */
		}
	}
 
Example 19
Source File: StudentImageCollectionActivity.java    From ml-authentication with Apache License 2.0 4 votes vote down vote up
/**
 * Per-frame callback: after the intro animation, mirrors the preview,
 * detects a single face on an unmirrored copy and, while the face is
 * inside the overlay frame, collects cropped face images for the current
 * student until NUMBER_OF_IMAGES are stored; falls back to another
 * activity if no face is seen for too long.
 */
@Override
    public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
        final Mat imgRgba = inputFrame.rgba();

//        Do not change screen brightness manually during test phase, due to the unknown location of the different test users.
//        M.Schälchli 20170129
//        if (isDeviceRooted){
//            DetectionHelper.setIncreasedScreenBrightness(getApplicationContext(), imgRgba);
//        }

        long currentTime = new Date().getTime();

        if (authenticationAnimationAlreadyPlayed || ((startTimeAuthenticationAnimation + AuthenticationActivity.AUTHENTICATION_ANIMATION_TIME) < currentTime)){
            prepareForAuthentication();

            // NOTE(review): imgCopy is never explicitly released in this
            // method — verify native memory is reclaimed elsewhere.
            Mat imgCopy = new Mat();

            // Store original image for face recognition
            imgRgba.copyTo(imgCopy);

            // Mirror front camera image
            Core.flip(imgRgba,imgRgba,1);

            Rect face = new Rect();
            boolean isFaceInsideFrame = false;
            boolean faceDetected = false;

            // Rate-limit detection to once per TIMER_DIFF milliseconds.
            if((lastTime + TIMER_DIFF) < currentTime){
                lastTime = currentTime;
                List<Mat> images = ppF.getCroppedImage(imgCopy);
                if((images != null) && (images.size() == 1)){
                    Mat img = images.get(0);
                    if(img != null) {
                        Rect[] faces = ppF.getFacesForRecognition();
                        if ((faces != null) && (faces.length == 1)) {
                            faces = MatOperation.rotateFaces(imgRgba, faces, ppF.getAngleForRecognition());
                            face = faces[0];
                            faceDetected = true;
                            // Reset startTimeFallback for fallback timeout, because at least one face has been detected
                            startTimeFallback = currentTime;
                            isFaceInsideFrame = DetectionHelper.isFaceInsideFrame(animalOverlay, imgRgba, face);

                            if (isFaceInsideFrame){
                                if (!activityStopped){
                                    mediaPlayerAnimalSound.start();

                                    studentImages.add(img);

                                    // Stop after NUMBER_OF_IMAGES (settings option)
                                    // NOTE(review): the limit is checked before the
                                    // increment below, and after the add above, so one
                                    // extra image is collected once the counter reaches
                                    // NUMBER_OF_IMAGES — confirm this is intended.
                                    if(imagesProcessed == NUMBER_OF_IMAGES){
                                        storeStudentImages();
                                        finish();
                                    }

                                    imagesProcessed++;
                                }
                            }
                        }
                    }
                }
            }

            if (DetectionHelper.shouldFallbackActivityBeStarted(startTimeFallback, currentTime)){
                // Prevent from second execution of fallback activity because of threading
                startTimeFallback = currentTime;
                DetectionHelper.startFallbackActivity(getApplicationContext(), getClass().getName());
                finish();
            }

            // Guide the user toward the frame when a face is visible but outside it.
            if (faceDetected && !isFaceInsideFrame && !activityStopped){
                DetectionHelper.drawArrowFromFaceToFrame(animalOverlay, imgRgba, face);
                AuthenticationInstructionHelper.playTabletPlacementOverlay(mediaPlayerTabletPlacement, mediaPlayerTabletPlacementOverlay, mediaPlayerAnimalSound);
            }

            EnvironmentSettings.freeMemory();
        }

        return imgRgba;
    }
 
Example 20
Source File: MainActivity.java    From pasm-yolov3-Android with GNU General Public License v3.0 4 votes vote down vote up
/**
 * Background task: prepares classifier input either from a user-loaded
 * still image (myBitmap) or from the current camera frame, publishes a
 * display-sized bitmap as progress, runs recognition and returns a Mat
 * with the detection boxes drawn (also saved to disk).
 */
@Override
protected Mat doInBackground(Mat... mats) {
    // Frame to analyze; mats[0] is supplied by the caller.
    Mat mRgbaTemp = mats[0];
    ImageProcessor processor = new ImageProcessor(getApplicationContext(), classifier.getLabels());
    if (myBitmap != null){
        // A still image was loaded by the user: scale it for the classifier
        // and publish a screen-height preview that keeps the aspect ratio.
        smallBitmap = Bitmap.createScaledBitmap(myBitmap, INPUT_SIZE, INPUT_SIZE, false);
        Display display = getWindowManager().getDefaultDisplay();
        Point size = new Point();
        display.getSize(size);
        int width = size.x;
        int height = size.y;

        float ratio = (float)myBitmap.getWidth() / (float)myBitmap.getHeight();
        Bitmap reducedBitmap = Bitmap.createScaledBitmap(myBitmap, (int) (height * ratio), height, false);

        this.publishProgress(reducedBitmap);
        processor.loadImage(myBitmap, INPUT_SIZE, INPUT_SIZE);
    }else{
        smallBitmap = Bitmap.createBitmap(INPUT_SIZE, INPUT_SIZE, Bitmap.Config.RGB_565);
        Bitmap bigBitmap = Bitmap.createBitmap(mRgbaF.width(), mRgbaF.height(), Bitmap.Config.RGB_565);
        Mat mRgbaFixedSize = new Mat(INPUT_SIZE, INPUT_SIZE, CvType.CV_8UC4);

        // Rotate the camera frame: transpose into mRgbaT, resize into
        // mRgbaF, then horizontal flip back into mRgbaTemp.
        Core.transpose(mRgbaTemp, mRgbaT);
        Imgproc.resize(mRgbaT, mRgbaF, mRgbaF.size(), 0,0, 0);
        Core.flip(mRgbaF, mRgbaTemp, 1 );

        // Classifier input must be exactly INPUT_SIZE x INPUT_SIZE.
        Imgproc.resize(mRgbaTemp, mRgbaFixedSize, new Size(INPUT_SIZE, INPUT_SIZE), 0,0, 0);

        Utils.matToBitmap(mRgbaFixedSize, smallBitmap);
        Utils.matToBitmap(mRgbaTemp, bigBitmap);

        this.publishProgress(bigBitmap);
        processor.loadImage(bigBitmap, INPUT_SIZE, INPUT_SIZE);
        //OLD Toast.makeText(getApplicationContext(), "Nessuna immagine caricata", Toast.LENGTH_SHORT).show();
    }

    // Recognize, draw boxes above the 0.2 confidence threshold and persist.
    List<Classifier.Recognition> recognitions = classifier.recognizeImage(smallBitmap);
    Mat mat = processor.drawBoxes(recognitions, 0.2);
    imageSaver.save(mat); // remove for realtime processing!
    return mat;
}