ch.zhaw.facerecognitionlibrary.Helpers.MatOperation Java Examples

The following examples show how to use ch.zhaw.facerecognitionlibrary.Helpers.MatOperation. They are taken from the open-source projects named above each example and exercise four MatOperation methods: rotateFaces, drawRectangleAndLabelOnPreview, getMirroredFaceForFrontCamera, and rotate_90n.
Example #1
Source File: RecognitionActivity.java    From Android-Face-Recognition-with-Deep-Learning-Test-Framework with Apache License 2.0
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat imgRgba = inputFrame.rgba();
    Mat img = new Mat();
    imgRgba.copyTo(img);
    List<Mat> images = ppF.getProcessedImage(img, PreProcessorFactory.PreprocessingMode.RECOGNITION);
    Rect[] faces = ppF.getFacesForRecognition();

    // Selfie / Mirror mode
    if(front_camera){
        Core.flip(imgRgba,imgRgba,1);
    }
    if (images == null || images.size() == 0 || faces == null || faces.length == 0 || images.size() != faces.length) {
        // No consistent detection result for this frame; show the preview unchanged
        return imgRgba;
    } else {
        faces = MatOperation.rotateFaces(imgRgba, faces, ppF.getAngleForRecognition());
        for(int i = 0; i<faces.length; i++){
            MatOperation.drawRectangleAndLabelOnPreview(imgRgba, faces[i], rec.recognize(images.get(i), ""), front_camera);
        }
        return imgRgba;
    }
}
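
For orientation, here is the same pair of MatOperation calls in a minimal standalone program, applied to a still image instead of a camera frame. This is a sketch: the image paths and the hand-made face rectangle are illustrative assumptions, the MatOperation signatures are inferred from the call sites above, and the angle 0 stands in for ppF.getAngleForRecognition().

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Rect;
import org.opencv.imgcodecs.Imgcodecs;

import ch.zhaw.facerecognitionlibrary.Helpers.MatOperation;

public class MatOperationSketch {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME); // load the OpenCV native library

        Mat img = Imgcodecs.imread("/path/to/face.jpg"); // hypothetical input image
        Rect[] faces = { new Rect(40, 40, 120, 120) };   // hand-made stand-in for a detection

        // Rotate the face rectangles to match the preview orientation (angle 0 = no-op here)
        faces = MatOperation.rotateFaces(img, faces, 0);

        // Draw the rectangle and a label onto the image; 'false' = not mirrored (back camera)
        MatOperation.drawRectangleAndLabelOnPreview(img, faces[0], "label", false);

        Imgcodecs.imwrite("/path/to/annotated.jpg", img); // hypothetical output path
    }
}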
 
Example #2
Source File: DetectionActivity.java    From Android-Face-Recognition-with-Deep-Learning-Test-Framework with Apache License 2.0
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat imgRgba = inputFrame.rgba();
    Mat img = new Mat();
    imgRgba.copyTo(img);
    List<Mat> images = ppF.getCroppedImage(img);
    Rect[] faces = ppF.getFacesForRecognition();

    // Selfie / Mirror mode
    if(front_camera){
        Core.flip(imgRgba,imgRgba,1);
    }
    if (images == null || images.size() == 0 || faces == null || faces.length == 0 || images.size() != faces.length) {
        // No consistent detection result for this frame; show the preview unchanged
        return imgRgba;
    } else {
        faces = MatOperation.rotateFaces(imgRgba, faces, ppF.getAngleForRecognition());
        for(int i = 0; i<faces.length; i++){
            MatOperation.drawRectangleAndLabelOnPreview(imgRgba, faces[i], "", front_camera);
        }
        return imgRgba;
    }
}
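
Both callbacks above implement CameraBridgeViewBase.CvCameraViewListener2 from OpenCV's Android SDK. A minimal sketch of the surrounding wiring follows; the helper method and lifecycle details are assumptions, while the listener interface, setCvCameraViewListener, and enableView are standard OpenCV Android API.

import org.opencv.android.CameraBridgeViewBase;
import org.opencv.core.Mat;

public abstract class CameraActivitySketch
        implements CameraBridgeViewBase.CvCameraViewListener2 {

    private CameraBridgeViewBase cameraView; // typically a JavaCameraView from the layout

    protected void connectCamera(CameraBridgeViewBase view) {
        cameraView = view;
        cameraView.setCvCameraViewListener(this); // routes frames to onCameraFrame
        cameraView.enableView();                  // start only after OpenCV is initialized
    }

    @Override public void onCameraViewStarted(int width, int height) { }
    @Override public void onCameraViewStopped() { }

    // onCameraFrame(CvCameraViewFrame) is implemented as in the examples above
}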
 
Example #3
Source File: DetectionHelper.java    From ml-authentication with Apache License 2.0
public static void drawArrowFromFaceToFrame(AnimalOverlay animalOverlay, Mat img, Rect face){
    Rect mirroredFace = MatOperation.getMirroredFaceForFrontCamera(img, face);
    Point pointFace = new Point(mirroredFace.tl().x + mirroredFace.width / 2, mirroredFace.tl().y + mirroredFace.height / 2);
    Point pointFrame = new Point(animalOverlay.getFrameStartX() + (animalOverlay.getFrameEndX() - animalOverlay.getFrameStartX()) / 2, animalOverlay.getFrameStartY() + (animalOverlay.getFrameEndY() - animalOverlay.getFrameStartY()) / 2);
    Imgproc.arrowedLine(img, pointFace, pointFrame, RED_COLOR, 20, Imgproc.LINE_8, 0, 0.2);
}
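
getMirroredFaceForFrontCamera is needed here because the preview was flipped with Core.flip(img, img, 1) while the detector ran on the unflipped copy, so the rectangle must be mirrored to line up with the preview pixels. A sketch of the geometry (an illustration of the idea, not the library's actual implementation):

import org.opencv.core.Mat;
import org.opencv.core.Rect;

public class MirrorSketch {
    /** Illustration only: mirror a detection rectangle across the vertical
     *  centre line of the image, matching what Core.flip(img, img, 1) does to pixels. */
    public static Rect mirrorRect(Mat img, Rect face) {
        int mirroredX = img.cols() - face.x - face.width;
        return new Rect(mirroredX, face.y, face.width, face.height);
    }
}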
 
Example #4
Source File: AuthenticationActivity.java    From ml-authentication with Apache License 2.0
    @Override
    public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
        Mat imgRgba = inputFrame.rgba();

//        Do not change screen brightness manually during test phase, due to the unknown location of the different test users.
//        M.Schälchli 20170129
//        if (isDeviceRooted){
//            DetectionHelper.setIncreasedScreenBrightness(getApplicationContext(), imgRgba);
//        }

        long currentTime = new Date().getTime();

        if ((!tensorFlowLoadingThread.isAlive()) && ((startTimeAuthenticationAnimation + AUTHENTICATION_ANIMATION_TIME) < currentTime)){
            prepareForAuthentication();

            if (!recognitionThread.isAlive() && recognitionThreadStarted) {
                List<Student> students = recognitionThread.getRecognizedStudent();
                Student student = new Student();
                if (students.size() == 1){
                    student = students.get(0);
                }
                numberOfTries++;
                Log.i(getClass().getName(), "Number of authentication/recognition tries: " + numberOfTries);
                if ((student != null) && (students.size() == 1)) {
                    AuthenticationHelper.updateCurrentStudent(student, getApplicationContext(), false);
                    finish();
                } else if (numberOfTries >= NUMBER_OF_MAXIMUM_TRIES) {
                    startStudentImageCollectionActivity(true);
                }
                recognitionThreadStarted = false;
            }

            Mat imgCopy = new Mat();

            // Store original image for face recognition
            imgRgba.copyTo(imgCopy);

            // Mirror front camera image
            Core.flip(imgRgba,imgRgba,1);

            Rect face = new Rect();
            boolean isFaceInsideFrame = false;
            boolean faceDetected = false;

            List<Mat> images = ppF.getCroppedImage(imgCopy);
            if (images != null && images.size() == 1){
                Mat img = images.get(0);
                if (img != null){
                    Rect[] faces = ppF.getFacesForRecognition();
                    if (faces != null && faces.length == 1){
                        faces = MatOperation.rotateFaces(imgRgba, faces, ppF.getAngleForRecognition());
                        face = faces[0];
                        faceDetected = true;
                        // Reset startTimeFallback for fallback timeout, because at least one face has been detected
                        startTimeFallback = currentTime;
                        isFaceInsideFrame = DetectionHelper.isFaceInsideFrame(animalOverlay, imgRgba, face);

                        if (isFaceInsideFrame){
                            if (!recognitionThread.isAlive() && !recognitionThreadStarted){
                                if (!activityStopped){
                                    mediaPlayerAnimalSound.start();

                                    recognitionThread = new RecognitionThread(tensorFlow, studentImageCollectionEventDao);
                                    recognitionThread.setImg(img);
                                    recognitionThread.start();
                                    recognitionThreadStarted = true;
                                }
                            }
                        }
                    }
                }
            }

            if (faceDetected && !isFaceInsideFrame && !activityStopped){
                DetectionHelper.drawArrowFromFaceToFrame(animalOverlay, imgRgba, face);
                AuthenticationInstructionHelper.playTabletPlacementOverlay(mediaPlayerTabletPlacement, mediaPlayerTabletPlacementOverlay, mediaPlayerAnimalSound);
            }

            if (DetectionHelper.shouldFallbackActivityBeStarted(startTimeFallback, currentTime)){
                // Prevent from second execution of fallback activity because of threading
                startTimeFallback = currentTime;
                DetectionHelper.startFallbackActivity(getApplicationContext(), getClass().getName());
                finish();
            }

            EnvironmentSettings.freeMemory();
        }

        return imgRgba;
    }
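
Because onCameraFrame must return quickly, the recognition above runs on a RecognitionThread that the callback starts once and then polls with isAlive() on later frames. A minimal, framework-free sketch of that poll-on-frame pattern; all names here are hypothetical stand-ins, not the app's classes:

import java.util.concurrent.atomic.AtomicReference;

public class PollOnFrameSketch {
    private Thread worker = new Thread();                 // dummy; isAlive() is false
    private boolean workerStarted = false;
    private final AtomicReference<String> result = new AtomicReference<>();

    /** Called once per frame; 'input' stands in for the cropped face Mat. */
    public void onFrame(final String input) {
        // Worker finished since the last frame: consume its result.
        if (!worker.isAlive() && workerStarted) {
            System.out.println("recognized: " + result.get());
            workerStarted = false;
        }
        // No worker running: start one without blocking the frame callback.
        if (!worker.isAlive() && !workerStarted) {
            worker = new Thread(new Runnable() {
                public void run() { result.set("label-for-" + input); } // stand-in for recognition
            });
            worker.start();
            workerStarted = true;
        }
    }
}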
 
Example #5
Source File: StudentImageCollectionActivity.java    From ml-authentication with Apache License 2.0
    @Override
    public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
        final Mat imgRgba = inputFrame.rgba();

//        Do not change screen brightness manually during test phase, due to the unknown location of the different test users.
//        M.Schälchli 20170129
//        if (isDeviceRooted){
//            DetectionHelper.setIncreasedScreenBrightness(getApplicationContext(), imgRgba);
//        }

        long currentTime = new Date().getTime();

        if (authenticationAnimationAlreadyPlayed || ((startTimeAuthenticationAnimation + AuthenticationActivity.AUTHENTICATION_ANIMATION_TIME) < currentTime)){
            prepareForAuthentication();

            Mat imgCopy = new Mat();

            // Store original image for face recognition
            imgRgba.copyTo(imgCopy);

            // Mirror front camera image
            Core.flip(imgRgba,imgRgba,1);

            Rect face = new Rect();
            boolean isFaceInsideFrame = false;
            boolean faceDetected = false;

            if((lastTime + TIMER_DIFF) < currentTime){
                lastTime = currentTime;
                List<Mat> images = ppF.getCroppedImage(imgCopy);
                if((images != null) && (images.size() == 1)){
                    Mat img = images.get(0);
                    if(img != null) {
                        Rect[] faces = ppF.getFacesForRecognition();
                        if ((faces != null) && (faces.length == 1)) {
                            faces = MatOperation.rotateFaces(imgRgba, faces, ppF.getAngleForRecognition());
                            face = faces[0];
                            faceDetected = true;
                            // Reset startTimeFallback for fallback timeout, because at least one face has been detected
                            startTimeFallback = currentTime;
                            isFaceInsideFrame = DetectionHelper.isFaceInsideFrame(animalOverlay, imgRgba, face);

                            if (isFaceInsideFrame){
                                if (!activityStopped){
                                    mediaPlayerAnimalSound.start();

                                    studentImages.add(img);

                                    // Stop after NUMBER_OF_IMAGES (settings option)
                                    if(imagesProcessed == NUMBER_OF_IMAGES){
                                        storeStudentImages();
                                        finish();
                                    }

                                    imagesProcessed++;
                                }
                            }
                        }
                    }
                }
            }

            if (DetectionHelper.shouldFallbackActivityBeStarted(startTimeFallback, currentTime)){
                // Prevent from second execution of fallback activity because of threading
                startTimeFallback = currentTime;
                DetectionHelper.startFallbackActivity(getApplicationContext(), getClass().getName());
                finish();
            }

            if (faceDetected && !isFaceInsideFrame && !activityStopped){
                DetectionHelper.drawArrowFromFaceToFrame(animalOverlay, imgRgba, face);
                AuthenticationInstructionHelper.playTabletPlacementOverlay(mediaPlayerTabletPlacement, mediaPlayerTabletPlacementOverlay, mediaPlayerAnimalSound);
            }

            EnvironmentSettings.freeMemory();
        }

        return imgRgba;
    }
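
Unlike the authentication activity, this collection activity also throttles how often frames are processed, via the (lastTime + TIMER_DIFF) guard. The same guard in isolation, as a self-contained sketch (the 500 ms interval is a hypothetical value):

public class FrameThrottle {
    private final long intervalMs;
    private long lastTime = 0;

    public FrameThrottle(long intervalMs) {
        this.intervalMs = intervalMs;
    }

    /** Returns true at most once per intervalMs; call once per camera frame. */
    public boolean shouldProcess(long nowMs) {
        if ((lastTime + intervalMs) < nowMs) {
            lastTime = nowMs;
            return true;
        }
        return false;
    }

    public static void main(String[] args) {
        FrameThrottle throttle = new FrameThrottle(500); // hypothetical 500 ms spacing
        long now = System.currentTimeMillis();
        System.out.println(throttle.shouldProcess(now));        // true
        System.out.println(throttle.shouldProcess(now + 100));  // false, too soon
        System.out.println(throttle.shouldProcess(now + 600));  // true again
    }
}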
 
Example #6
Source File: DetectionTestActivity.java    From Android-Face-Recognition-with-Deep-Learning-Test-Framework with Apache License 2.0
@Override
protected void onResume() {
    super.onResume();
    final Handler handler = new Handler(Looper.getMainLooper());
    thread = new Thread(new Runnable() {
        public void run() {
            if(!Thread.currentThread().isInterrupted()){
                PreProcessorFactory ppF = new PreProcessorFactory(getApplicationContext());

                FileHelper fileHelper = new FileHelper();
                File[] detectionFolders = fileHelper.getDetectionTestList();
                if (detectionFolders.length > 0) {
                    // total and matches are used to calculate the accuracy afterwards
                    int total = 0;
                    int matches = 0;
                    List<String> results = new ArrayList<>();
                    results.add("Expected Name;Expected File;Result");
                    Date time_start = new Date();
                    for (File folder : detectionFolders) {
                        File[] files = folder.listFiles();
                        int counter = 1;
                        for (File file : files) {
                            if (FileHelper.isFileAnImage(file)) {
                                Mat imgRgba = Imgcodecs.imread(file.getAbsolutePath());
                                Imgproc.cvtColor(imgRgba, imgRgba, Imgproc.COLOR_BGR2RGBA); // imread returns 3-channel BGR by default, so convert BGR (not BGRA) -> RGBA

                                List<Mat> images = ppF.getProcessedImage(imgRgba, PreProcessorFactory.PreprocessingMode.DETECTION);
                                Rect[] faces = ppF.getFacesForRecognition();

                                String result = "";

                                if (faces == null || faces.length == 0) {
                                    result = RESULT_NEGATIVE;
                                } else {
                                    result = RESULT_POSITIVE;
                                    faces = MatOperation.rotateFaces(imgRgba, faces, ppF.getAngleForRecognition());
                                    for(int i = 0; i<faces.length; i++){
                                        MatOperation.drawRectangleAndLabelOnPreview(images.get(0), faces[i], "", false);
                                    }
                                }

                                // Save images
                                String[] tokens = file.getName().split("\\.");
                                String filename = tokens[0];
                                for (int i=0; i<images.size();i++){
                                    MatName m = new MatName(filename + "_" + (i + 1), images.get(i));
                                    fileHelper.saveMatToImage(m, FileHelper.RESULTS_PATH + "/" + time_start.toString() + "/");
                                }

                                tokens = file.getParent().split("/");
                                final String name = tokens[tokens.length - 1];

                                results.add(name + ";" + file.getName() + ";" + result);

                                total++;

                                if (name.equals(result)) {
                                    matches++;
                                }
                                // Update screen to show the progress
                                final int counterPost = counter;
                                final int filesLength = files.length;
                                progress.post(new Runnable() {
                                    @Override
                                    public void run() {
                                        progress.append("Image " + counterPost + " of " + filesLength + " from " + name + "\n");
                                    }
                                });
                                counter++;
                            }
                        }
                    }
                    Date time_end = new Date();
                    long duration = time_end.getTime() - time_start.getTime();
                    int durationPerImage = (int) (duration / total); // average processing time per image, in ms
                    double accuracy = (double) matches / (double) total;
                    Map<String, ?> printMap = PreferenceManager.getDefaultSharedPreferences(getApplicationContext()).getAll();
                    fileHelper.saveResultsToFile(printMap, accuracy, durationPerImage, results);

                    final Intent intent = new Intent(getApplicationContext(), MainActivity.class);
                    intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
                    intent.putExtra("accuracy", accuracy);
                    handler.post(new Runnable() {
                        @Override
                        public void run() {
                            startActivity(intent);
                        }
                    });
                }
            } else {
                Thread.currentThread().interrupt();
            }
        }
    });
    thread.start();
}
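
The final arithmetic divides by total, which is only safe because at least one image was processed; if the detection folders contained no image files, both divisions would fail. A guarded version of the same arithmetic as a sketch, with hypothetical counts:

public class AccuracySketch {
    public static void main(String[] args) {
        int total = 8, matches = 6;  // hypothetical counts from a test run
        long durationMs = 4000;      // hypothetical total runtime in ms
        double accuracy = total > 0 ? (double) matches / total : 0.0;
        long perImageMs = total > 0 ? durationMs / total : 0;
        System.out.println("accuracy=" + accuracy + ", perImageMs=" + perImageMs);
    }
}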
 
Example #7
Source File: PreProcessor.java    From Android-Face-Recognition-with-Deep-Learning-Library with Apache License 2.0
public void setAngle(int angle) {
    this.angle = angle;
    for (Mat img : images){
        MatOperation.rotate_90n(img, angle);
    }
}
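
rotate_90n applies a rotation in multiples of 90 degrees to each preprocessed image. The standard OpenCV idiom for such rotations is transpose plus flip; the sketch below shows the idea as an illustration, not the library's actual implementation.

import org.opencv.core.Core;
import org.opencv.core.Mat;

public class RotateSketch {
    /** Illustration only: rotate img in place by a multiple of 90 degrees. */
    public static void rotate90n(Mat img, int angle) {
        int a = ((angle % 360) + 360) % 360; // normalize to 0, 90, 180 or 270
        if (a == 90) {
            Core.transpose(img, img);
            Core.flip(img, img, 1);   // flip around the y-axis -> 90 deg clockwise
        } else if (a == 180) {
            Core.flip(img, img, -1);  // flip around both axes -> 180 deg
        } else if (a == 270) {
            Core.transpose(img, img);
            Core.flip(img, img, 0);   // flip around the x-axis -> 90 deg counter-clockwise
        }
    }
}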
 