com.google.android.gms.vision.face.FaceDetector Java Examples

The following examples show how to use com.google.android.gms.vision.face.FaceDetector. They are drawn from open source projects; the source file, originating project, and license are noted above each example.
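Before the individual examples, here is a minimal sketch of the lifecycle they all share: build a detector with FaceDetector.Builder, check isOperational(), run detect() on a Frame, and release() the native resources when done. The class and method names (FaceDetectorQuickstart, detectFaces) are ours, not from any project below.

import android.content.Context;
import android.graphics.Bitmap;
import android.util.SparseArray;

import com.google.android.gms.vision.Frame;
import com.google.android.gms.vision.face.Face;
import com.google.android.gms.vision.face.FaceDetector;

public final class FaceDetectorQuickstart {

    //Hypothetical helper illustrating the lifecycle shared by the examples below.
    public static SparseArray<Face> detectFaces(Context context, Bitmap bitmap) {
        FaceDetector detector = new FaceDetector.Builder(context)
                .setTrackingEnabled(false) //recommended for unrelated still images
                .setLandmarkType(FaceDetector.ALL_LANDMARKS)
                .build();
        try {
            if (!detector.isOperational()) {
                //The native library may still be downloading; detection would find nothing.
                return null;
            }
            Frame frame = new Frame.Builder().setBitmap(bitmap).build();
            return detector.detect(frame);
        } finally {
            detector.release(); //always free the native resources
        }
    }
}

Releasing in a finally block matters because the detector holds native resources that garbage collection does not reclaim.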
Example #1
Source File: IsOperationalFunction.java    From face-detection-ane with Apache License 2.0
@Override
public FREObject call( FREContext context, FREObject[] args ) {
	super.call( context, args );

	AIR.log( "FaceDetection::isOperational" );

	Activity activity = AIR.getContext().getActivity();

	FaceDetector.Builder fb = new FaceDetector.Builder( activity.getApplicationContext() );
	final FaceDetector detector = fb.build();
	try {
		return FREObject.newObject( detector.isOperational() );
	} catch( FREWrongThreadException e ) {
		e.printStackTrace();
	} finally {
		detector.release(); // free the native detector once the check is done
	}

	return null;
}
 
Example #2
Source File: FaceOverlayView.java    From AndroidDemoProjects with Apache License 2.0
public void setBitmap( Bitmap bitmap ) {
    mBitmap = bitmap;
    FaceDetector detector = new FaceDetector.Builder( getContext() )
            .setTrackingEnabled(true)
            .setLandmarkType(FaceDetector.ALL_LANDMARKS)
            .setMode(FaceDetector.ACCURATE_MODE)
            .build();

    if (!detector.isOperational()) {
        //Handle contingency
    } else {
        Frame frame = new Frame.Builder().setBitmap(bitmap).build();
        mFaces = detector.detect(frame);
        detector.release();
    }
    logFaceData();
    invalidate();
}
 
Example #3
Source File: GooglyEyesActivity.java    From android-vision with Apache License 2.0
/**
 * Creates the face detector and the camera.
 */
private void createCameraSource() {
    Context context = getApplicationContext();
    FaceDetector detector = createFaceDetector(context);

    int facing = CameraSource.CAMERA_FACING_FRONT;
    if (!mIsFrontFacing) {
        facing = CameraSource.CAMERA_FACING_BACK;
    }

    // The camera source is initialized to use either the front or rear facing camera.  We use a
    // relatively low resolution for the camera preview, since this is sufficient for this app
    // and the face detector will run faster at lower camera resolutions.
    //
    // However, note that there is a speed/accuracy trade-off with respect to choosing the
    // camera resolution.  The face detector will run faster with lower camera resolutions,
    // but may miss smaller faces, landmarks, or may not correctly detect eyes open/closed in
    // comparison to using higher camera resolutions.  If you have any of these issues, you may
    // want to increase the resolution.
    mCameraSource = new CameraSource.Builder(context, detector)
            .setFacing(facing)
            .setRequestedPreviewSize(320, 240)
            .setRequestedFps(60.0f)
            .setAutoFocusEnabled(true)
            .build();
}
 
Example #4
Source File: FaceAnalyser.java    From UserAwareVideoView with Apache License 2.0
/**
 * Create the face detector and camera source.
 */
private void createCameraTracker() {
    mDetector = new FaceDetector.Builder(mActivity)
            .setTrackingEnabled(false)
            .setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
            .build();

    mDetector.setProcessor(
            new MultiProcessor.Builder<>(new GraphicFaceTrackerFactory())
                    .build());

    if (!mDetector.isOperational()) {
        mUserAwareVideoView.onErrorOccurred();
        Log.e("Start Tracking", "Face tracker is not operational.");
    }

    mCameraSource = new CameraSource.Builder(mActivity, mDetector)
            .setRequestedPreviewSize(640, 480)
            .setFacing(CameraSource.CAMERA_FACING_FRONT)
            .setRequestedFps(30.0f)
            .build();
}
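Many examples on this page hand a GraphicFaceTrackerFactory to MultiProcessor.Builder without showing it. Its role is to create one Tracker<Face> per detected face so that per-face state survives across frames. A minimal sketch under that assumption follows; the tracker body is illustrative rather than taken from any of these projects.

//One Tracker<Face> is created per detected face by the MultiProcessor.
private class GraphicFaceTrackerFactory implements MultiProcessor.Factory<Face> {
    @Override
    public Tracker<Face> create(Face face) {
        return new GraphicFaceTracker();
    }
}

//Skeleton tracker; real implementations draw overlays or inspect the face in these callbacks.
private class GraphicFaceTracker extends Tracker<Face> {
    @Override
    public void onNewItem(int faceId, Face face) {
        //a new face entered the scene
    }

    @Override
    public void onUpdate(Detector.Detections<Face> detections, Face face) {
        //the face was re-detected in the latest frame
    }

    @Override
    public void onMissing(Detector.Detections<Face> detections) {
        //the face was not found in this frame (possibly only temporarily)
    }

    @Override
    public void onDone() {
        //the face has left the scene
    }
}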
 
Example #5
Source File: ARFilterActivity.java    From Machine-Learning-Projects-for-Mobile-Applications with MIT License
private void createCameraSourceBack() {
    previewFaceDetector = new FaceDetector.Builder(context)
            .setClassificationType(FaceDetector.NO_CLASSIFICATIONS)
            .setLandmarkType(FaceDetector.ALL_LANDMARKS)
            .setMode(FaceDetector.FAST_MODE)
            .setProminentFaceOnly(true)
            .setTrackingEnabled(true)
            .build();

    if(previewFaceDetector.isOperational()) {
        previewFaceDetector.setProcessor(new MultiProcessor.Builder<>(new GraphicFaceTrackerFactory()).build());
    } else {
        Toast.makeText(context, "FACE DETECTION NOT AVAILABLE", Toast.LENGTH_SHORT).show();
    }

    if(useCamera2) {
        mCamera2Source = new Camera2Source.Builder(context, previewFaceDetector)
                .setFocusMode(Camera2Source.CAMERA_AF_AUTO)
                .setFlashMode(Camera2Source.CAMERA_FLASH_AUTO)
                .setFacing(Camera2Source.CAMERA_FACING_BACK)
                .build();

        //IF CAMERA2 HARDWARE LEVEL IS LEGACY, CAMERA2 IS NOT NATIVE.
        //WE WILL USE CAMERA1.
        if(mCamera2Source.isCamera2Native()) {
            startCameraSource();
        } else {
            useCamera2 = false;
            if(usingFrontCamera) createCameraSourceFront(); else createCameraSourceBack();
        }
    } else {
        mCameraSource = new CameraSource.Builder(context, previewFaceDetector)
                .setFacing(CameraSource.CAMERA_FACING_BACK)
                .setRequestedFps(30.0f)
                .build();

        startCameraSource();
    }
}
 
Example #6
Source File: FaceFilterActivity.java    From Android-face-filters with Apache License 2.0
/**
 * Creates and starts the camera source that feeds the face detector.
 */
private void createCameraSource() {

    Context context = getApplicationContext();
    FaceDetector detector = new FaceDetector.Builder(context)
            .setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
            .setLandmarkType(FaceDetector.ALL_LANDMARKS)
            .setMode(FaceDetector.ACCURATE_MODE)
            .build();

    detector.setProcessor(
            new MultiProcessor.Builder<>(new GraphicFaceTrackerFactory())
                    .build());

    if (!detector.isOperational()) {
        // Note: The first time that an app using face API is installed on a device, GMS will
        // download a native library to the device in order to do detection.  Usually this
        // completes before the app is run for the first time.  But if that download has not yet
        // completed, then the above call will not detect any faces.
        //
        // isOperational() can be used to check if the required native library is currently
        // available.  The detector will automatically become operational once the library
        // download completes on device.
        Log.w(TAG, "Face detector dependencies are not yet available.");
    }

    mCameraSource = new CameraSource.Builder(context, detector)
            .setRequestedPreviewSize(640, 480)
            .setAutoFocusEnabled(true)
            .setFacing(CameraSource.CAMERA_FACING_BACK)
            .setRequestedFps(30.0f)
            .build();
}
 
Example #7
Source File: FaceTrackerActivity.java    From android-vision with Apache License 2.0
/**
 * Creates and starts the camera source that feeds the face detector.
 */
private void createCameraSource() {

    Context context = getApplicationContext();
    FaceDetector detector = new FaceDetector.Builder(context)
            .setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
            .build();

    detector.setProcessor(
            new MultiProcessor.Builder<>(new GraphicFaceTrackerFactory())
                    .build());

    if (!detector.isOperational()) {
        // Note: The first time that an app using face API is installed on a device, GMS will
        // download a native library to the device in order to do detection.  Usually this
        // completes before the app is run for the first time.  But if that download has not yet
        // completed, then the above call will not detect any faces.
        //
        // isOperational() can be used to check if the required native library is currently
        // available.  The detector will automatically become operational once the library
        // download completes on device.
        Log.w(TAG, "Face detector dependencies are not yet available.");
    }

    mCameraSource = new CameraSource.Builder(context, detector)
            .setRequestedPreviewSize(640, 480)
            .setFacing(CameraSource.CAMERA_FACING_BACK)
            .setRequestedFps(30.0f)
            .build();
}
 
Example #8
Source File: GooglyFaceTracker.java    From android-vision with Apache License 2.0
/**
 * Updates the positions and state of eyes to the underlying graphic, according to the most
 * recent face detection results.  The graphic will render the eyes and simulate the motion of
 * the iris based upon these changes over time.
 */
@Override
public void onUpdate(FaceDetector.Detections<Face> detectionResults, Face face) {
    mOverlay.add(mEyesGraphic);

    updatePreviousProportions(face);

    PointF leftPosition = getLandmarkPosition(face, Landmark.LEFT_EYE);
    PointF rightPosition = getLandmarkPosition(face, Landmark.RIGHT_EYE);

    float leftOpenScore = face.getIsLeftEyeOpenProbability();
    boolean isLeftOpen;
    if (leftOpenScore == Face.UNCOMPUTED_PROBABILITY) {
        isLeftOpen = mPreviousIsLeftOpen;
    } else {
        isLeftOpen = (leftOpenScore > EYE_CLOSED_THRESHOLD);
        mPreviousIsLeftOpen = isLeftOpen;
    }

    float rightOpenScore = face.getIsRightEyeOpenProbability();
    boolean isRightOpen;
    if (rightOpenScore == Face.UNCOMPUTED_PROBABILITY) {
        isRightOpen = mPreviousIsRightOpen;
    } else {
        isRightOpen = (rightOpenScore > EYE_CLOSED_THRESHOLD);
        mPreviousIsRightOpen = isRightOpen;
    }

    mEyesGraphic.updateEyes(leftPosition, isLeftOpen, rightPosition, isRightOpen);
}
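The updatePreviousProportions and getLandmarkPosition helpers are not shown on this page. In the android-vision sample the idea is to remember each landmark's position as a proportion of the face bounding box, so that a momentarily undetected landmark can still be estimated. A sketch under that assumption (the field name mPreviousProportions is ours):

//Remember each landmark's position as a fraction of the face bounding box.
private final Map<Integer, PointF> mPreviousProportions = new HashMap<>();

private void updatePreviousProportions(Face face) {
    for (Landmark landmark : face.getLandmarks()) {
        PointF position = landmark.getPosition();
        float xProp = (position.x - face.getPosition().x) / face.getWidth();
        float yProp = (position.y - face.getPosition().y) / face.getHeight();
        mPreviousProportions.put(landmark.getType(), new PointF(xProp, yProp));
    }
}

//Return the detected landmark position, or an estimate from stored proportions if missing.
private PointF getLandmarkPosition(Face face, int landmarkId) {
    for (Landmark landmark : face.getLandmarks()) {
        if (landmark.getType() == landmarkId) {
            return landmark.getPosition();
        }
    }
    PointF prop = mPreviousProportions.get(landmarkId);
    if (prop == null) {
        return null;
    }
    float x = face.getPosition().x + (prop.x * face.getWidth());
    float y = face.getPosition().y + (prop.y * face.getHeight());
    return new PointF(x, y);
}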
 
Example #9
Source File: FaceAnalyser.java    From Prevent-Screen-Off with Apache License 2.0
/**
 * Update the position/characteristics of the face within the overlay.
 */
@Override
public void onUpdate(FaceDetector.Detections<Face> detectionResults, Face face) {
    Log.d(getClass().getSimpleName(), "onUpdate: " + face.getIsLeftEyeOpenProbability());

    //Both eyes are considered open when each probability exceeds 10%.
    if (face.getIsLeftEyeOpenProbability() > 0.10 && face.getIsRightEyeOpenProbability() > 0.10) {
        isEyesClosedCount = 0;
        mWakelockManager.acquireWakelock();
    } else {
        isEyesClosedCount++;

        if (isEyesClosedCount > 2) mWakelockManager.releaseWakelock();
    }
}
 
Example #10
Source File: FaceAnalyser.java    From Prevent-Screen-Off with Apache License 2.0
/**
 * Create the {@link FaceDetector} and initialize the {@link CameraSourcePreview}. Do not call this
 * method directly; to start eye tracking, call {@link #startEyeTracker()}, which invokes this method internally.
 */
private void createCameraTracker() {
    //check for the camera permission
    if (ActivityCompat.checkSelfPermission(mActivity, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
        mScreenListener.onErrorOccurred(Errors.CAMERA_PERMISSION_NOT_AVAILABLE);
        return;
    }

    //check if the front camera is available?
    if (!isFrontCameraAvailable()) {
        mScreenListener.onErrorOccurred(Errors.FRONT_CAMERA_NOT_AVAILABLE);
        return;
    }

    mDetector = new FaceDetector.Builder(mActivity)
            .setTrackingEnabled(false)
            .setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
            .build();
    mDetector.setProcessor(new MultiProcessor.Builder<>(new GraphicFaceTrackerFactory())
            .build());

    //The detector is not operational
    if (!mDetector.isOperational()) {
        mScreenListener.onErrorOccurred(Errors.UNDEFINED);
        return;
    }

    mCameraSource = new CameraSource.Builder(mActivity, mDetector)
            .setRequestedPreviewSize(640, 480)
            .setFacing(CameraSource.CAMERA_FACING_FRONT)
            .setRequestedFps(30.0f)
            .build();
}
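The isFrontCameraAvailable() check used above is not shown either; a plausible implementation, assuming a simple system-feature query suffices:

//Hypothetical sketch of the front-camera availability check referenced above.
private boolean isFrontCameraAvailable() {
    return mActivity.getPackageManager()
            .hasSystemFeature(PackageManager.FEATURE_CAMERA_FRONT);
}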
 
Example #11
Source File: FaceAnalyser.java    From UserAwareVideoView with Apache License 2.0
/**
 * When new frame analysed.
 */
@Override
public void onUpdate(FaceDetector.Detections<Face> detectionResults, Face face) {
    Log.d("FaceTracker", "onUpdate" + face.getIsLeftEyeOpenProbability());

    //if left and right eyes are open. (Probability more than 10%)
    if (face.getIsLeftEyeOpenProbability() > 0.10 && face.getIsRightEyeOpenProbability() > 0.10) {
        isEyesClosedCount = 0;
        mUserAwareVideoView.onUserAttentionAvailable();
    } else {
        isEyesClosedCount++;
        if (isEyesClosedCount > 2) mUserAwareVideoView.onUserAttentionGone();
    }
}
 
Example #12
Source File: FaceCaptureActivity.java    From flutter_mobile_vision with MIT License
@SuppressLint("InlinedApi")
protected void createCameraSource() throws MobileVisionException {
    Context context = getApplicationContext();

    // TODO: Verify attributes.
    FaceDetector faceDetector = new FaceDetector.Builder(context)
            .setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
            .build();

    FaceTrackerFactory faceTrackerFactory = new FaceTrackerFactory(graphicOverlay, showText);

    faceDetector.setProcessor(
            new MultiProcessor.Builder<>(faceTrackerFactory).build());

    if (!faceDetector.isOperational()) {
        IntentFilter lowStorageFilter = new IntentFilter(Intent.ACTION_DEVICE_STORAGE_LOW);
        boolean hasLowStorage = registerReceiver(null, lowStorageFilter) != null;

        if (hasLowStorage) {
            throw new MobileVisionException("Low Storage.");
        }
    }

    cameraSource = new CameraSource
            .Builder(getApplicationContext(), faceDetector)
            .setFacing(camera)
            .setRequestedPreviewSize(previewWidth, previewHeight)
            .setFocusMode(autoFocus ? Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE : null)
            .setFlashMode(useFlash ? Camera.Parameters.FLASH_MODE_TORCH : null)
            .setRequestedFps(fps)
            .build();
}
 
Example #13
Source File: FaceRecognition.java    From MagicalCamera with Apache License 2.0
/***
 * Performs face detection on the given photo; this method is called by the other
 * methods of this class to automate the process.
 * @param stroke the stroke width of the line drawn around each detected face
 * @param color the color of the rectangle drawn around each recognized face
 * @param activity the current activity
 * @param photo your photo
 * @return the photo with detected faces outlined, or null if detection failed
 */
private Bitmap faceDetection(int stroke, int color, Activity activity, Bitmap photo) {
    this.detector = new FaceDetector.Builder(activity)
            .setMode(FaceDetector.ACCURATE_MODE)
            .setLandmarkType(FaceDetector.ALL_LANDMARKS)
            .setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
            .setTrackingEnabled(false)
            .build();
    try {
        if (!this.detector.isOperational()) {
            return null;
        }

        //Add the image on a Frame object
        Frame frame = new Frame.Builder()
                .setBitmap(photo)
                .build();

        //Detect all faces from Frame object
        SparseArray<Face> faceArray = detector.detect(frame);

        //Do some drawing on faces
        Bitmap outBitmap = drawOnFace(faceArray, photo, stroke, color);

        //Releasing the detector object
        this.detector.release();
        return (outBitmap != null) ? outBitmap : photo;
    } catch (Exception ev) {
        return null;
    }
}
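The drawOnFace(...) helper called above is not included on this page. A plausible sketch, assuming it outlines each detected face on a mutable copy of the photo (the drawing style is our guess):

//Hypothetical sketch of drawOnFace(...): outline each face on a mutable copy of the photo.
private Bitmap drawOnFace(SparseArray<Face> faceArray, Bitmap photo, int stroke, int color) {
    if (faceArray == null || faceArray.size() == 0) {
        return null;
    }
    Bitmap outBitmap = photo.copy(Bitmap.Config.ARGB_8888, true);
    Canvas canvas = new Canvas(outBitmap);
    Paint paint = new Paint();
    paint.setColor(color);
    paint.setStrokeWidth(stroke);
    paint.setStyle(Paint.Style.STROKE);
    for (int i = 0; i < faceArray.size(); i++) {
        Face face = faceArray.valueAt(i);
        float left = face.getPosition().x;
        float top = face.getPosition().y;
        canvas.drawRect(left, top, left + face.getWidth(), top + face.getHeight(), paint);
    }
    return outBitmap;
}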
 
Example #14
Source File: AlbumActivity.java    From medialibrary with Apache License 2.0
@Override
public FaceDetector getFaceDetector() {
    if (mFaceDetector == null) {
        mFaceDetector = new FaceDetector.Builder(this)
                .setTrackingEnabled(true)
                .build();
    }
    return mFaceDetector;
}
 
Example #15
Source File: ARFilterActivity.java    From Machine-Learning-Projects-for-Mobile-Applications with MIT License
private void createCameraSourceFront() {
    previewFaceDetector = new FaceDetector.Builder(context)
            .setClassificationType(FaceDetector.NO_CLASSIFICATIONS)
            .setLandmarkType(FaceDetector.ALL_LANDMARKS)
            .setMode(FaceDetector.FAST_MODE)
            .setProminentFaceOnly(usingFrontCamera)
            .setTrackingEnabled(true)
            .setMinFaceSize(usingFrontCamera ? 0.35f : 0.15f)
            .build();

    if(previewFaceDetector.isOperational()) {
        previewFaceDetector.setProcessor(new MultiProcessor.Builder<>(new GraphicFaceTrackerFactory()).build());
    } else {
        Toast.makeText(context, "FACE DETECTION NOT AVAILABLE", Toast.LENGTH_SHORT).show();
    }

    Log.e(TAG, "createCameraSourceFront: " + useCamera2);

    if(useCamera2) {
        mCamera2Source = new Camera2Source.Builder(context, previewFaceDetector)
                .setFocusMode(Camera2Source.CAMERA_AF_AUTO)
                .setFlashMode(Camera2Source.CAMERA_FLASH_AUTO)
                .setFacing(Camera2Source.CAMERA_FACING_FRONT)
                .build();
        startCameraSource();
        //IF CAMERA2 HARDWARE LEVEL IS LEGACY, CAMERA2 IS NOT NATIVE.
        //WE WILL USE CAMERA1.
//        if(mCamera2Source.isCamera2Native()) {
//            startCameraSource();
//        } else {
//            useCamera2 = false;
//            if(usingFrontCamera) createCameraSourceFront(); else createCameraSourceBack();
//        }
    } else {
        mCameraSource = new CameraSource.Builder(context, previewFaceDetector)
                .setFacing(CameraSource.CAMERA_FACING_FRONT)
                .setRequestedFps(30.0f)
                .build();

        startCameraSource();
    }
}
 
Example #16
Source File: MainActivity.java    From Camera2Vision with Apache License 2.0
private void createCameraSourceBack() {
    previewFaceDetector = new FaceDetector.Builder(context)
            .setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
            .setLandmarkType(FaceDetector.ALL_LANDMARKS)
            .setMode(FaceDetector.FAST_MODE)
            .setProminentFaceOnly(true)
            .setTrackingEnabled(true)
            .build();

    if(previewFaceDetector.isOperational()) {
        previewFaceDetector.setProcessor(new MultiProcessor.Builder<>(new GraphicFaceTrackerFactory()).build());
    } else {
        Toast.makeText(context, "FACE DETECTION NOT AVAILABLE", Toast.LENGTH_SHORT).show();
    }

    if(useCamera2) {
        mCamera2Source = new Camera2Source.Builder(context, previewFaceDetector)
                .setFocusMode(Camera2Source.CAMERA_AF_AUTO)
                .setFlashMode(Camera2Source.CAMERA_FLASH_AUTO)
                .setFacing(Camera2Source.CAMERA_FACING_BACK)
                .build();

        //IF CAMERA2 HARDWARE LEVEL IS LEGACY, CAMERA2 IS NOT NATIVE.
        //WE WILL USE CAMERA1.
        if(mCamera2Source.isCamera2Native()) {
            startCameraSource();
        } else {
            useCamera2 = false;
            if(usingFrontCamera) createCameraSourceFront(); else createCameraSourceBack();
        }
    } else {
        mCameraSource = new CameraSource.Builder(context, previewFaceDetector)
                .setFacing(CameraSource.CAMERA_FACING_BACK)
                .setRequestedFps(30.0f)
                .build();

        startCameraSource();
    }
}
 
Example #17
Source File: MainActivity.java    From Camera2Vision with Apache License 2.0
private void createCameraSourceFront() {
    previewFaceDetector = new FaceDetector.Builder(context)
            .setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
            .setLandmarkType(FaceDetector.ALL_LANDMARKS)
            .setMode(FaceDetector.FAST_MODE)
            .setProminentFaceOnly(true)
            .setTrackingEnabled(true)
            .build();

    if(previewFaceDetector.isOperational()) {
        previewFaceDetector.setProcessor(new MultiProcessor.Builder<>(new GraphicFaceTrackerFactory()).build());
    } else {
        Toast.makeText(context, "FACE DETECTION NOT AVAILABLE", Toast.LENGTH_SHORT).show();
    }

    if(useCamera2) {
        mCamera2Source = new Camera2Source.Builder(context, previewFaceDetector)
                .setFocusMode(Camera2Source.CAMERA_AF_AUTO)
                .setFlashMode(Camera2Source.CAMERA_FLASH_AUTO)
                .setFacing(Camera2Source.CAMERA_FACING_FRONT)
                .build();

        //IF CAMERA2 HARDWARE LEVEL IS LEGACY, CAMERA2 IS NOT NATIVE.
        //WE WILL USE CAMERA1.
        if(mCamera2Source.isCamera2Native()) {
            startCameraSource();
        } else {
            useCamera2 = false;
            if(usingFrontCamera) createCameraSourceFront(); else createCameraSourceBack();
        }
    } else {
        mCameraSource = new CameraSource.Builder(context, previewFaceDetector)
                .setFacing(CameraSource.CAMERA_FACING_FRONT)
                .setRequestedFps(30.0f)
                .build();

        startCameraSource();
    }
}
 
Example #18
Source File: FaceFilterActivity.java    From FaceFilter with MIT License
/**
 * Creates and starts the camera source that feeds the face detector.
 */
private void createCameraSource() {

    Context context = getApplicationContext();
    FaceDetector detector = new FaceDetector.Builder(context)
            .setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
            .setLandmarkType(FaceDetector.ALL_LANDMARKS)
            .setMode(FaceDetector.ACCURATE_MODE)
            .build();

    detector.setProcessor(
            new MultiProcessor.Builder<>(new GraphicFaceTrackerFactory())
                    .build());

    if (!detector.isOperational()) {
        // Note: The first time that an app using face API is installed on a device, GMS will
        // download a native library to the device in order to do detection.  Usually this
        // completes before the app is run for the first time.  But if that download has not yet
        // completed, then the above call will not detect any faces.
        //
        // isOperational() can be used to check if the required native library is currently
        // available.  The detector will automatically become operational once the library
        // download completes on device.
        Log.w(TAG, "Face detector dependencies are not yet available.");
    }

    mCameraSource = new CameraSource.Builder(context, detector)
            .setRequestedPreviewSize(640, 480)
            .setFacing(CameraSource.CAMERA_FACING_FRONT)
            .setRequestedFps(30.0f)
            .build();
}
 
Example #19
Source File: MainActivity.java    From Camera2Vision with Apache License 2.0
/**
 * Update the position/characteristics of the face within the overlay.
 */
@Override
public void onUpdate(FaceDetector.Detections<Face> detectionResults, Face face) {
    mOverlay.add(mFaceGraphic);
    mFaceGraphic.updateFace(face);
}
 
Example #20
Source File: MoodModule.java    From HomeMirror with Apache License 2.0
/**
 * Creates and starts the camera source that feeds the face detector.
 */
private void createCameraSource() {

    Context context = mContextWeakReference.get();
    FaceDetector detector = new FaceDetector.Builder(context)
            .setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
            .build();

    detector.setProcessor(new Detector.Processor<Face>() {
        @Override
        public void release() {

        }

        @Override
        public void receiveDetections(final Detector.Detections<Face> detections) {
            final SparseArray<Face> detectedItems = detections.getDetectedItems();
            if (detectedItems.size() != 0) {
                final int key = detectedItems.keyAt(0);
                final Face face = detectedItems.get(key);
                final float isSmilingProbability = face.getIsSmilingProbability();
                String feedback = getFeedbackForSmileProbability(isSmilingProbability);
                mCallBacks.onShouldGivePositiveAffirmation(feedback);
            }
        }
    });

    if (!detector.isOperational()) {
        // Note: The first time that an app using face API is installed on a device, GMS will
        // download a native library to the device in order to do detection.  Usually this
        // completes before the app is run for the first time.  But if that download has not yet
        // completed, then the above call will not detect any faces.
        //
        // isOperational() can be used to check if the required native library is currently
        // available.  The detector will automatically become operational once the library
        // download completes on device.
        Log.w(TAG, "Face detector dependencies are not yet available.");
    }

    try {
        mCameraSource = new CameraSource.Builder(context, detector)
                .setRequestedPreviewSize(640, 480)
                .setFacing(CameraSource.CAMERA_FACING_FRONT)
                .setRequestedFps(30.0f)
                .build();

        mCameraSource.start();
    } catch (IOException | RuntimeException e) {
        Log.e(TAG, "Something went horribly wrong, with your face.", e);
    }
}
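The getFeedbackForSmileProbability(...) helper is not shown. A hypothetical sketch, with thresholds and messages invented for illustration:

//Hypothetical mapping from smile probability to an affirmation message.
private String getFeedbackForSmileProbability(float smileProbability) {
    if (smileProbability == Face.UNCOMPUTED_PROBABILITY) {
        return "Hello there!"; //classification unavailable for this frame
    } else if (smileProbability > 0.8f) { //assumed threshold
        return "Looking great today!";
    } else {
        return "Don't forget to smile!";
    }
}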
 
Example #21
Source File: FaceTrackerActivity.java    From android-vision with Apache License 2.0
/**
 * Update the position/characteristics of the face within the overlay.
 */
@Override
public void onUpdate(FaceDetector.Detections<Face> detectionResults, Face face) {
    mOverlay.add(mFaceGraphic);
    mFaceGraphic.updateFace(face);
}
 
Example #22
Source File: ARFilterActivity.java    From Machine-Learning-Projects-for-Mobile-Applications with MIT License
@Override
public void onUpdate(FaceDetector.Detections detectionResults, Face face) {
    mOverlay.add(mFaceGraphic);
    updatePreviousLandmarkPositions(face);

    // Get head angles.
    mFaceData.setEulerY(face.getEulerY());
    mFaceData.setEulerZ(face.getEulerZ());

    // Get face dimensions.
    mFaceData.setPosition(face.getPosition());
    mFaceData.setWidth(face.getWidth());
    mFaceData.setHeight(face.getHeight());

    // Get the positions of facial landmarks.
    mFaceData.setLeftEyePosition(getLandmarkPosition(face, Landmark.LEFT_EYE));
    mFaceData.setRightEyePosition(getLandmarkPosition(face, Landmark.RIGHT_EYE));
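    // NOTE: the calls below reuse setMouthBottomPosition(...) for the cheek and ear landmarks,
    // which looks like a copy-paste bug in the original source: each call overwrites the
    // mouth-bottom position instead of storing those landmarks separately.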
    mFaceData.setMouthBottomPosition(getLandmarkPosition(face, Landmark.LEFT_CHEEK));
    mFaceData.setMouthBottomPosition(getLandmarkPosition(face, Landmark.RIGHT_CHEEK));
    mFaceData.setNoseBasePosition(getLandmarkPosition(face, Landmark.NOSE_BASE));
    mFaceData.setMouthBottomPosition(getLandmarkPosition(face, Landmark.LEFT_EAR));
    mFaceData.setMouthBottomPosition(getLandmarkPosition(face, Landmark.LEFT_EAR_TIP));
    mFaceData.setMouthBottomPosition(getLandmarkPosition(face, Landmark.RIGHT_EAR));
    mFaceData.setMouthBottomPosition(getLandmarkPosition(face, Landmark.RIGHT_EAR_TIP));
    mFaceData.setMouthLeftPosition(getLandmarkPosition(face, Landmark.LEFT_MOUTH));
    mFaceData.setMouthBottomPosition(getLandmarkPosition(face, Landmark.BOTTOM_MOUTH));
    mFaceData.setMouthRightPosition(getLandmarkPosition(face, Landmark.RIGHT_MOUTH));

    // 1
    final float EYE_CLOSED_THRESHOLD = 0.4f;
    float leftOpenScore = face.getIsLeftEyeOpenProbability();
    if (leftOpenScore == Face.UNCOMPUTED_PROBABILITY) {
        mFaceData.setLeftEyeOpen(mPreviousIsLeftEyeOpen);
    } else {
        mFaceData.setLeftEyeOpen(leftOpenScore > EYE_CLOSED_THRESHOLD);
        mPreviousIsLeftEyeOpen = mFaceData.isLeftEyeOpen();
    }
    float rightOpenScore = face.getIsRightEyeOpenProbability();
    if (rightOpenScore == Face.UNCOMPUTED_PROBABILITY) {
        mFaceData.setRightEyeOpen(mPreviousIsRightEyeOpen);
    } else {
        mFaceData.setRightEyeOpen(rightOpenScore > EYE_CLOSED_THRESHOLD);
        mPreviousIsRightEyeOpen = mFaceData.isRightEyeOpen();
    }

    // 2
    // Determine whether the person is smiling.
    final float SMILING_THRESHOLD = 0.8f;
    mFaceData.setSmiling(face.getIsSmilingProbability() > SMILING_THRESHOLD);

    mFaceGraphic.update(mFaceData);
}
 
Example #23
Source File: MultiTrackerActivity.java    From android-vision with Apache License 2.0
/**
 * Creates and starts the camera.  Note that this uses a higher resolution in comparison
 * to other detection examples to enable the barcode detector to detect small barcodes
 * at long distances.
 */
private void createCameraSource() {


    Context context = getApplicationContext();

    // A face detector is created to track faces.  An associated multi-processor instance
    // is set to receive the face detection results, track the faces, and maintain graphics for
    // each face on screen.  The factory is used by the multi-processor to create a separate
    // tracker instance for each face.
    FaceDetector faceDetector = new FaceDetector.Builder(context).build();
    FaceTrackerFactory faceFactory = new FaceTrackerFactory(mGraphicOverlay);
    faceDetector.setProcessor(
            new MultiProcessor.Builder<>(faceFactory).build());

    // A barcode detector is created to track barcodes.  An associated multi-processor instance
    // is set to receive the barcode detection results, track the barcodes, and maintain
    // graphics for each barcode on screen.  The factory is used by the multi-processor to
    // create a separate tracker instance for each barcode.
    BarcodeDetector barcodeDetector = new BarcodeDetector.Builder(context).build();
    BarcodeTrackerFactory barcodeFactory = new BarcodeTrackerFactory(mGraphicOverlay);
    barcodeDetector.setProcessor(
            new MultiProcessor.Builder<>(barcodeFactory).build());

    // A multi-detector groups the two detectors together as one detector.  All images received
    // by this detector from the camera will be sent to each of the underlying detectors, which
    // will each do face and barcode detection, respectively.  The detection results from each
    // are then sent to associated tracker instances which maintain per-item graphics on the
    // screen.
    MultiDetector multiDetector = new MultiDetector.Builder()
            .add(faceDetector)
            .add(barcodeDetector)
            .build();

    if (!multiDetector.isOperational()) {
        // Note: The first time that an app using the barcode or face API is installed on a
        // device, GMS will download a native libraries to the device in order to do detection.
        // Usually this completes before the app is run for the first time.  But if that
        // download has not yet completed, then the above call will not detect any barcodes
        // and/or faces.
        //
        // isOperational() can be used to check if the required native libraries are currently
        // available.  The detectors will automatically become operational once the library
        // downloads complete on device.
        Log.w(TAG, "Detector dependencies are not yet available.");

        // Check for low storage.  If there is low storage, the native library will not be
        // downloaded, so detection will not become operational.
        IntentFilter lowstorageFilter = new IntentFilter(Intent.ACTION_DEVICE_STORAGE_LOW);
        boolean hasLowStorage = registerReceiver(null, lowstorageFilter) != null;

        if (hasLowStorage) {
            Toast.makeText(this, R.string.low_storage_error, Toast.LENGTH_LONG).show();
            Log.w(TAG, getString(R.string.low_storage_error));
        }
    }

    // Creates and starts the camera.  Note that this uses a higher resolution in comparison
    // to other detection examples to enable the barcode detector to detect small barcodes
    // at long distances.
    mCameraSource = new CameraSource.Builder(getApplicationContext(), multiDetector)
            .setFacing(CameraSource.CAMERA_FACING_BACK)
            .setRequestedPreviewSize(1600, 1024)
            .setRequestedFps(15.0f)
            .build();
}
 
Example #24
Source File: PhotoViewerActivity.java    From android-vision with Apache License 2.0
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_photo_viewer);

    InputStream stream = getResources().openRawResource(R.raw.face);
    Bitmap bitmap = BitmapFactory.decodeStream(stream);

    // A new face detector is created for detecting the face and its landmarks.
    //
    // Setting "tracking enabled" to false is recommended for detection with unrelated
    // individual images (as opposed to video or a series of consecutively captured still
    // images).  For detection on unrelated individual images, this will give a more accurate
    // result.  For detection on consecutive images (e.g., live video), tracking gives a more
    // accurate (and faster) result.
    //
    // By default, landmark detection is not enabled since it increases detection time.  We
    // enable it here in order to visualize detected landmarks.
    FaceDetector detector = new FaceDetector.Builder(getApplicationContext())
            .setTrackingEnabled(false)
            .setLandmarkType(FaceDetector.ALL_LANDMARKS)
            .build();

    // This is a temporary workaround for a bug in the face detector with respect to operating
    // on very small images.  This will be fixed in a future release.  But in the near term, use
    // of the SafeFaceDetector class will patch the issue.
    Detector<Face> safeDetector = new SafeFaceDetector(detector);

    // Create a frame from the bitmap and run face detection on the frame.
    Frame frame = new Frame.Builder().setBitmap(bitmap).build();
    SparseArray<Face> faces = safeDetector.detect(frame);

    if (!safeDetector.isOperational()) {
        // Note: The first time that an app using face API is installed on a device, GMS will
        // download a native library to the device in order to do detection.  Usually this
        // completes before the app is run for the first time.  But if that download has not yet
        // completed, then the above call will not detect any faces.
        //
        // isOperational() can be used to check if the required native library is currently
        // available.  The detector will automatically become operational once the library
        // download completes on device.
        Log.w(TAG, "Face detector dependencies are not yet available.");

        // Check for low storage.  If there is low storage, the native library will not be
        // downloaded, so detection will not become operational.
        IntentFilter lowstorageFilter = new IntentFilter(Intent.ACTION_DEVICE_STORAGE_LOW);
        boolean hasLowStorage = registerReceiver(null, lowstorageFilter) != null;

        if (hasLowStorage) {
            Toast.makeText(this, R.string.low_storage_error, Toast.LENGTH_LONG).show();
            Log.w(TAG, getString(R.string.low_storage_error));
        }
    }

    FaceView overlay = (FaceView) findViewById(R.id.faceView);
    overlay.setContent(bitmap, faces);

    // Although detector may be used multiple times for different images, it should be released
    // when it is no longer needed in order to free native resources.
    safeDetector.release();
}
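SafeFaceDetector ships with the android-vision samples and is not reproduced here. As the comment above explains, it shields the native detector from very small images. A rough sketch of that idea under our own assumptions; the shipped class's padding rules and minimum size differ:

//Our sketch of the idea (not the shipped SafeFaceDetector): pad tiny bitmap-backed frames
//before delegating to the wrapped detector.
public class PaddedFaceDetector extends Detector<Face> {
    private static final int MIN_DIMENSION = 150; //assumed minimum safe size

    private final Detector<Face> mDelegate;

    public PaddedFaceDetector(Detector<Face> delegate) {
        mDelegate = delegate;
    }

    @Override
    public SparseArray<Face> detect(Frame frame) {
        Bitmap bitmap = frame.getBitmap(); //assumes a bitmap-backed frame, as in this example
        if (bitmap.getWidth() >= MIN_DIMENSION && bitmap.getHeight() >= MIN_DIMENSION) {
            return mDelegate.detect(frame);
        }
        //Pad the bitmap up to the minimum size before detection.
        Bitmap padded = Bitmap.createBitmap(
                Math.max(bitmap.getWidth(), MIN_DIMENSION),
                Math.max(bitmap.getHeight(), MIN_DIMENSION),
                Bitmap.Config.ARGB_8888);
        new Canvas(padded).drawBitmap(bitmap, 0, 0, null);
        return mDelegate.detect(new Frame.Builder().setBitmap(padded).build());
    }

    @Override
    public boolean isOperational() {
        return mDelegate.isOperational();
    }

    @Override
    public void release() {
        mDelegate.release();
    }
}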
 
Example #25
Source File: FaceFilterActivity.java    From Android-face-filters with Apache License 2.0
/**
 * Update the position/characteristics of the face within the overlay.
 */
@Override
public void onUpdate(FaceDetector.Detections<Face> detectionResults, Face face) {
    mOverlay.add(mFaceGraphic);
    mFaceGraphic.updateFace(face,typeFace);
}
 
Example #26
Source File: GooglyEyesActivity.java    From android-vision with Apache License 2.0
/**
 * Creates the face detector and associated processing pipeline to support either front facing
 * mode or rear facing mode.  Checks if the detector is ready to use, and displays a low storage
 * warning if it was not possible to download the face library.
 */
@NonNull
private FaceDetector createFaceDetector(Context context) {
    // For both front facing and rear facing modes, the detector is initialized to do landmark
    // detection (to find the eyes), classification (to determine if the eyes are open), and
    // tracking.
    //
    // Use of "fast mode" enables faster detection for frontward faces, at the expense of not
    // attempting to detect faces at more varied angles (e.g., faces in profile).  Therefore,
    // faces that are turned too far won't be detected under fast mode.
    //
    // For front facing mode only, the detector will use the "prominent face only" setting,
    // which is optimized for tracking a single relatively large face.  This setting allows the
    // detector to take some shortcuts to make tracking faster, at the expense of not being able
    // to track multiple faces.
    //
    // Setting the minimum face size not only controls how large faces must be in order to be
    // detected, it also affects performance.  Since it takes longer to scan for smaller faces,
    // we increase the minimum face size for the rear facing mode a little bit in order to make
    // tracking faster (at the expense of missing smaller faces).  But this optimization is less
    // important for the front facing case, because when "prominent face only" is enabled, the
    // detector stops scanning for faces after it has found the first (large) face.
    FaceDetector detector = new FaceDetector.Builder(context)
            .setLandmarkType(FaceDetector.ALL_LANDMARKS)
            .setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
            .setTrackingEnabled(true)
            .setMode(FaceDetector.FAST_MODE)
            .setProminentFaceOnly(mIsFrontFacing)
            .setMinFaceSize(mIsFrontFacing ? 0.35f : 0.15f)
            .build();

    Detector.Processor<Face> processor;
    if (mIsFrontFacing) {
        // For front facing mode, a single tracker instance is used with an associated focusing
        // processor.  This configuration allows the face detector to take some shortcuts to
        // speed up detection, in that it can quit after finding a single face and can assume
        // that the nextIrisPosition face position is usually relatively close to the last seen
        // face position.
        Tracker<Face> tracker = new GooglyFaceTracker(mGraphicOverlay);
        processor = new LargestFaceFocusingProcessor.Builder(detector, tracker).build();
    } else {
        // For rear facing mode, a factory is used to create per-face tracker instances.  A
        // tracker is created for each face and is maintained as long as the same face is
        // visible, enabling per-face state to be maintained over time.  This is used to store
        // the iris position and velocity for each face independently, simulating the motion of
        // the eyes of any number of faces over time.
        //
        // Both the front facing mode and the rear facing mode use the same tracker
        // implementation, avoiding the need for any additional code.  The only difference
        // between these cases is the choice of Processor: one that is specialized for tracking
        // a single face or one that can handle multiple faces.  Here, we use MultiProcessor,
        // which is a standard component of the mobile vision API for managing multiple items.
        MultiProcessor.Factory<Face> factory = new MultiProcessor.Factory<Face>() {
            @Override
            public Tracker<Face> create(Face face) {
                return new GooglyFaceTracker(mGraphicOverlay);
            }
        };
        processor = new MultiProcessor.Builder<>(factory).build();
    }

    detector.setProcessor(processor);

    if (!detector.isOperational()) {
        // Note: The first time that an app using face API is installed on a device, GMS will
        // download a native library to the device in order to do detection.  Usually this
        // completes before the app is run for the first time.  But if that download has not yet
        // completed, then the above call will not detect any faces.
        //
        // isOperational() can be used to check if the required native library is currently
        // available.  The detector will automatically become operational once the library
        // download completes on device.
        Log.w(TAG, "Face detector dependencies are not yet available.");

        // Check for low storage.  If there is low storage, the native library will not be
        // downloaded, so detection will not become operational.
        IntentFilter lowStorageFilter = new IntentFilter(Intent.ACTION_DEVICE_STORAGE_LOW);
        boolean hasLowStorage = registerReceiver(null, lowStorageFilter) != null;

        if (hasLowStorage) {
            Toast.makeText(this, R.string.low_storage_error, Toast.LENGTH_LONG).show();
            Log.w(TAG, getString(R.string.low_storage_error));
        }
    }
    return detector;
}
 
Example #27
Source File: ARFilterActivity.java    From Machine-Learning-Projects-for-Mobile-Applications with MIT License
@Override
public void onMissing(FaceDetector.Detections<Face> detectionResults) {
    mOverlay.remove(mFaceGraphic);
}
 
Example #28
Source File: FaceFilterActivity.java    From FaceFilter with MIT License
/**
 * Update the position/characteristics of the face within the overlay.
 */
@Override
public void onUpdate(FaceDetector.Detections<Face> detectionResults, Face face) {
    mOverlay.add(mFaceGraphic);
    mFaceGraphic.updateFace(face);
}
 
Example #29
Source File: DetectFacesFunction.java    From face-detection-ane with Apache License 2.0
@Override
public FREObject call( FREContext context, FREObject[] args ) {
	super.call( context, args );

	AIR.log( "FaceDetection::detect" );

	final int callbackId = FREObjectUtils.getInt( args[1] );
	final Bitmap image;
	try {
		image = BitmapDataUtils.getBitmap( (FREBitmapData) args[0] );
	} catch( Exception e ) {
		e.printStackTrace();
		AIR.log( "Error creating Bitmap out of FREBitmapData" );
		AIR.dispatchEvent(
				FaceDetectionEvent.FACE_DETECTION_ERROR,
				StringUtils.getEventErrorJSON( callbackId, "Error creating Bitmap out of FREBitmapData" )
		);
		return null;
	}
	/* Mode (accuracy) */
	final int accuracy = FREObjectUtils.getInt( args[2] ); // Comes in as a ready-to-use value
	boolean detectOpenEyes = FREObjectUtils.getBoolean( args[3] );
	boolean detectSmile = FREObjectUtils.getBoolean( args[4] );
	final boolean prominentFaceOnly = FREObjectUtils.getBoolean( args[5] );
	/* Classification type (detect open eyes, detect smile) */
	final int classification = (detectOpenEyes || detectSmile) ? FaceDetector.ALL_CLASSIFICATIONS : FaceDetector.NO_CLASSIFICATIONS;

	final Activity activity = AIR.getContext().getActivity();

	new Thread(
			new Runnable() {
				@Override
				public void run() {
					AIR.log( "Running FaceDetection in new thread" );
					FaceDetector.Builder fb = new FaceDetector.Builder( activity.getApplicationContext() );
					fb.setClassificationType( classification )
							.setMode( accuracy )
							.setTrackingEnabled( false )
							.setLandmarkType( FaceDetector.ALL_LANDMARKS ) // We want to know about eye/mouth positions
							.setProminentFaceOnly( prominentFaceOnly );

					/* Wrap the detector in SafeFaceDetector */
					final FaceDetector detector = fb.build();
					Detector<Face> sd = new SafeFaceDetector( detector );
					if( !sd.isOperational() ) {
						sd.release();
						AIR.log( "Error, detector is not operational." );
						AIR.dispatchEvent(
								FaceDetectionEvent.FACE_DETECTION_ERROR,
								StringUtils.getEventErrorJSON( -1, "Detector is not operational. Dependencies may not have been downloaded yet. Please try again later." )
						);
						return;
					}

					/* Create Frame with bitmap */
					final Frame frame = new Frame.Builder().setBitmap( image ).build();
					SparseArray<Face> faces = sd.detect( frame );

					/* Build faces JSONArray */
					JSONArray facesResult = getFacesJSONArray( faces );
					dispatchResponse( facesResult, callbackId );

					sd.release();
				}
			}
	).start();

	return null;
}
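The getFacesJSONArray(...) and dispatchResponse(...) helpers are not shown. A plausible sketch of the serialization step, assuming the ANE reports face bounds and classification probabilities to the ActionScript side as JSON (the field names are our guess):

//Hypothetical serialization of detected faces for the AIR side.
private JSONArray getFacesJSONArray( SparseArray<Face> faces ) {
	JSONArray result = new JSONArray();
	for( int i = 0; i < faces.size(); i++ ) {
		Face face = faces.valueAt( i );
		try {
			JSONObject json = new JSONObject();
			json.put( "x", face.getPosition().x );
			json.put( "y", face.getPosition().y );
			json.put( "width", face.getWidth() );
			json.put( "height", face.getHeight() );
			json.put( "leftEyeOpenProbability", face.getIsLeftEyeOpenProbability() );
			json.put( "rightEyeOpenProbability", face.getIsRightEyeOpenProbability() );
			json.put( "isSmilingProbability", face.getIsSmilingProbability() );
			result.put( json );
		} catch( JSONException e ) {
			e.printStackTrace();
		}
	}
	return result;
}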
 
Example #30
Source File: MainActivity.java    From Camera2Vision with Apache License 2.0
/**
 * Hide the graphic when the corresponding face was not detected.  This can happen for
 * intermediate frames temporarily (e.g., if the face was momentarily blocked from
 * view).
 */
@Override
public void onMissing(FaceDetector.Detections<Face> detectionResults) {
    mFaceGraphic.goneFace();
    mOverlay.remove(mFaceGraphic);
}