android.media.FaceDetector Java Examples

The following examples show how to use android.media.FaceDetector. Each example lists the source file, the project it was taken from, and the project's license.
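Before the project-specific snippets, the sketch below shows the basic call pattern shared by all of the examples (the class name FaceDetectorBasics and the helper detectFaces are made up for illustration; the FaceDetector constructor, findFaces(), getMidPoint(), eyesDistance() and confidence() are the SDK calls used throughout): create the detector with the bitmap's dimensions and a maximum face count, let findFaces() fill an array, and use its return value to know how many entries are valid. The bitmap must be in RGB_565 format, and several examples below also force an even width before detection because the detector is unreliable on odd-width bitmaps.

import android.graphics.Bitmap;
import android.graphics.PointF;
import android.media.FaceDetector;
import android.util.Log;

public class FaceDetectorBasics {

    /** Runs the detector on a copy of the given bitmap and returns the number of faces found. */
    public static int detectFaces(Bitmap source, int maxFaces) {
        // findFaces() only accepts RGB_565 bitmaps, so convert first.
        Bitmap bitmap = source.copy(Bitmap.Config.RGB_565, false);

        FaceDetector detector =
                new FaceDetector(bitmap.getWidth(), bitmap.getHeight(), maxFaces);
        FaceDetector.Face[] faces = new FaceDetector.Face[maxFaces];
        int count = detector.findFaces(bitmap, faces);

        for (int i = 0; i < count; i++) {
            FaceDetector.Face face = faces[i];
            PointF mid = new PointF();
            face.getMidPoint(mid); // point between the eyes
            Log.d("FaceDetectorBasics", "face " + i
                    + " midpoint=" + mid
                    + " eyesDistance=" + face.eyesDistance()
                    + " confidence=" + face.confidence()); // confidence is in the range 0..1
        }
        return count;
    }
}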
Example #1
Source File: BitmapUtil.java    From haxsync with GNU General Public License v2.0
private static PointF findFaceMid(Bitmap in){
	PointF mid = new PointF();
	// FaceDetector requires an RGB_565 bitmap.
	Bitmap bitmap565 = in.copy(Bitmap.Config.RGB_565, true);

	FaceDetector fd = new FaceDetector(in.getWidth(), in.getHeight(), 1);
	FaceDetector.Face[] faces = new FaceDetector.Face[1];
	int found = fd.findFaces(bitmap565, faces);

	if (found > 0 && faces[0] != null){
		// Midpoint between the eyes of the first detected face.
		faces[0].getMidPoint(mid);
		return mid;
	}
	return null;
}
 
Example #2
Source File: FaceUtils.java    From RairDemo with Apache License 2.0
public static Bitmap detectionFace(Bitmap b) {
    // The bitmap must be converted to RGB_565 before detection (see the link at the end of the article).
    Bitmap bitmap = b.copy(Bitmap.Config.RGB_565, true);
    b.recycle();
    // Maximum number of faces to look for.
    int MAX_FACES = 5;
    FaceDetector faceDet = new FaceDetector(bitmap.getWidth(), bitmap.getHeight(), MAX_FACES);
    // Face data is written into faceArray.
    FaceDetector.Face[] faceArray = new FaceDetector.Face[MAX_FACES];
    // Returns the number of faces found in the image and fills faceArray with their positions.
    // This call is expensive.
    int findFaceCount = faceDet.findFaces(bitmap, faceArray);
    // Read the first face from the returned array (findFaceCount should be checked first; entries beyond it are null).
    FaceDetector.Face face1 = faceArray[0];
    // Midpoint between the eyes, received as x/y coordinates in a PointF.
    PointF point = new PointF();
    face1.getMidPoint(point);
    // Confidence (0..1) that this region is actually a face.
    float confidence = face1.confidence();
    // Distance between the eyes.
    float eyesDistance = face1.eyesDistance();
    // Head pose: pass EULER_X, EULER_Y or EULER_Z to get the rotation about that axis.
    float angle = face1.pose(FaceDetector.Face.EULER_X);

    // TODO: draw a Rect around the face on the bitmap. Since the detector only returns the eye
    // midpoint, the box has to be derived from the eye distance (see the sketch after this example).

    return bitmap;
}
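One possible way to complete the TODO above, sketched with a hypothetical drawFaceBox() helper: the detector only reports the midpoint between the eyes, so a face rectangle has to be estimated from the eye distance. The proportions below mirror the ones used in Example #15 and are heuristics, not values defined by the API.

import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PointF;
import android.graphics.RectF;

// Draws a rough face box around the eye midpoint; the bitmap must be mutable
// (e.g. created with copy(Bitmap.Config.RGB_565, true) as above).
static void drawFaceBox(Bitmap bitmap, PointF eyeMid, float eyesDistance) {
    Paint paint = new Paint();
    paint.setAntiAlias(true);
    paint.setStyle(Paint.Style.STROKE);
    paint.setColor(Color.GREEN);

    // Heuristic proportions: slightly less than one eye distance to each side,
    // one eye distance above the midpoint and one and a half below it.
    RectF faceRect = new RectF(
            eyeMid.x - eyesDistance / 1.2f,
            eyeMid.y - eyesDistance,
            eyeMid.x + eyesDistance / 1.2f,
            eyeMid.y + eyesDistance * 1.5f);

    new Canvas(bitmap).drawRect(faceRect, paint);
}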
 
Example #3
Source File: CropImageActivity.java    From YiBo with Apache License 2.0
public void run() {
    mImageMatrix = mImageView.getImageMatrix();
    Bitmap faceBitmap = prepareBitmap();

    mScale = 1.0F / mScale;
    if (faceBitmap != null && mDoFaceDetection) {
        FaceDetector detector = new FaceDetector(faceBitmap.getWidth(), faceBitmap.getHeight(), mFaces.length);
        mNumFaces = detector.findFaces(faceBitmap, mFaces);
    }

    if (faceBitmap != null && faceBitmap != mBitmap) {
        faceBitmap.recycle();
    }

    mHandler.post(new Runnable() {
        public void run() {
            mWaitingToPick = mNumFaces > 1;
            if (mNumFaces > 0) {
                for (int i = 0; i < mNumFaces; i++) {
                    handleFace(mFaces[i]);
                }
            } else {
                makeDefault();
            }
            mImageView.invalidate();
            if (mImageView.getHighlightViews().size() == 1) {
                mCrop = mImageView.getHighlightViews().get(0);
                mCrop.setFocus(true);
            }

            if (mNumFaces > 1) {
                Toast.makeText(CropImageActivity.this, "Choose One", Toast.LENGTH_SHORT).show();
            }
        }
    });
}
 
Example #4
Source File: CropImageActivity.java    From YiBo with Apache License 2.0
private void handleFace(FaceDetector.Face f) {
    PointF midPoint = new PointF();

    int r = ((int) (f.eyesDistance() * mScale)) * 2;
    f.getMidPoint(midPoint);
    midPoint.x *= mScale;
    midPoint.y *= mScale;

    int midX = (int) midPoint.x;
    int midY = (int) midPoint.y;

    HighlightView hv = new HighlightView(mImageView);

    int width = mBitmap.getWidth();
    int height = mBitmap.getHeight();

    Rect imageRect = new Rect(0, 0, width, height);

    RectF faceRect = new RectF(midX, midY, midX, midY);
    faceRect.inset(-r, -r);
    if (faceRect.left < 0) {
        faceRect.inset(-faceRect.left, -faceRect.left);
    }

    if (faceRect.top < 0) {
        faceRect.inset(-faceRect.top, -faceRect.top);
    }

    if (faceRect.right > imageRect.right) {
        faceRect.inset(faceRect.right - imageRect.right, faceRect.right - imageRect.right);
    }

    if (faceRect.bottom > imageRect.bottom) {
        faceRect.inset(faceRect.bottom - imageRect.bottom, faceRect.bottom - imageRect.bottom);
    }

    hv.setup(mImageMatrix, imageRect, faceRect, mCircleCrop, mAspectX != 0 && mAspectY != 0);

    mImageView.add(hv);
}
 
Example #5
Source File: CameraSurfaceView.java    From Android-Screen-to-Face-Distance-Measurement with MIT License
private void updateMeasurement(final FaceDetector.Face currentFace) {
	if (currentFace == null) {
		// _facesFoundInMeasurement--;
		return;
	}

	_foundFace = _currentFaceDetectionThread.getCurrentFace();

	_points.add(new Point(_foundFace.eyesDistance(),
			CALIBRATION_DISTANCE_A4_MM
					* (_distanceAtCalibrationPoint / _foundFace
							.eyesDistance())));

	while (_points.size() > _threashold) {
		_points.remove(0);
	}

	float sum = 0;
	for (Point p : _points) {
		sum += p.getEyeDistance();
	}

	_currentAvgEyeDistance = sum / _points.size();

	_currentDistanceToFace = CALIBRATION_DISTANCE_A4_MM
			* (_distanceAtCalibrationPoint / _currentAvgEyeDistance);

	_currentDistanceToFace = Util.MM_TO_CM(_currentDistanceToFace);

	MeasurementStepMessage message = new MeasurementStepMessage();
	message.setConfidence(currentFace.confidence());
	message.setCurrentAvgEyeDistance(_currentAvgEyeDistance);
	message.setDistToFace(_currentDistanceToFace);
	message.setEyesDistance(currentFace.eyesDistance());
	message.setMeasurementsLeft(_calibrationsLeft);
	message.setProcessTimeForLastFrame(_processTimeForLastFrame);

	MessageHUB.get().sendMessage(MessageHUB.MEASUREMENT_STEP, message);
}
 
Example #6
Source File: CropImage.java    From reader with MIT License
public void run() {
    mImageMatrix = mImageView.getImageMatrix();
    Bitmap faceBitmap = prepareBitmap();

    mScale = 1.0F / mScale;
    if (faceBitmap != null && mDoFaceDetection) {
        FaceDetector detector = new FaceDetector(faceBitmap.getWidth(),
                faceBitmap.getHeight(), mFaces.length);
        mNumFaces = detector.findFaces(faceBitmap, mFaces);
    }

    if (faceBitmap != null && faceBitmap != mBitmap) {
        faceBitmap.recycle();
    }

    mHandler.post(new Runnable() {
        public void run() {
            mWaitingToPick = mNumFaces > 1;
            if (mNumFaces > 0) {
                for (int i = 0; i < mNumFaces; i++) {
                    handleFace(mFaces[i]);
                }
            } else {
                makeDefault();
            }
            mImageView.invalidate();
            if (mImageView.mHighlightViews.size() == 1) {
                mCrop = mImageView.mHighlightViews.get(0);
                mCrop.setFocus(true);
            }

            if (mNumFaces > 1) {
                Toast t = Toast.makeText(CropImage.this,
                        R.string.multiface_crop_help,
                        Toast.LENGTH_SHORT);
                t.show();
            }
        }
    });
}
 
Example #7
Source File: CropImageActivity.java    From Android-RTEditor with Apache License 2.0
public void run() {

            mImageMatrix = mImageView.getImageMatrix();
            Bitmap faceBitmap = prepareBitmap();

            mScale = 1.0F / mScale;
            if (faceBitmap != null && mDoFaceDetection) {
                FaceDetector detector = new FaceDetector(faceBitmap.getWidth(),
                        faceBitmap.getHeight(), mFaces.length);
                mNumFaces = detector.findFaces(faceBitmap, mFaces);
            }

            if (faceBitmap != null && faceBitmap != mBitmap) {
                faceBitmap.recycle();
            }

            mHandler.post(new Runnable() {
                public void run() {
                    mWaitingToPick = mNumFaces > 1;
                    if (mNumFaces > 0) {
                        for (int i = 0; i < mNumFaces; i++) {
                            handleFace(mFaces[i]);
                        }
                    } else {
                        makeDefault();
                    }
                    mImageView.invalidate();
                    if (mImageView.mHighlightViews.size() == 1) {
                        mCrop = mImageView.mHighlightViews.get(0);
                        mCrop.setFocus(true);
                    }
                }
            });
        }
 
Example #8
Source File: CropImage.java    From droidddle with Apache License 2.0
public void run() {
    mImageMatrix = mImageView.getImageMatrix();
    Bitmap faceBitmap = prepareBitmap();

    mScale = 1.0F / mScale;
    if (faceBitmap != null && mDoFaceDetection) {
        FaceDetector detector = new FaceDetector(faceBitmap.getWidth(), faceBitmap.getHeight(), mFaces.length);
        mNumFaces = detector.findFaces(faceBitmap, mFaces);
    }

    if (faceBitmap != null && faceBitmap != mBitmap) {
        faceBitmap.recycle();
    }

    mHandler.post(new Runnable() {
        public void run() {
            mWaitingToPick = mNumFaces > 1;
            if (mNumFaces > 0) {
                for (int i = 0; i < mNumFaces; i++) {
                    handleFace(mFaces[i]);
                }
            } else {
                makeDefault();
            }
            mImageView.invalidate();
            if (mImageView.mHighlightViews.size() == 1) {
                mCrop = mImageView.mHighlightViews.get(0);
                mCrop.setFocus(true);
            }

            if (mNumFaces > 1) {
                Toast t = Toast.makeText(CropImage.this, R.string.multiface_crop_help, Toast.LENGTH_SHORT);
                t.show();
            }
        }
    });
}
 
Example #9
Source File: CropImage.java    From droidddle with Apache License 2.0
private void handleFace(FaceDetector.Face f) {
    PointF midPoint = new PointF();

    int r = ((int) (f.eyesDistance() * mScale)) * 2;
    f.getMidPoint(midPoint);
    midPoint.x *= mScale;
    midPoint.y *= mScale;

    int midX = (int) midPoint.x;
    int midY = (int) midPoint.y;

    HighlightView hv = new HighlightView(mImageView);

    int width = mBitmap.getWidth();
    int height = mBitmap.getHeight();

    Rect imageRect = new Rect(0, 0, width, height);

    RectF faceRect = new RectF(midX, midY, midX, midY);
    faceRect.inset(-r, -r);
    if (faceRect.left < 0) {
        faceRect.inset(-faceRect.left, -faceRect.left);
    }

    if (faceRect.top < 0) {
        faceRect.inset(-faceRect.top, -faceRect.top);
    }

    if (faceRect.right > imageRect.right) {
        faceRect.inset(faceRect.right - imageRect.right, faceRect.right - imageRect.right);
    }

    if (faceRect.bottom > imageRect.bottom) {
        faceRect.inset(faceRect.bottom - imageRect.bottom, faceRect.bottom - imageRect.bottom);
    }

    hv.setup(mImageMatrix, imageRect, faceRect, mCircleCrop, mAspectX != 0 && mAspectY != 0);

    mImageView.add(hv);
}
 
Example #10
Source File: CropImageActivity.java    From memoir with Apache License 2.0
public void run() {

            mImageMatrix = mImageView.getImageMatrix();
            Bitmap faceBitmap = prepareBitmap();

            mScale = 1.0F / mScale;
            if (faceBitmap != null && mDoFaceDetection) {
                FaceDetector detector = new FaceDetector(faceBitmap.getWidth(),
                        faceBitmap.getHeight(), mFaces.length);
                mNumFaces = detector.findFaces(faceBitmap, mFaces);
            }

            if (faceBitmap != null && faceBitmap != mBitmap) {
                faceBitmap.recycle();
            }

            mHandler.post(new Runnable() {
                public void run() {
                    mWaitingToPick = mNumFaces > 1;
                    if (mNumFaces > 0) {
                        for (int i = 0; i < mNumFaces; i++) {
                            handleFace(mFaces[i]);
                        }
                    } else {
                        makeDefault();
                    }
                    mImageView.invalidate();
                    if (mImageView.mHighlightViews.size() == 1) {
                        mCrop = mImageView.mHighlightViews.get(0);
                        mCrop.setFocus(true);
                    }
                }
            });
        }
 
Example #11
Source File: PreProcessor.java    From Android-Face-Recognition-with-Deep-Learning-Library with Apache License 2.0
public void setFaces(PreProcessorFactory.PreprocessingMode preprocessingMode) {
    List<Mat> images = getImages();

    PreferencesHelper preferencesHelper = new PreferencesHelper(context);
    if (preferencesHelper.getDetectionMethod()){
        faces = faceDetection.getFaces(images.get(0));
        angle = faceDetection.getAngle();
    } else {
        Mat img = images.get(0);
        FaceDetector faceDetector = new FaceDetector(img.cols(), img.rows(), 1);
        Bitmap bmp = Bitmap.createBitmap(img.cols(), img.rows(), Bitmap.Config.RGB_565);
        Utils.matToBitmap(img, bmp);
        FaceDetector.Face[] facesAndroid = new FaceDetector.Face[1];
        if (faceDetector.findFaces(bmp, facesAndroid) > 0){
            faces = new Rect[facesAndroid.length];
            for (int i=0; i<facesAndroid.length; i++){
                PointF pointF = new PointF();
                facesAndroid[i].getMidPoint(pointF);
                int xWidth = (int) (1.34 * facesAndroid[i].eyesDistance());
                int yWidth = (int) (1.12 * facesAndroid[i].eyesDistance());
                int dist = (int) (2.77 * facesAndroid[i].eyesDistance());
                Rect face = new Rect((int) pointF.x - xWidth, (int) pointF.y - yWidth, dist, dist);
                faces[i] = face;
            }
        }
    }

    if (preprocessingMode == PreProcessorFactory.PreprocessingMode.RECOGNITION && preferencesHelper.getDetectionMethod()){
        // Change the image rotation to the angle where the face was detected
        images.remove(0);
        images.add(faceDetection.getImg());
        setImages(images);
    }
}
 
Example #12
Source File: ImageData.java    From PrivacyStreams with Apache License 2.0
List<FaceDetector.Face> getFaces(UQI uqi) {
    int max = 10;
    List<FaceDetector.Face> faces = new ArrayList<>();
    Bitmap bitmap = this.getBitmapRGB565(uqi);
    if (bitmap == null) return faces;
    FaceDetector detector = new FaceDetector(bitmap.getWidth(), bitmap.getHeight(), max);
    FaceDetector.Face[] facesArray = new FaceDetector.Face[max];
    int count = detector.findFaces(bitmap, facesArray);
    for (int i = 0; i < count; i++) {
        FaceDetector.Face face = facesArray[i];
        if (face != null && face.confidence() > 0.3)
            faces.add(face);
    }
    return faces;
}
 
Example #13
Source File: CropImageActivity.java    From memoir with Apache License 2.0
@Override
public void run() {

    mImageMatrix = mImageView.getImageMatrix();
    Bitmap faceBitmap = prepareBitmap();

    mScale = 1.0F / mScale;
    if (faceBitmap != null && mDoFaceDetection) {
        FaceDetector detector = new FaceDetector(faceBitmap.getWidth(),
                faceBitmap.getHeight(), mFaces.length);
        mNumFaces = detector.findFaces(faceBitmap, mFaces);
    }

    if (faceBitmap != null && faceBitmap != mBitmap) {
        faceBitmap.recycle();
    }

    mHandler.post(new Runnable() {
        @Override
        public void run() {
            mWaitingToPick = mNumFaces > 1;
            if (mNumFaces > 0) {
                for (int i = 0; i < mNumFaces; i++) {
                    handleFace(mFaces[i]);
                }
            } else {
                makeDefault();
            }
            mImageView.invalidate();
            if (mImageView.mHighlightViews.size() == 1) {
                mCrop = mImageView.mHighlightViews.get(0);
                mCrop.setFocus(true);
            }
        }
    });
}
 
Example #14
Source File: PUtil.java    From PHONK with GNU General Public License v3.0
@PhonkMethod(description = "Detect faces in a bitmap", example = "")
@PhonkMethodParam(params = {"Bitmap", "numFaces"})
public int detectFaces(Bitmap bmp, int num_faces) {
    FaceDetector face_detector = new FaceDetector(bmp.getWidth(), bmp.getHeight(), num_faces);
    FaceDetector.Face[] faces = new FaceDetector.Face[num_faces];
    int face_count = face_detector.findFaces(bmp, faces);

    return face_count;
}
 
Example #15
Source File: FaceUtils.java    From RairDemo with Apache License 2.0
public static Bitmap DetectionBitmap(Bitmap b) {
    // The bitmap must be converted to RGB_565 before detection (see the link at the end of the article).
    Bitmap bitmap = b.copy(Bitmap.Config.RGB_565, true);
    b.recycle();
    // Maximum number of faces to look for. Larger values produce more false positives,
    // so the confidence value is needed to filter them, and even that is not always reliable.
    int MAX_FACES = 30; // in testing this detected at least 27 faces
    FaceDetector faceDet = new FaceDetector(bitmap.getWidth(), bitmap.getHeight(), MAX_FACES);
    // Face data is written into faceArray.
    FaceDetector.Face[] faceArray = new FaceDetector.Face[MAX_FACES];
    // Returns the number of faces found and fills faceArray with their positions.
    // This call is expensive; the larger the image, the longer it takes.
    int findFaceNum = faceDet.findFaces(bitmap, faceArray);
    Log.e("tag", "number of faces found: " + findFaceNum);
    // Read the first face from the returned array (findFaceNum should be checked first; entries beyond it are null).
    FaceDetector.Face face1 = faceArray[0];
    // Midpoint between the eyes, received as x/y coordinates in a PointF.
    PointF point = new PointF();
    face1.getMidPoint(point);
    // Confidence (0..1) that this region is actually a face.
    float confidence = face1.confidence();
    // Distance between the eyes.
    float eyesDistance = face1.eyesDistance();
    // Head pose: pass EULER_X, EULER_Y or EULER_Z to get the rotation about that axis.
    float angle = face1.pose(FaceDetector.Face.EULER_X);
    Paint p = new Paint();
    p.setAntiAlias(true);
    p.setStrokeWidth(1);
    p.setStyle(Paint.Style.STROKE);
    p.setColor(Color.GREEN);
    Canvas canvas = new Canvas(bitmap);
    PointF pf = new PointF();
    RectF r = new RectF();
    for (FaceDetector.Face face : faceArray) {
        if (face == null) {
            continue;
        }
        face.getMidPoint(pf);
        Log.e("tag", "FaceSDK : DetectionBitmap + " + face.confidence());
        // Face rectangle: (r.left, r.top) is the top-left corner, (r.right, r.bottom) the bottom-right.
        // eyesDistance() is roughly the distance between the outer edges of the two pupils.
        float eyesDistance1 = face.eyesDistance();
        r.left = pf.x - eyesDistance1 / 1.2f;
        r.right = pf.x + eyesDistance1 / 1.2f;
        r.top = pf.y - eyesDistance1;
        r.bottom = pf.y + eyesDistance1 * 1.5f;
        Log.d(TAG, r.toString());
        // Draw the box.
        canvas.drawRect(r, p);
    }
    return bitmap;
}
 
Example #16
Source File: FaceDetectionThread.java    From Android-Screen-to-Face-Distance-Measurement with MIT License
/**
 * Decodes the current NV21 preview frame into a Bitmap, rotates it to
 * portrait orientation and runs FaceDetector on it.
 */
@Override
public void run() {

	long t = System.currentTimeMillis();

	YuvImage yuvimage = new YuvImage(_data, ImageFormat.NV21,
			_previewSize.width, _previewSize.height, null);

	ByteArrayOutputStream baos = new ByteArrayOutputStream();

	if (!yuvimage.compressToJpeg(new Rect(0, 0, _previewSize.width,
			_previewSize.height), 100, baos)) {

		Log.e("Camera", "compressToJpeg failed");

	}

	Log.i("Timing", "Compression finished: "
			+ (System.currentTimeMillis() - t));
	t = System.currentTimeMillis();

	BitmapFactory.Options bfo = new BitmapFactory.Options();
	bfo.inPreferredConfig = Bitmap.Config.RGB_565;

	_currentFrame = BitmapFactory.decodeStream(new ByteArrayInputStream(
			baos.toByteArray()), null, bfo);

	Log.i("Timing", "Decode Finished: " + (System.currentTimeMillis() - t));
	t = System.currentTimeMillis();

	// Rotate the frame so it suits our portrait mode
	Matrix matrix = new Matrix();
	matrix.postRotate(90);
	matrix.preScale(-1, 1);
	// We rotate the same Bitmap
	_currentFrame = Bitmap.createBitmap(_currentFrame, 0, 0,
			_previewSize.width, _previewSize.height, matrix, false);

	Log.i("Timing",
			"Rotate, Create finished: " + (System.currentTimeMillis() - t));
	t = System.currentTimeMillis();

	if (_currentFrame == null) {
		Log.e(FACEDETECTIONTHREAD_TAG, "Could not decode Image");
		return;
	}

	FaceDetector d = new FaceDetector(_currentFrame.getWidth(),
			_currentFrame.getHeight(), 1);

	Face[] faces = new Face[1];
	d.findFaces(_currentFrame, faces);

	Log.i("Timing",
			"FaceDetection finished: " + (System.currentTimeMillis() - t));
	t = System.currentTimeMillis();

	_currentFace = faces[0];
	Log.d(FACEDETECTIONTHREAD_TAG, "Found: " + faces[0] + " Faces");
}
 
Example #17
Source File: SystemFaceDetector.java    From ViseFace with Apache License 2.0
/**
 * Compute the detection rectangles for the recognized faces.
 */
private void getFaceRect() {
    Rect[] faceRectList = new Rect[mDetectorData.getFacesCount()];
    Rect rect = null;
    int index = 0;
    float distance = 0;
    for (int i = 0; i < mDetectorData.getFacesCount(); i++) {
        faceRectList[i] = new Rect();
        FaceDetector.Face face = mFaces[i];
        if (face != null) {
            float eyeDistance = face.eyesDistance();
            eyeDistance = eyeDistance * mZoomRatio;
            if (eyeDistance > distance) {
                distance = eyeDistance;
                rect = faceRectList[i];
                index = i;
            }
            PointF midEyesPoint = new PointF();
            face.getMidPoint(midEyesPoint);
            midEyesPoint.x = midEyesPoint.x * mZoomRatio;
            midEyesPoint.y = midEyesPoint.y * mZoomRatio;
            ViseLog.i("eyeDistance:" + eyeDistance + ",midEyesPoint.x:" + midEyesPoint.x
                    + ",midEyesPoint.y:" + midEyesPoint.y);
            faceRectList[i].set((int) (midEyesPoint.x - eyeDistance),
                    (int) (midEyesPoint.y - eyeDistance),
                    (int) (midEyesPoint.x + eyeDistance),
                    (int) (midEyesPoint.y + eyeDistance));
            ViseLog.i("FaceRectList[" + i + "]:" + faceRectList[i]);
        }
    }
    int width = (int) (mPreviewHeight * mZoomRatio / 5);
    if (rect != null && mCameraId == Camera.CameraInfo.CAMERA_FACING_FRONT) {
        int left = rect.left;
        rect.left = width - rect.right;
        rect.right = width - left;
        faceRectList[index].left = rect.left;
        faceRectList[index].right = rect.right;
    }
    mDetectorData.setLightIntensity(FaceUtil.getYUVLight(mDetectorData.getFaceData(), rect, width));
    mDetectorData.setFaceRectList(faceRectList);
    if (mCameraWidth > 0) {
        mDetectorData.setDistance(distance * 2.5f / mCameraWidth);
    }
}
 
Example #18
Source File: CropImage.java    From WifiChat with GNU General Public License v2.0
public void run() {
	mImageMatrix = mImageView.getImageMatrix();
	Bitmap faceBitmap = prepareBitmap();

	mScale = 1.0F / mScale;
	if (faceBitmap != null) {
		FaceDetector detector = new FaceDetector(faceBitmap.getWidth(),
				faceBitmap.getHeight(), mFaces.length);
		mNumFaces = detector.findFaces(faceBitmap, mFaces);
	}

	if (faceBitmap != null && faceBitmap != mBitmap) {
		faceBitmap.recycle();
	}

	mHandler.post(new Runnable() {
		public void run() {
			mWaitingToPick = mNumFaces > 1;
			// if (mNumFaces > 0) {
			// // for (int i = 0; i < mNumFaces; i++) {
			// for (int i = 0; i < 1; i++) {
			// handleFace(mFaces[i]);
			// }
			// } else {
			makeDefault();
			// }
			mImageView.invalidate();
			if (mImageView.mHighlightViews.size() > 0) {
				mCrop = mImageView.mHighlightViews.get(0);
				mCrop.setFocus(true);
			}

			if (mNumFaces > 1) {
				// CR: no need for the variable t. just do
				// Toast.makeText(...).show().
				// Toast t = Toast.makeText(mContext,
				// R.string.multiface_crop_help, Toast.LENGTH_SHORT);
				// t.show();
			}
		}
	});
}
 
Example #19
Source File: CropImage.java    From WifiChat with GNU General Public License v2.0
@SuppressWarnings("unused")
private void handleFace(FaceDetector.Face f) {
	PointF midPoint = new PointF();

	int r = ((int) (f.eyesDistance() * mScale)) * 2;
	f.getMidPoint(midPoint);
	midPoint.x *= mScale;
	midPoint.y *= mScale;

	int midX = (int) midPoint.x;
	int midY = (int) midPoint.y;

	HighlightView hv = new HighlightView(mImageView);

	int width = mBitmap.getWidth();
	int height = mBitmap.getHeight();

	Rect imageRect = new Rect(0, 0, width, height);

	RectF faceRect = new RectF(midX, midY, midX, midY);
	faceRect.inset(-r, -r);
	if (faceRect.left < 0) {
		faceRect.inset(-faceRect.left, -faceRect.left);
	}

	if (faceRect.top < 0) {
		faceRect.inset(-faceRect.top, -faceRect.top);
	}

	if (faceRect.right > imageRect.right) {
		faceRect.inset(faceRect.right - imageRect.right, faceRect.right
				- imageRect.right);
	}

	if (faceRect.bottom > imageRect.bottom) {
		faceRect.inset(faceRect.bottom - imageRect.bottom,
				faceRect.bottom - imageRect.bottom);
	}

	hv.setup(mImageMatrix, imageRect, faceRect, false, true);

	mImageView.add(hv);
}
 
Example #20
Source File: FaceCropper.java    From UltimateAndroid with Apache License 2.0
public Bitmap cropFace(Bitmap original) {
    Bitmap fixedBitmap = BitmapUtils.forceEvenBitmapSize(original);
    fixedBitmap = BitmapUtils.forceConfig565(fixedBitmap);

    FaceDetector faceDetector = new FaceDetector(
            fixedBitmap.getWidth(), fixedBitmap.getHeight(),
            mMaxFaces);

    FaceDetector.Face[] faces = new FaceDetector.Face[mMaxFaces];

    // The bitmap must be in 565 format (for now).
    int faceCount = faceDetector.findFaces(fixedBitmap, faces);

    Logs.d("face crop");

    if (faceCount == 0) {
        return fixedBitmap;
    }

    int initX = fixedBitmap.getWidth();
    int initY = fixedBitmap.getHeight();
    int endX = 0;
    int endY = 0;

    PointF centerFace = new PointF();

    // Calculates minimum box to fit all detected faces
    for (int i = 0; i < faceCount; i++) {
        FaceDetector.Face face = faces[i];

        // Eyes distance * 3 usually fits an entire face
        int faceSize = (int) (face.eyesDistance() * 3);
        if (SizeMode.FaceMarginPx.equals(mSizeMode)) {
            faceSize += mFaceMarginPx * 2; // *2 for top and down/right and left effect
        } else if (SizeMode.EyeDistanceFactorMargin.equals(mSizeMode)) {
            faceSize += face.eyesDistance() * mEyeDistanceFactorMargin;
        }

        faceSize = Math.max(faceSize, mFaceMinSize);

        face.getMidPoint(centerFace);

        int tInitX = (int) (centerFace.x - faceSize / 2);
        int tInitY = (int) (centerFace.y - faceSize / 2);
        tInitX = Math.max(0, tInitX);
        tInitY = Math.max(0, tInitY);

        int tEndX = tInitX + faceSize;
        int tEndY = tInitY + faceSize;
        tEndX = Math.min(tEndX, fixedBitmap.getWidth());
        tEndY = Math.min(tEndY, fixedBitmap.getHeight());

        initX = Math.min(initX, tInitX);
        initY = Math.min(initY, tInitY);
        endX = Math.max(endX, tEndX);
        endY = Math.max(endY, tEndY);
    }

    int sizeX = endX - initX;
    int sizeY = endY - initY;

    if (sizeX + initX > fixedBitmap.getWidth()) {
        sizeX = fixedBitmap.getWidth() - initX;
    }
    if (sizeY + initY > fixedBitmap.getHeight()) {
        sizeY = fixedBitmap.getHeight() - initY;
    }

    Bitmap croppedBitmap = Bitmap.createBitmap(fixedBitmap, initX, initY, sizeX, sizeY);
    if (fixedBitmap != croppedBitmap) {
        fixedBitmap.recycle();
    }

    return croppedBitmap;
}
 
Example #21
Source File: FaceCropper.java    From UltimateAndroid with Apache License 2.0
public Bitmap cropFace(Bitmap original) {
    Bitmap fixedBitmap = BitmapUtils.forceEvenBitmapSize(original);
    fixedBitmap = BitmapUtils.forceConfig565(fixedBitmap);

    FaceDetector faceDetector = new FaceDetector(
            fixedBitmap.getWidth(), fixedBitmap.getHeight(),
            mMaxFaces);

    FaceDetector.Face[] faces = new FaceDetector.Face[mMaxFaces];

    // The bitmap must be in 565 format (for now).
    int faceCount = faceDetector.findFaces(fixedBitmap, faces);

    Logs.d("face crop");

    if (faceCount == 0) {
        return fixedBitmap;
    }

    int initX = fixedBitmap.getWidth();
    int initY = fixedBitmap.getHeight();
    int endX = 0;
    int endY = 0;

    PointF centerFace = new PointF();

    // Calculates minimum box to fit all detected faces
    for (int i = 0; i < faceCount; i++) {
        FaceDetector.Face face = faces[i];

        // Eyes distance * 3 usually fits an entire face
        int faceSize = (int) (face.eyesDistance() * 3);
        if (SizeMode.FaceMarginPx.equals(mSizeMode)) {
            faceSize += mFaceMarginPx * 2; // *2 for top and down/right and left effect
        } else if (SizeMode.EyeDistanceFactorMargin.equals(mSizeMode)) {
            faceSize += face.eyesDistance() * mEyeDistanceFactorMargin;
        }

        faceSize = Math.max(faceSize, mFaceMinSize);

        face.getMidPoint(centerFace);

        int tInitX = (int) (centerFace.x - faceSize / 2);
        int tInitY = (int) (centerFace.y - faceSize / 2);
        tInitX = Math.max(0, tInitX);
        tInitY = Math.max(0, tInitY);

        int tEndX = tInitX + faceSize;
        int tEndY = tInitY + faceSize;
        tEndX = Math.min(tEndX, fixedBitmap.getWidth());
        tEndY = Math.min(tEndY, fixedBitmap.getHeight());

        initX = Math.min(initX, tInitX);
        initY = Math.min(initY, tInitY);
        endX = Math.max(endX, tEndX);
        endY = Math.max(endY, tEndY);
    }

    int sizeX = endX - initX;
    int sizeY = endY - initY;

    if (sizeX + initX > fixedBitmap.getWidth()) {
        sizeX = fixedBitmap.getWidth() - initX;
    }
    if (sizeY + initY > fixedBitmap.getHeight()) {
        sizeY = fixedBitmap.getHeight() - initY;
    }

    Bitmap croppedBitmap = Bitmap.createBitmap(fixedBitmap, initX, initY, sizeX, sizeY);
    if (fixedBitmap != croppedBitmap) {
        fixedBitmap.recycle();
    }

    return croppedBitmap;
}
 
Example #22
Source File: CropImage.java    From Pi-Locker with GNU General Public License v2.0
public void run() {

            mImageMatrix = mImageView.getImageMatrix();
            Bitmap faceBitmap = prepareBitmap();

            mScale = 1.0F / mScale;
            if (faceBitmap != null && mDoFaceDetection) {
                FaceDetector detector = new FaceDetector(faceBitmap.getWidth(),
                        faceBitmap.getHeight(), mFaces.length);
                mNumFaces = detector.findFaces(faceBitmap, mFaces);
            }

            if (faceBitmap != null && faceBitmap != mBitmap) {
                faceBitmap.recycle();
            }

            mHandler.post(new Runnable() {
                public void run() {

                    mWaitingToPick = mNumFaces > 1;
                    if (mNumFaces > 0) {
                        for (int i = 0; i < mNumFaces; i++) {
                            handleFace(mFaces[i]);
                        }
                    } else {
                        makeDefault();
                    }
                    mImageView.invalidate();
                    if (mImageView.mHighlightViews.size() == 1) {
                        mCrop = mImageView.mHighlightViews.get(0);
                        mCrop.setFocus(true);
                    }

                    if (mNumFaces > 1) {
                        Toast.makeText(CropImage.this,
                                "Multi face crop help",
                                Toast.LENGTH_SHORT).show();
                    }
                }
            });
        }
 
Example #23
Source File: CropImage.java    From Pi-Locker with GNU General Public License v2.0
private void handleFace(FaceDetector.Face f) {

            PointF midPoint = new PointF();

            int r = ((int) (f.eyesDistance() * mScale)) * 2;
            f.getMidPoint(midPoint);
            midPoint.x *= mScale;
            midPoint.y *= mScale;

            int midX = (int) midPoint.x;
            int midY = (int) midPoint.y;

            HighlightView hv = new HighlightView(mImageView);

            int width = mBitmap.getWidth();
            int height = mBitmap.getHeight();

            Rect imageRect = new Rect(0, 0, width, height);

            RectF faceRect = new RectF(midX, midY, midX, midY);
            faceRect.inset(-r, -r);
            if (faceRect.left < 0) {
                faceRect.inset(-faceRect.left, -faceRect.left);
            }

            if (faceRect.top < 0) {
                faceRect.inset(-faceRect.top, -faceRect.top);
            }

            if (faceRect.right > imageRect.right) {
                faceRect.inset(faceRect.right - imageRect.right,
                        faceRect.right - imageRect.right);
            }

            if (faceRect.bottom > imageRect.bottom) {
                faceRect.inset(faceRect.bottom - imageRect.bottom,
                        faceRect.bottom - imageRect.bottom);
            }

            hv.setup(mImageMatrix, imageRect, faceRect, mCircleCrop,
                    mAspectX != 0 && mAspectY != 0);

            mImageView.add(hv);
        }
 
Example #24
Source File: CropImageActivity.java    From memoir with Apache License 2.0
private void handleFace(FaceDetector.Face f) {

            PointF midPoint = new PointF();

            int r = ((int) (f.eyesDistance() * mScale)) * 2;
            f.getMidPoint(midPoint);
            midPoint.x *= mScale;
            midPoint.y *= mScale;

            int midX = (int) midPoint.x;
            int midY = (int) midPoint.y;

            HighlightView hv = new HighlightView(mImageView);

            int width = mBitmap.getWidth();
            int height = mBitmap.getHeight();

            Rect imageRect = new Rect(0, 0, width, height);

            RectF faceRect = new RectF(midX, midY, midX, midY);
            faceRect.inset(-r, -r);
            if (faceRect.left < 0) {
                faceRect.inset(-faceRect.left, -faceRect.left);
            }

            if (faceRect.top < 0) {
                faceRect.inset(-faceRect.top, -faceRect.top);
            }

            if (faceRect.right > imageRect.right) {
                faceRect.inset(faceRect.right - imageRect.right, faceRect.right
                        - imageRect.right);
            }

            if (faceRect.bottom > imageRect.bottom) {
                faceRect.inset(faceRect.bottom - imageRect.bottom,
                        faceRect.bottom - imageRect.bottom);
            }

            hv.setup(mImageMatrix, imageRect, faceRect, mCircleCrop,
                    mAspectX != 0 && mAspectY != 0);

            mImageView.add(hv);
        }
 
Example #25
Source File: CropImage.java    From reader with MIT License
private void handleFace(FaceDetector.Face f) {
    PointF midPoint = new PointF();

    int r = ((int) (f.eyesDistance() * mScale)) * 2;
    f.getMidPoint(midPoint);
    midPoint.x *= mScale;
    midPoint.y *= mScale;

    int midX = (int) midPoint.x;
    int midY = (int) midPoint.y;

    HighlightView hv = new HighlightView(mImageView);

    int width = mBitmap.getWidth();
    int height = mBitmap.getHeight();

    Rect imageRect = new Rect(0, 0, width, height);

    RectF faceRect = new RectF(midX, midY, midX, midY);
    faceRect.inset(-r, -r);
    if (faceRect.left < 0) {
        faceRect.inset(-faceRect.left, -faceRect.left);
    }

    if (faceRect.top < 0) {
        faceRect.inset(-faceRect.top, -faceRect.top);
    }

    if (faceRect.right > imageRect.right) {
        faceRect.inset(faceRect.right - imageRect.right,
                       faceRect.right - imageRect.right);
    }

    if (faceRect.bottom > imageRect.bottom) {
        faceRect.inset(faceRect.bottom - imageRect.bottom,
                       faceRect.bottom - imageRect.bottom);
    }

    hv.setup(mImageMatrix, imageRect, faceRect, mCircleCrop, false
             /*mAspectX != 0 && mAspectY != 0*/);

    mImageView.add(hv);
}
 
Example #26
Source File: CropImage.java    From reader with MIT License
public void run() {
            mImageMatrix = mImageView.getImageMatrix();
            Bitmap faceBitmap = prepareBitmap();

            mScale = 1.0F / mScale;
            if (faceBitmap != null && mDoFaceDetection) {
                FaceDetector detector = new FaceDetector(faceBitmap.getWidth(),
                        faceBitmap.getHeight(), mFaces.length);
                mNumFaces = detector.findFaces(faceBitmap, mFaces);
            }

            if (faceBitmap != null && faceBitmap != mBitmap) {
                faceBitmap.recycle();
            }

            mHandler.post(new Runnable() {
                public void run() {
                    mWaitingToPick = mNumFaces > 1;
//                    if (mNumFaces > 0) {
//                        for (int i = 0; i < mNumFaces; i++) {
//                            handleFace(mFaces[i]);
//                        }
//                    } else {
//                        makeDefault();
//                    }
                    makeDefault();
                    mImageView.invalidate();
                    if (mImageView.mHighlightViews.size() == 1) {
                        mCrop = mImageView.mHighlightViews.get(0);
                        mCrop.setFocus(true);
                    }

                    if (mNumFaces > 1) {
                        Toast t = Toast.makeText(CropImage.this,
                                R.string.multiface_crop_help,
                                Toast.LENGTH_SHORT);
                        t.show();
                    }
                }
            });
        }
 
Example #27
Source File: CropImage.java    From reader with MIT License
private void handleFace(FaceDetector.Face f) {
    PointF midPoint = new PointF();

    int r = ((int) (f.eyesDistance() * mScale)) * 2;
    f.getMidPoint(midPoint);
    midPoint.x *= mScale;
    midPoint.y *= mScale;

    int midX = (int) midPoint.x;
    int midY = (int) midPoint.y;

    HighlightView hv = new HighlightView(mImageView);

    int width = mBitmap.getWidth();
    int height = mBitmap.getHeight();

    Rect imageRect = new Rect(0, 0, width, height);

    RectF faceRect = new RectF(midX, midY, midX, midY);
    faceRect.inset(-r, -r);
    if (faceRect.left < 0) {
        faceRect.inset(-faceRect.left, -faceRect.left);
    }

    if (faceRect.top < 0) {
        faceRect.inset(-faceRect.top, -faceRect.top);
    }

    if (faceRect.right > imageRect.right) {
        faceRect.inset(faceRect.right - imageRect.right,
                       faceRect.right - imageRect.right);
    }

    if (faceRect.bottom > imageRect.bottom) {
        faceRect.inset(faceRect.bottom - imageRect.bottom,
                       faceRect.bottom - imageRect.bottom);
    }

    hv.setup(mImageMatrix, imageRect, faceRect, mCircleCrop, false
             /*mAspectX != 0 && mAspectY != 0*/);

    mImageView.add(hv);
}
 
Example #28
Source File: FaceCropper.java    From FaceRecognition with Apache License 2.0
/**
 * Core face-cropping method.
 *
 * @param bitmap
 *            the image to run detection on
 * @param debug
 *            whether to enable debug drawing
 * @return the crop result
 */
private FaceResult cropFace(Bitmap bitmap, boolean debug) {
    Bitmap formatBitmap = BitmapOperate.formatBitmap(bitmap);
    formatBitmap = BitmapOperate.formatBitmapTo565(formatBitmap);
    Bitmap aimBitmap = formatBitmap.copy(Bitmap.Config.RGB_565, true);
    if (formatBitmap != aimBitmap) {
        formatBitmap.recycle();
    }
    // Create a face detector.
    FaceDetector faceDetector = new FaceDetector(aimBitmap.getWidth(),
            aimBitmap.getHeight(), mMaxFaces);
    // Array that receives the detected faces.
    FaceDetector.Face[] faces = new FaceDetector.Face[mMaxFaces];
    // The bitmap must be in RGB_565 format.
    int faceCount = faceDetector.findFaces(aimBitmap, faces);

    if (debug) {
        Log.d("kymjs", faceCount + " faces found");
    }

    if (faceCount == 0) {
        return new FaceResult(aimBitmap);
    }

    int initX = aimBitmap.getWidth();
    int initY = aimBitmap.getHeight();
    int endX = 0;
    int endY = 0;

    PointF centerFace = new PointF();

    Canvas canvas = new Canvas(aimBitmap);
    canvas.drawBitmap(aimBitmap, new Matrix(), null);

    // Calculate the minimum box that fits all detected faces.
    for (int i = 0; i < faceCount; i++) {
        FaceDetector.Face face = faces[i];
        // The eye distance times three is usually used as the diameter of the face's bounding circle.
        int faceSize = (int) (face.eyesDistance() * 3);
        if (SizeMode.FACE_MARGIN.equals(mSizeMode)) {
            faceSize += mFaceMarginPx * 2;
        } else if (SizeMode.EYE_MARGIN.equals(mSizeMode)) {
            faceSize += face.eyesDistance() * mEyeMarginPx;
        }
        faceSize = Math.max(faceSize, mFaceMinSize);
        face.getMidPoint(centerFace);

        if (debug) { // draw the debug face-detection circle
            canvas.drawPoint(centerFace.x, centerFace.y, mDebugPainter);
            canvas.drawCircle(centerFace.x, centerFace.y,
                    face.eyesDistance() * 1.5f, mDebugPainter);
        }

        int tInitX = (int) (centerFace.x - faceSize / 2);
        int tInitY = (int) (centerFace.y - faceSize / 2);
        tInitX = Math.max(0, tInitX);
        tInitY = Math.max(0, tInitY);

        int tEndX = tInitX + faceSize;
        int tEndY = tInitY + faceSize;
        tEndX = Math.min(tEndX, aimBitmap.getWidth());
        tEndY = Math.min(tEndY, aimBitmap.getHeight());

        initX = Math.min(initX, tInitX);
        initY = Math.min(initY, tInitY);
        endX = Math.max(endX, tEndX);
        endY = Math.max(endY, tEndY);
    }

    int sizeX = endX - initX;
    int sizeY = endY - initY;

    if (sizeX + initX > aimBitmap.getWidth()) {
        sizeX = aimBitmap.getWidth() - initX;
    }
    if (sizeY + initY > aimBitmap.getHeight()) {
        sizeY = aimBitmap.getHeight() - initY;
    }
    Point init = new Point(initX, initY);
    Point end = new Point(initX + sizeX, initY + sizeY);
    return new FaceResult(aimBitmap, init, end);
}
 
Example #29
Source File: FaceCropper.java    From AndroidFaceCropper with Apache License 2.0
protected CropResult cropFace(Bitmap original, boolean debug) {
    Bitmap fixedBitmap = BitmapUtils.forceEvenBitmapSize(original);
    fixedBitmap = BitmapUtils.forceConfig565(fixedBitmap);
    Bitmap mutableBitmap = fixedBitmap.copy(Bitmap.Config.RGB_565, true);

    if (fixedBitmap != mutableBitmap) {
        fixedBitmap.recycle();
    }

    FaceDetector faceDetector = new FaceDetector(
            mutableBitmap.getWidth(), mutableBitmap.getHeight(),
            mMaxFaces);

    FaceDetector.Face[] faces = new FaceDetector.Face[mMaxFaces];

    // The bitmap must be in 565 format (for now).
    int faceCount = faceDetector.findFaces(mutableBitmap, faces);

    if (BuildConfig.DEBUG) {
        Log.d(LOG_TAG, faceCount + " faces found");
    }

    if (faceCount == 0) {
        return new CropResult(mutableBitmap);
    }

    int initX = mutableBitmap.getWidth();
    int initY = mutableBitmap.getHeight();
    int endX = 0;
    int endY = 0;

    PointF centerFace = new PointF();

    Canvas canvas = new Canvas(mutableBitmap);
    canvas.drawBitmap(mutableBitmap, new Matrix(), null);

    // Calculates minimum box to fit all detected faces
    for (int i = 0; i < faceCount; i++) {
        FaceDetector.Face face = faces[i];

        // Eyes distance * 3 usually fits an entire face
        int faceSize = (int) (face.eyesDistance() * 3);

        if (SizeMode.FaceMarginPx.equals(mSizeMode)) {
            faceSize += mFaceMarginPx * 2; // *2 for top and down/right and left effect
        }
        else if (SizeMode.EyeDistanceFactorMargin.equals(mSizeMode)) {
            faceSize += face.eyesDistance() * mEyeDistanceFactorMargin;
        }

        faceSize = Math.max(faceSize, mFaceMinSize);

        face.getMidPoint(centerFace);

        if (debug) {
            canvas.drawPoint(centerFace.x, centerFace.y, mDebugPainter);
            canvas.drawCircle(centerFace.x, centerFace.y, face.eyesDistance() * 1.5f, mDebugPainter);
        }

        int tInitX = (int) (centerFace.x - faceSize / 2);
        int tInitY = (int) (centerFace.y - faceSize / 2);
        tInitX = Math.max(0, tInitX);
        tInitY = Math.max(0, tInitY);

        int tEndX = tInitX + faceSize;
        int tEndY = tInitY + faceSize;
        tEndX = Math.min(tEndX, mutableBitmap.getWidth());
        tEndY = Math.min(tEndY, mutableBitmap.getHeight());

        initX = Math.min(initX, tInitX);
        initY = Math.min(initY, tInitY);
        endX = Math.max(endX, tEndX);
        endY = Math.max(endY, tEndY);
    }

    int sizeX = endX - initX;
    int sizeY = endY - initY;

    if (sizeX + initX > mutableBitmap.getWidth()) {
        sizeX = mutableBitmap.getWidth() - initX;
    }
    if (sizeY + initY > mutableBitmap.getHeight()) {
        sizeY = mutableBitmap.getHeight() - initY;
    }

    Point init = new Point(initX, initY);
    Point end = new Point(initX + sizeX, initY + sizeY);

    return new CropResult(mutableBitmap, init, end);
}
 
Example #30
Source File: CropImageActivity.java    From Android-RTEditor with Apache License 2.0
private void handleFace(FaceDetector.Face f) {

            PointF midPoint = new PointF();

            int r = ((int) (f.eyesDistance() * mScale)) * 2;
            f.getMidPoint(midPoint);
            midPoint.x *= mScale;
            midPoint.y *= mScale;

            int midX = (int) midPoint.x;
            int midY = (int) midPoint.y;

            HighlightView hv = new HighlightView(mImageView);

            int width = mBitmap.getWidth();
            int height = mBitmap.getHeight();

            Rect imageRect = new Rect(0, 0, width, height);

            RectF faceRect = new RectF(midX, midY, midX, midY);
            faceRect.inset(-r, -r);
            if (faceRect.left < 0) {
                faceRect.inset(-faceRect.left, -faceRect.left);
            }

            if (faceRect.top < 0) {
                faceRect.inset(-faceRect.top, -faceRect.top);
            }

            if (faceRect.right > imageRect.right) {
                faceRect.inset(faceRect.right - imageRect.right, faceRect.right
                        - imageRect.right);
            }

            if (faceRect.bottom > imageRect.bottom) {
                faceRect.inset(faceRect.bottom - imageRect.bottom,
                        faceRect.bottom - imageRect.bottom);
            }

            hv.setup(mImageMatrix, imageRect, faceRect, mCircleCrop,
                    mAspectX != 0 && mAspectY != 0);

            mImageView.add(hv);
        }