org.opencv.android.Utils Java Examples

The following examples show how to use org.opencv.android.Utils. You can vote up the examples you like or vote down the ones you don't like, and go to the original project or source file by following the link above each example. You may also check out the related API usage on the sidebar.
Example #1
Source File: MainActivity.java    From MOAAP with MIT License 8 votes vote down vote up
/**
 * Finds contours in {@code originalMat} via Canny edge detection, draws each
 * contour filled with a random color, and shows the result in {@code imageView}.
 */
void Contours() {
    Mat grayMat = new Mat();
    Mat cannyEdges = new Mat();
    Mat hierarchy = new Mat();

    //A list to store all the contours
    List<MatOfPoint> contourList = new ArrayList<MatOfPoint>();

    //Converting the image to grayscale
    Imgproc.cvtColor(originalMat, grayMat, Imgproc.COLOR_BGR2GRAY);

    //Fix: run Canny on the grayscale image. Previously grayMat was computed but
    //never used and Canny was applied to the multi-channel originalMat instead.
    Imgproc.Canny(grayMat, cannyEdges, 10, 100);

    //finding contours (RETR_LIST: flat list, no hierarchy used)
    Imgproc.findContours(cannyEdges, contourList, hierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);

    //Drawing contours on a new image. Use zeros() so the background is black
    //instead of uninitialized memory (Mat.create does not clear the buffer).
    Mat contours = Mat.zeros(cannyEdges.rows(), cannyEdges.cols(), CvType.CV_8UC3);
    Random r = new Random();
    for (int i = 0; i < contourList.size(); i++) {
        //thickness -1 fills the contour with a random color
        Imgproc.drawContours(contours, contourList, i, new Scalar(r.nextInt(255), r.nextInt(255), r.nextInt(255)), -1);
    }
    //Converting Mat back to Bitmap
    Utils.matToBitmap(contours, currentBitmap);
    imageView.setImageBitmap(currentBitmap);
}
 
Example #2
Source File: MainActivity.java    From MOAAP with MIT License 7 votes vote down vote up
/**
 * Computes a Difference-of-Gaussians of {@code originalMat}, amplifies and
 * inverse-binarizes it, and displays the result in {@code imageView}.
 */
public void DifferenceOfGaussian() {
    // Work on a grayscale copy of the source image.
    Mat gray = new Mat();
    Imgproc.cvtColor(originalMat, gray, Imgproc.COLOR_BGR2GRAY);

    // Blur with two different kernel sizes (same sigma).
    Mat narrowBlur = new Mat();
    Mat wideBlur = new Mat();
    Imgproc.GaussianBlur(gray, narrowBlur, new Size(15, 15), 5);
    Imgproc.GaussianBlur(gray, wideBlur, new Size(21, 21), 5);

    // DoG = |blur(15) - blur(21)|.
    Mat dog = new Mat();
    Core.absdiff(narrowBlur, wideBlur, dog);

    // Amplify the faint difference, then inverse binary threshold at 50.
    Core.multiply(dog, new Scalar(100), dog);
    Imgproc.threshold(dog, dog, 50, 255, Imgproc.THRESH_BINARY_INV);

    // Render the result.
    Utils.matToBitmap(dog, currentBitmap);
    imageView.setImageBitmap(currentBitmap);
}
 
Example #3
Source File: MainActivity.java    From OCR-Test with Apache License 2.0 6 votes vote down vote up
/**
 * Binarizes the given bitmap (grayscale + blur + Otsu threshold), shows it in
 * {@code imageView}, and returns it.
 *
 * NOTE(review): {@code newBitmap} is an alias of {@code compressImage}, so
 * matToBitmap overwrites the caller's bitmap in place — confirm callers expect
 * the input to be mutated rather than receiving a fresh copy.
 *
 * @param compressImage source bitmap; also reused as the output buffer
 * @return the same bitmap instance, now black & white
 */
public Bitmap convertToBlackWhite(Bitmap compressImage)
{
    Log.d("CV", "Before converting to black");
    Mat imageMat = new Mat();
    Utils.bitmapToMat(compressImage, imageMat);
    // Grayscale plus a light 3x3 blur to suppress noise before thresholding.
    Imgproc.cvtColor(imageMat, imageMat, Imgproc.COLOR_BGR2GRAY);
    Imgproc.GaussianBlur(imageMat, imageMat, new Size(3, 3), 0);
    //Imgproc.adaptiveThreshold(imageMat, imageMat, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C, Imgproc.THRESH_BINARY_INV, 5, 4);
    //Imgproc.medianBlur(imageMat, imageMat, 3);
    // THRESH_OTSU chooses the threshold automatically; the explicit 0 is ignored.
    Imgproc.threshold(imageMat, imageMat, 0, 255, Imgproc.THRESH_OTSU);

    Bitmap newBitmap = compressImage;
    Utils.matToBitmap(imageMat, newBitmap);
    imageView.setImageBitmap(newBitmap);
    Log.d("CV", "After converting to black");


    return newBitmap;

}
 
Example #4
Source File: PyramidActivity.java    From MOAAP with MIT License 6 votes vote down vote up
/**
 * Handles the photo-picker result: decodes the chosen image into {@code src}
 * and enables the pyramid operation buttons.
 */
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent imageReturnedIntent) {
    super.onActivityResult(requestCode, resultCode, imageReturnedIntent);

    switch(requestCode) {
        case SELECT_PHOTO:
            if(resultCode == RESULT_OK){
                try {
                    final Uri imageUri = imageReturnedIntent.getData();
                    final InputStream imageStream = getContentResolver().openInputStream(imageUri);
                    try {
                        final Bitmap selectedImage = BitmapFactory.decodeStream(imageStream);
                        src = new Mat(selectedImage.getHeight(), selectedImage.getWidth(), CvType.CV_8UC4);
                        Utils.bitmapToMat(selectedImage, src);
                        srcSelected = true;
                        bGaussianPyrUp.setEnabled(true);
                        bGaussianPyrDown.setEnabled(true);
                        bLaplacianPyr.setEnabled(true);
                    } finally {
                        // Fix: the stream was previously never closed (resource leak).
                        try { imageStream.close(); } catch (Exception ignored) { /* best-effort close */ }
                    }
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                }
            }
            break;
    }
}
 
Example #5
Source File: MainActivity.java    From MOAAP with MIT License 6 votes vote down vote up
/**
 * Runs the default HOG people detector on {@code originalMat}, draws a box
 * around each detection, and shows the result in {@code imageView}.
 */
void HOGDescriptor() {
    // Detection runs on a grayscale copy of the source image.
    Mat gray = new Mat();
    Imgproc.cvtColor(originalMat, gray, Imgproc.COLOR_BGR2GRAY);

    // HOG detector preloaded with OpenCV's default people model.
    HOGDescriptor hog = new HOGDescriptor();
    hog.setSVMDetector(HOGDescriptor.getDefaultPeopleDetector());

    MatOfRect detections = new MatOfRect();
    MatOfDouble confidences = new MatOfDouble();
    hog.detectMultiScale(gray, detections, confidences);

    // Draw each detection rectangle on a copy of the original (color) image.
    Mat annotated = new Mat();
    originalMat.copyTo(annotated);
    for (Rect box : detections.toArray()) {
        Imgproc.rectangle(annotated, box.tl(), box.br(), new Scalar(100), 3);
    }

    // Convert back to a bitmap for display.
    Utils.matToBitmap(annotated, currentBitmap);
    imageView.setImageBitmap(currentBitmap);
}
 
Example #6
Source File: MainActivity.java    From MOAAP with MIT License 6 votes vote down vote up
/**
 * Handles the photo-picker result: decodes the chosen image, applies the
 * filter selected by {@code ACTION_MODE}, and shows original + processed
 * images side by side.
 */
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent imageReturnedIntent) {
    //Put it there, just in case:)
    super.onActivityResult(requestCode, resultCode, imageReturnedIntent);

    switch(requestCode) {
        case SELECT_PHOTO:
            if(resultCode == RESULT_OK && read_external_storage_granted){
                try {
                    final Uri imageUri = imageReturnedIntent.getData();
                    // NOTE(review): imageStream is never closed — resource leak; verify.
                    final InputStream imageStream = getContentResolver().openInputStream(imageUri);
                    final Bitmap selectedImage = BitmapFactory.decodeStream(imageStream);
                    src = new Mat(selectedImage.getHeight(), selectedImage.getWidth(), CvType.CV_8UC4);
                    Utils.bitmapToMat(selectedImage, src);
                    src_gray = new Mat(selectedImage.getHeight(), selectedImage.getWidth(), CvType.CV_8UC1);
                    switch (ACTION_MODE) {
                        case HomeActivity.GAUSSIAN_BLUR:
                            Imgproc.GaussianBlur(src, src, new Size(9, 9), 0);
                            break;
                        case HomeActivity.MEAN_BLUR:
                            Imgproc.blur(src, src, new Size(9, 9));
                            break;
                        case HomeActivity.MEDIAN_BLUR:
                            Imgproc.medianBlur(src, src, 9);
                            break;
                        case HomeActivity.SHARPEN:
                            // 3x3 sharpening kernel: center 5, cross -1, corners 0.
                            Mat kernel = new Mat(3, 3, CvType.CV_16SC1);
                            //int[] values = {0, -1, 0, -1, 5, -1, 0, -1, 0};
                            Log.d("imageType", CvType.typeToString(src.type()) + "");
                            kernel.put(0, 0, 0, -1, 0, -1, 5, -1, 0, -1, 0);
                            Imgproc.filter2D(src, src, src_gray.depth(), kernel);
                            break;
                        case HomeActivity.DILATE:
                            // Binarize, dilate with a 3x3 rectangle, convert back to RGBA for display.
                            Imgproc.cvtColor(src, src_gray, Imgproc.COLOR_BGR2GRAY);
                            Imgproc.threshold(src_gray, src_gray, 100, 255, Imgproc.THRESH_BINARY);
                            Mat kernelDilate = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(3, 3));
                            Imgproc.dilate(src_gray, src_gray, kernelDilate);
                            Imgproc.cvtColor(src_gray, src, Imgproc.COLOR_GRAY2RGBA, 4);
                            break;
                        case HomeActivity.ERODE:
                            // Binarize, erode with a 5x5 ellipse, convert back to RGBA for display.
                            Imgproc.cvtColor(src, src_gray, Imgproc.COLOR_BGR2GRAY);
                            Imgproc.threshold(src_gray, src_gray, 100, 255, Imgproc.THRESH_BINARY);
                            Mat kernelErode = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(5, 5));
                            Imgproc.erode(src_gray, src_gray, kernelErode);
                            Imgproc.cvtColor(src_gray, src, Imgproc.COLOR_GRAY2RGBA, 4);
                            break;
                        case HomeActivity.THRESHOLD:
                            Imgproc.cvtColor(src, src_gray, Imgproc.COLOR_BGR2GRAY);
                            Imgproc.threshold(src_gray, src_gray, 100, 255, Imgproc.THRESH_BINARY);
                            Imgproc.cvtColor(src_gray, src, Imgproc.COLOR_GRAY2RGBA, 4);
                            break;
                        case HomeActivity.ADAPTIVE_THRESHOLD:
                            // Gaussian-weighted adaptive threshold over a 3x3 neighborhood.
                            Imgproc.cvtColor(src, src_gray, Imgproc.COLOR_BGR2GRAY);
                            Imgproc.adaptiveThreshold(src_gray, src_gray, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C, Imgproc.THRESH_BINARY, 3, 0);
                            Imgproc.cvtColor(src_gray, src, Imgproc.COLOR_GRAY2RGBA, 4);
                            break;
                    }
                    Bitmap processedImage = Bitmap.createBitmap(src.cols(), src.rows(), Bitmap.Config.ARGB_8888);
                    Log.i("imageType", CvType.typeToString(src.type()) + "");
                    Utils.matToBitmap(src, processedImage);
                    ivImage.setImageBitmap(selectedImage);
                    ivImageProcessed.setImageBitmap(processedImage);
                    Log.i("process", "process done");
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                }
            }
            break;
    }
}
 
Example #7
Source File: ProcessHelper.java    From OpenCV-android with Apache License 2.0 6 votes vote down vote up
/**
 * Harris corner detection.
 *
 * @param origin   source bitmap
 * @param callback receives the annotated bitmap, or the error message on failure
 */
public void harris(Bitmap origin, ProcessCallback callback) {
    if (origin == null) {
        return;
    }
    try {
        Bitmap output = Bitmap.createBitmap(origin.getWidth(), origin.getHeight(), Bitmap.Config.RGB_565);
        Utils.bitmapToMat(origin, rgbMat);
        Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);

        // Raw Harris response, then normalized to 0..255 for thresholding.
        Mat corners = new Mat();
        Mat response = new Mat();
        Mat responseNorm = new Mat();
        Imgproc.cornerHarris(grayMat, response, 2, 3, 0.04);
        Core.normalize(response, responseNorm, 0, 255, Core.NORM_MINMAX);
        Core.convertScaleAbs(responseNorm, corners);

        // Circle every strong response (> 250); a higher cutoff draws fewer points.
        Random rng = new Random();
        for (int col = 0; col < responseNorm.cols(); col++) {
            for (int row = 0; row < responseNorm.rows(); row++) {
                double[] strength = responseNorm.get(row, col);
                if (strength[0] > 250) {
                    Imgproc.circle(corners, new Point(col, row), 5, new Scalar(rng.nextInt(255)), 2);
                }
            }
        }
        Utils.matToBitmap(corners, output);
        callback.onSuccess(output);
    } catch (Exception e) {
        callback.onFailed(e.getMessage());
    }
}
 
Example #8
Source File: ProcessHelper.java    From OpenCV-android with Apache License 2.0 6 votes vote down vote up
/**
 * Canny edge detection.
 *
 * @param origin   source bitmap
 * @param callback receives the edge bitmap, or the error message on failure
 */
public void canny(Bitmap origin, ProcessCallback callback) {
    if (origin == null) {
        return;
    }
    try {
        Bitmap output = Bitmap.createBitmap(origin.getWidth(), origin.getHeight(), Bitmap.Config.RGB_565);
        Utils.bitmapToMat(origin, rgbMat);
        Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);

        // Hysteresis thresholds: 50 (low) / 300 (high).
        Mat edges = new Mat();
        Imgproc.Canny(grayMat, edges, 50, 300);

        Utils.matToBitmap(edges, output);
        callback.onSuccess(output);
    } catch (Exception e) {
        callback.onFailed(e.getMessage());
    }
}
 
Example #9
Source File: FtcTestOpenCv.java    From FtcSamples with MIT License 6 votes vote down vote up
/**
 * This method is called when the camera view is started. It will allocate and initialize
 * some global resources.
 *
 * @param width specifies the width of the camera view.
 * @param height specifies the height of the camera view.
 */
@Override
public void onCameraViewStarted(int width, int height)
{
    // Reset detection state and frame statistics.
    faceRects = new MatOfRect();
    totalProcessingTime = 0;
    framesProcessed = 0;

    // Load the overlay (mustache) image from resources into a Mat.
    overlayImage = new Mat();
    Bitmap overlayBitmap =
            BitmapFactory.decodeResource(activity.getResources(), R.drawable.mustache);
    Utils.bitmapToMat(overlayBitmap, overlayImage);
    //
    // Don't allow overlay unless overlay image has the rgba channels.
    //
    if (overlayImage.channels() < 4) doOverlayImage = false;
}
 
Example #10
Source File: MainActivity.java    From OpenCV-Android-Object-Detection with MIT License 6 votes vote down vote up
/**
 * Initializes the ORB detector/descriptor/matcher and precomputes keypoints
 * and descriptors for the reference image bundled in assets ("a.jpeg").
 *
 * @throws IOException if the asset cannot be opened or read
 */
private void initializeOpenCVDependencies() throws IOException {
    mOpenCvCameraView.enableView();
    detector = FeatureDetector.create(FeatureDetector.ORB);
    descriptor = DescriptorExtractor.create(DescriptorExtractor.ORB);
    matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
    img1 = new Mat();
    AssetManager assetManager = getAssets();
    InputStream istr = assetManager.open("a.jpeg");
    Bitmap bitmap;
    try {
        bitmap = BitmapFactory.decodeStream(istr);
    } finally {
        // Fix: the asset stream was previously never closed (resource leak).
        istr.close();
    }
    Utils.bitmapToMat(bitmap, img1);
    Imgproc.cvtColor(img1, img1, Imgproc.COLOR_RGB2GRAY);
    img1.convertTo(img1, 0); //converting the image to match with the type of the cameras image
    descriptors1 = new Mat();
    keypoints1 = new MatOfKeyPoint();
    detector.detect(img1, keypoints1);
    descriptor.compute(img1, keypoints1, descriptors1);

}
 
Example #11
Source File: OCRProcessor.java    From Camdroid with Apache License 2.0 6 votes vote down vote up
/**
 * Renders the OCR output mat to {@code bmp} and overlays up to {@code lines}
 * lines of the recognized text, then hands the bitmap to the drawer.
 */
@Override
protected void draw() {
    Utils.matToBitmap(out, this.bmp);
    Canvas canvas = new Canvas(this.bmp);

    int y = bounds.height();
    int c = 1;
    for (String line : simpleText.split("\n")) {
        canvas.drawText(line, bounds.width(), y, paint);
        y = y + bounds.height();
        // Stop once the configured number of lines has been drawn.
        if (c >= lines)
            break;
        // Fix: c was never incremented, so the `lines` cap was never reached
        // (for lines > 1) and every line of text was drawn.
        c++;
    }

    this.drawer.drawBitmap(this.bmp);

}
 
Example #12
Source File: FaceRecognitionActivity.java    From AndroidFaceRecognizer with MIT License 6 votes vote down vote up
/**
 * Called when a face has been captured from the camera: stores the face Mat,
 * shows it in the UI, and reveals the recognize button on first capture.
 *
 * @param faceMat the captured face image (project Mat; format set by caller)
 */
private void onFaceCaptured(Mat faceMat){
	capturingImage = false;
	// The recognize button only needs to animate in on the very first capture.
	final boolean willRecognizeButtonAppear = capturedMat == null;
	capturedMat = faceMat;
	final Bitmap bmp = Bitmap.createBitmap(faceMat.cols(), faceMat.rows(), Bitmap.Config.RGB_565);
	Utils.matToBitmap(faceMat, bmp);
	// All view updates must happen on the UI thread.
	FaceRecognitionActivity.this.runOnUiThread(new Runnable() {
		
		@Override
		public void run() {
			capturedImage.setImageBitmap(bmp);
			captureButton.setBackgroundResource(R.drawable.capturestart);
			captureButton.setText("Start Capturing");
			if(willRecognizeButtonAppear) {
				bringRecognizeButtonAnimatedly();
			}
		}
	});
}
 
Example #13
Source File: TensorFlow.java    From Android-Face-Recognition-with-Deep-Learning-Library with Apache License 2.0 6 votes vote down vote up
/**
 * Converts a face Mat into a normalized float array suitable as TensorFlow
 * input: per-channel (v - imageMean) / imageStd, RGB order.
 *
 * @param img input image; rendered into an inputSize x inputSize bitmap
 * @return flattened pixel array of length inputSize * inputSize * channels
 */
private float[] getPixels(Mat img){
    Bitmap bmp = Bitmap.createBitmap(inputSize, inputSize, Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(img, bmp);
    int[] intValues = new int[inputSize * inputSize];
    bmp.getPixels(intValues, 0, inputSize, 0, 0, inputSize, inputSize);

    float[] floatValues = new float[inputSize * inputSize * channels];
    for (int i = 0; i < intValues.length; ++i) {
        final int val = intValues[i];
        // Fix: use `channels` as the per-pixel stride so indexing matches the
        // allocation above (the stride was hard-coded to 3, which over-runs
        // the array whenever channels < 3). Writes R, G, B from the ARGB int.
        floatValues[i * channels + 0] = (((float)((val >> 16) & 0xFF)) - imageMean) / imageStd;
        floatValues[i * channels + 1] = (((float)((val >> 8) & 0xFF)) - imageMean) / imageStd;
        floatValues[i * channels + 2] = (((float)(val & 0xFF)) - imageMean) / imageStd;
    }

    return floatValues;
}
 
Example #14
Source File: MainActivity.java    From MOAAP with MIT License 6 votes vote down vote up
/**
 * Computes the Sobel gradient magnitude (approximated as the weighted sum of
 * absolute x/y gradients) of {@code originalMat} and displays it.
 */
void Sobel() {
    // Gradients are computed on a grayscale copy.
    Mat gray = new Mat();
    Imgproc.cvtColor(originalMat, gray, Imgproc.COLOR_BGR2GRAY);

    // Signed 16-bit gradients in the horizontal and vertical directions.
    Mat gradX = new Mat();
    Mat gradY = new Mat();
    Imgproc.Sobel(gray, gradX, CvType.CV_16S, 1, 0, 3, 1, 0);
    Imgproc.Sobel(gray, gradY, CvType.CV_16S, 0, 1, 3, 1, 0);

    // Absolute gradients, scaled back to 8-bit.
    Mat absGradX = new Mat();
    Mat absGradY = new Mat();
    Core.convertScaleAbs(gradX, absGradX);
    Core.convertScaleAbs(gradY, absGradY);

    // Combine: 0.5*|gx| + 0.5*|gy| + 1.
    Mat sobel = new Mat();
    Core.addWeighted(absGradX, 0.5, absGradY, 0.5, 1, sobel);

    // Convert back to a bitmap for display.
    Utils.matToBitmap(sobel, currentBitmap);
    imageView.setImageBitmap(currentBitmap);
}
 
Example #15
Source File: DocumentDetector.java    From CVScanner with GNU General Public License v3.0 6 votes vote down vote up
/**
 * Runs document detection on a camera frame: finds contours in the frame's
 * bitmap and tries to fit a quadrilateral (the document outline) to them.
 *
 * @param frame input frame; supplies both the bitmap and the metadata size
 * @return a Document wrapping the frame and the detected quad, or null if no
 *         suitable quadrilateral was found
 */
Document detectDocument(Frame frame){
    // Dimensions come from the frame metadata, not the bitmap itself.
    Size imageSize = new Size(frame.getMetadata().getWidth(), frame.getMetadata().getHeight());
    Mat src = new Mat();
    Utils.bitmapToMat(frame.getBitmap(), src);
    List<MatOfPoint> contours = CVProcessor.findContours(src);
    src.release();

    if(!contours.isEmpty()){
        CVProcessor.Quadrilateral quad = CVProcessor.getQuadrilateral(contours, imageSize);

        if(quad != null){
            // Scale the quad corners back up to the original image size
            // (findContours presumably works on a downscaled copy — verify).
            quad.points = CVProcessor.getUpscaledPoints(quad.points, CVProcessor.getScaleRatio(imageSize));
            return new Document(frame, quad);
        }
    }

    return null;
}
 
Example #16
Source File: MainActivity.java    From MOAAP with MIT License 5 votes vote down vote up
/**
 * Detects circles in {@code originalMat} with the Hough gradient transform
 * and draws them on a blank single-channel image shown in {@code imageView}.
 */
void HoughCircles() {
    Mat grayMat = new Mat();
    Mat cannyEdges = new Mat();
    Mat circles = new Mat();

    //Converting the image to grayscale
    Imgproc.cvtColor(originalMat, grayMat, Imgproc.COLOR_BGR2GRAY);

    Imgproc.Canny(grayMat, cannyEdges, 10, 100);

    // Min distance between circle centers = rows / 15.
    Imgproc.HoughCircles(cannyEdges, circles, Imgproc.CV_HOUGH_GRADIENT, 1, cannyEdges.rows() / 15);//, grayMat.rows() / 8);

    // NOTE(review): create() does not zero the buffer, so the background may
    // contain uninitialized data — confirm, or use Mat.zeros instead.
    Mat houghCircles = new Mat();
    houghCircles.create(cannyEdges.rows(), cannyEdges.cols(), CvType.CV_8UC1);

    //Drawing each detected circle on the image
    for (int i = 0; i < circles.cols(); i++) {
        // Each result is (center_x, center_y, radius).
        double[] parameters = circles.get(0, i);
        double x, y;
        int r;

        x = parameters[0];
        y = parameters[1];
        r = (int) parameters[2];

        Point center = new Point(x, y);

        // On a single-channel image only the first Scalar component (255) is used.
        Imgproc.circle(houghCircles, center, r, new Scalar(255, 0, 0), 1);
    }

    //Converting Mat back to Bitmap
    Utils.matToBitmap(houghCircles, currentBitmap);
    imageView.setImageBitmap(currentBitmap);
}
 
Example #17
Source File: MainActivity.java    From MOAAP with MIT License 5 votes vote down vote up
/**
 * Detects Harris corners in {@code originalMat}, circles the strong responses,
 * and displays the annotated corner map in {@code imageView}.
 */
void HarrisCorner() {
    // Corner detection runs on a grayscale copy.
    Mat gray = new Mat();
    Imgproc.cvtColor(originalMat, gray, Imgproc.COLOR_BGR2GRAY);

    // Raw Harris response (block size 2, aperture 3, k = 0.04).
    Mat response = new Mat();
    Imgproc.cornerHarris(gray, response, 2, 3, 0.04);

    // Normalize the response to 0..255 and convert to 8-bit for drawing.
    Mat responseNorm = new Mat();
    Mat corners = new Mat();
    Core.normalize(response, responseNorm, 0, 255, Core.NORM_MINMAX);
    Core.convertScaleAbs(responseNorm, corners);

    // Circle every pixel whose normalized response exceeds 150.
    Random rng = new Random();
    for (int col = 0; col < responseNorm.cols(); col++) {
        for (int row = 0; row < responseNorm.rows(); row++) {
            double[] strength = responseNorm.get(row, col);
            if (strength[0] > 150)
                Imgproc.circle(corners, new Point(col, row), 5, new Scalar(rng.nextInt(255)), 2);
        }
    }

    // Convert back to a bitmap for display.
    Utils.matToBitmap(corners, currentBitmap);
    imageView.setImageBitmap(currentBitmap);
}
 
Example #18
Source File: MainActivity.java    From MOAAP with MIT License 5 votes vote down vote up
/**
 * Detects line segments in {@code originalMat} with the probabilistic Hough
 * transform and draws them on a blank single-channel image.
 */
void HoughLines() {

        Mat grayMat = new Mat();
        Mat cannyEdges = new Mat();
        Mat lines = new Mat();

        //Converting the image to grayscale
        Imgproc.cvtColor(originalMat, grayMat, Imgproc.COLOR_BGR2GRAY);

        Imgproc.Canny(grayMat, cannyEdges, 10, 100);

        // rho = 1 px, theta = 1 degree, threshold 50, minLineLength 20, maxLineGap 20.
        Imgproc.HoughLinesP(cannyEdges, lines, 1, Math.PI / 180, 50, 20, 20);

        // NOTE(review): this iterates lines as a 1xN Mat (lines.get(0, i)).
        // Newer OpenCV versions return an Nx1 result — confirm against the
        // OpenCV version this project builds with.
        Mat houghLines = new Mat();
        houghLines.create(cannyEdges.rows(), cannyEdges.cols(), CvType.CV_8UC1);

        //Drawing lines on the image
        for (int i = 0; i < lines.cols(); i++) {
            // Each result is (x1, y1, x2, y2) — the segment's endpoints.
            double[] points = lines.get(0, i);
            double x1, y1, x2, y2;

            x1 = points[0];
            y1 = points[1];
            x2 = points[2];
            y2 = points[3];

            Point pt1 = new Point(x1, y1);
            Point pt2 = new Point(x2, y2);

            // On a single-channel image only the first Scalar component (255) is used.
            Imgproc.line(houghLines, pt1, pt2, new Scalar(255, 0, 0), 1);
        }

        //Converting Mat back to Bitmap
        Utils.matToBitmap(houghLines, currentBitmap);
        imageView.setImageBitmap(currentBitmap);

    }
 
Example #19
Source File: FtcVuforia.java    From FtcSamples with MIT License 5 votes vote down vote up
/**
 * This method gets a frame from the frame queue and returns the image that matches the format specified by the
 * configVideoSource method.
 *
 * @param frame specifies the frame object to hold image.
 * @return true if success, false otherwise.
 */
@Override
public boolean getFrame(Mat frame)
{
    boolean success = false;

    try
    {
        // Blocks until a frame is available on the Vuforia frame queue.
        VuforiaLocalizer.CloseableFrame closeableFrame = localizer.getFrameQueue().take();

        // A closeable frame may carry several images; pick the one matching
        // the configured size and RGB565 pixel format.
        for (int i = 0; i < closeableFrame.getNumImages(); i++)
        {
            Image image = closeableFrame.getImage(i);
            if (image.getWidth() == imageWidth && image.getHeight() == imageHeight &&
                    image.getFormat() == PIXEL_FORMAT.RGB565)
            {
                // Copy pixels into a bitmap, then convert into the caller's Mat.
                Bitmap bm = Bitmap.createBitmap(image.getWidth(), image.getHeight(), Bitmap.Config.RGB_565);
                bm.copyPixelsFromBuffer(image.getPixels());
                Utils.bitmapToMat(bm, frame);
                break;
            }
        }

        closeableFrame.close();
        // NOTE(review): success is set even if no image matched the expected
        // format — the caller then receives an untouched Mat; confirm intended.
        success = true;
    }
    catch (InterruptedException e)
    {
        e.printStackTrace();
    }

    return success;
}
 
Example #20
Source File: FaceRecognitionActivity.java    From AndroidFaceRecognizer with MIT License 5 votes vote down vote up
/**
 * Runs native face recognition on the captured face in a background thread;
 * if the returned id matches a known person, shows that person's stored face
 * image and name on the UI thread.
 */
private void recognize(){
  	new Thread(new Runnable() {
	
	@Override
	public void run() {
		// Native call: returns the matched person id (JNI, runs off the UI thread).
		int result = faceRecognition(capturedMat.getNativeObjAddr(), persons.size());
		for(int i = 0; i < persons.size(); i++) {
			int id = (int)persons.get(i).getId();
			if(result == id) {
				final int index = i;
				// View updates must happen on the UI thread.
				FaceRecognitionActivity.this.runOnUiThread(new Runnable() {
					
					@Override
					public void run() {
						ImageView image = (ImageView)findViewById(R.id.frresultimage);
						// Load the first stored face image of the matched person.
						Mat m = Highgui.imread(persons.get(index).getFacesFolderPath()+"/1.jpg");
						final Bitmap bmp = Bitmap.createBitmap(m.cols(), m.rows(), Bitmap.Config.RGB_565);
						Utils.matToBitmap(m, bmp);
						image.setImageBitmap(bmp);
						TextView resultText = (TextView)findViewById(R.id.frresulttextview);
						resultText.setText(persons.get(index).getName());
						if(!showingResults) {
							showResults();
						}
					}
				});
			}
		}
	}
}).start();
  }
 
Example #21
Source File: FaceDetectionActivity.java    From AndroidFaceRecognizer with MIT License 5 votes vote down vote up
/**
 * Stores a newly captured face Mat in the first free slot, shows its thumbnail,
 * and — once 10 faces have been collected — reveals the name/save controls.
 *
 * @param faceMat the captured face image
 */
private void onFaceCaptured(Mat faceMat){
	// Find the first slot that does not yet hold a captured face.
	for(int i = 0; i < faceImages.size(); i++) {
		if(!capturedMats.containsKey(i)) {
			capturedMats.put(i, faceMat);
			final Bitmap bmp = Bitmap.createBitmap(faceMat.cols(), faceMat.rows(), Bitmap.Config.RGB_565);
			Utils.matToBitmap(faceMat, bmp);
			final int index = i;
			// Thumbnail + delete icon updates run on the UI thread.
			FaceDetectionActivity.this.runOnUiThread(new Runnable() {
				
				@Override
				public void run() {
					faceImages.get(index).setImageBitmap(bmp);
					deleteIcons.get(index).setVisibility(View.VISIBLE);
				}
			});
			break;
		}
	}
	// All 10 capture slots filled: stop capturing and reveal the save controls.
	if(capturedMats.size() == 10) {
		FaceDetectionActivity.this.runOnUiThread(new Runnable() {
			
			@Override
			public void run() {
				nameEdit.setVisibility(View.VISIBLE);
				saveButton.setVisibility(View.VISIBLE);
				capturingImage = false;
				captureButton.setImageResource(R.drawable.capturestart);
				captureText.setText("Start Capturing");
				if(!isTraining) {
					yTranslateAnimation(deleteButton, 0, deleteButtonSecondPos - deleteButtonFirstPos, true, true);
				}
				yTranslateAnimation(nameEdit, screenHeight, 0, true, false);
				yTranslateAnimation(saveButton, 3*screenHeight, 0, true, false);
			}
		});
	}
}
 
Example #22
Source File: PreProcessor.java    From Android-Face-Recognition-with-Deep-Learning-Library with Apache License 2.0 5 votes vote down vote up
/**
 * Detects faces in the first image of the current batch and stores the result
 * in {@code faces}. Uses either the project's OpenCV-based detector or
 * Android's built-in FaceDetector, depending on the user preference.
 *
 * @param preprocessingMode RECOGNITION additionally rotates the working image
 *                          to the angle at which the face was detected
 */
public void setFaces(PreProcessorFactory.PreprocessingMode preprocessingMode) {
    List<Mat> images = getImages();

    PreferencesHelper preferencesHelper = new PreferencesHelper(context);
    if (preferencesHelper.getDetectionMethod()){
        // Preferred path: the project's own (OpenCV-based) face detection.
        faces = faceDetection.getFaces(images.get(0));
        angle = faceDetection.getAngle();
    } else {
        // Fallback: Android's FaceDetector, which requires an RGB_565 bitmap
        // and reports faces as midpoint + eye distance.
        Mat img = images.get(0);
        FaceDetector faceDetector = new FaceDetector(img.cols(), img.rows(), 1);
        Bitmap bmp = Bitmap.createBitmap(img.cols(), img.rows(), Bitmap.Config.RGB_565);
        Utils.matToBitmap(img, bmp);
        FaceDetector.Face[] facesAndroid = new FaceDetector.Face[1];
        if (faceDetector.findFaces(bmp, facesAndroid) > 0){
            faces = new Rect[facesAndroid.length];
            for (int i=0; i<facesAndroid.length; i++){
                PointF pointF = new PointF();
                facesAndroid[i].getMidPoint(pointF);
                // Convert midpoint + eye distance into a face rectangle.
                // NOTE(review): 1.34 / 1.12 / 2.77 look like empirically tuned
                // scale factors — source/derivation not visible here; confirm.
                int xWidth = (int) (1.34 * facesAndroid[i].eyesDistance());
                int yWidth = (int) (1.12 * facesAndroid[i].eyesDistance());
                int dist = (int) (2.77 * facesAndroid[i].eyesDistance());
                Rect face = new Rect((int) pointF.x - xWidth, (int) pointF.y - yWidth, dist, dist);
                faces[i] = face;
            }
        }
    }

    if (preprocessingMode == PreProcessorFactory.PreprocessingMode.RECOGNITION && preferencesHelper.getDetectionMethod()){
        // Change the image rotation to the angle where the face was detected
        images.remove(0);
        images.add(faceDetection.getImg());
        setImages(images);
    }
}
 
Example #23
Source File: ImageSaveTask.java    From CVScanner with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Override this method to perform a computation on a background thread. The
 * specified parameters are the parameters passed to {@link #execute}
 * by the caller of this task.
 * <p/>
 * This method can call {@link #publishProgress} to publish updates
 * on the UI thread.
 *
 * @param params The parameters of the task.
 * @return A result, defined by the subclass of this task.
 * @see #onPreExecute()
 * @see #onPostExecute
 * @see #publishProgress
 */
@Override
protected String doInBackground(Void... params) {
    // Convert the captured bitmap into a Mat and free the bitmap immediately.
    Size imageSize = new Size(image.getWidth(), image.getHeight());
    Mat imageMat = new Mat(imageSize, CvType.CV_8UC4);
    Utils.bitmapToMat(image, imageMat);

    image.recycle();

    // Perspective-correct the document using the 4 detected corner points.
    Mat croppedImage = CVProcessor.fourPointTransform(imageMat, points);
    imageMat.release();

    // Enhance contrast/brightness, then sharpen. (Project API name contains
    // the typo "Birghtness" — kept as-is, it is defined elsewhere.)
    Mat enhancedImage = CVProcessor.adjustBirghtnessAndContrast(croppedImage, 1);
    croppedImage.release();

    enhancedImage = CVProcessor.sharpenImage(enhancedImage);

    // Save to disk and stamp the EXIF rotation; returns null on I/O failure.
    String imagePath = null;
    try {
        imagePath = Util.saveImage(mContext,
                "IMG_CVScanner_" + System.currentTimeMillis(), enhancedImage, false);
        enhancedImage.release();
        Util.setExifRotation(mContext, Util.getUriFromPath(imagePath), rotation);
    } catch (IOException e) {
        e.printStackTrace();
    }

    return imagePath;
}
 
Example #24
Source File: ImageProcessor.java    From pasm-yolov3-Android with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Loads a bitmap into {@code rgbImage} and records the width/height ratios
 * between the source image and the YOLO network input size.
 *
 * @param loadedImage source bitmap
 * @param yoloWidth   network input width
 * @param yoloHeight  network input height
 */
public void loadImage(Bitmap loadedImage, int yoloWidth, int yoloHeight) {
    // Remember how much the source differs from the network input size.
    this.widthRatio = (float) loadedImage.getWidth() / yoloWidth;
    this.heightRatio = (float) loadedImage.getHeight() / yoloHeight;

    // OpenCV conversion needs a mutable ARGB_8888 copy of the bitmap.
    Bitmap argbCopy = loadedImage.copy(Bitmap.Config.ARGB_8888, true);
    Utils.bitmapToMat(argbCopy, rgbImage);

}
 
Example #25
Source File: CameraAnalyzer.java    From LPR with Apache License 2.0 5 votes vote down vote up
/**
 * Converts a CameraX YUV_420_888 ImageProxy into an OpenCV Mat: repacks the
 * planes as NV21, JPEG-compresses via YuvImage, decodes to a Bitmap, rotates
 * and crops to the scanner's framing rect, then converts to a Mat.
 *
 * @param imageProxy camera frame (YUV planes)
 * @return the cropped frame as a Mat, or null on I/O failure
 */
private Mat ImagetoMat(ImageProxy imageProxy) {
    ImageProxy.PlaneProxy[] plane = imageProxy.getPlanes();
    ByteBuffer yBuffer = plane[0].getBuffer();  // Y
    ByteBuffer uBuffer = plane[1].getBuffer();  // U
    ByteBuffer vBuffer = plane[2].getBuffer();  // V

    int ySize = yBuffer.remaining();
    int uSize = uBuffer.remaining();
    int vSize = vBuffer.remaining();

    byte[] nv21 = new byte[ySize + uSize + vSize];

    // NV21 interleaves V before U, so the V plane is copied first.
    yBuffer.get(nv21, 0, ySize);
    vBuffer.get(nv21, ySize, vSize);
    uBuffer.get(nv21, ySize + vSize, uSize);
    try {
        YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, imageProxy.getWidth(), imageProxy.getHeight(), null);
        ByteArrayOutputStream stream = new ByteArrayOutputStream(nv21.length);
        yuvImage.compressToJpeg(new Rect(0, 0, yuvImage.getWidth(), yuvImage.getHeight()), 90, stream);
        Bitmap bitmap = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());
        Matrix matrix = new Matrix();
        matrix.postRotate(90);
        // NOTE(review): top/left and height/width are intentionally swapped
        // here because of the 90-degree rotation — confirm against the view.
        Rect rect = scannerView.getFramingRectInPreview(bitmap.getWidth(), bitmap.getHeight());
        bitmap = Bitmap.createBitmap(bitmap, rect.top, rect.left, rect.height(), rect.width(), matrix, true);
        stream.close();
        // Fix: Mat's constructor takes (rows, cols) = (height, width); the
        // arguments were previously swapped. (bitmapToMat reallocates anyway,
        // but the swapped form is misleading and wasteful.)
        Mat mat = new Mat(bitmap.getHeight(), bitmap.getWidth(), CvType.CV_8UC4);
        Utils.bitmapToMat(bitmap, mat);
        return mat;
    } catch (IOException e) {
        e.printStackTrace();
    }
    return null;
}
 
Example #26
Source File: OldMainActivity.java    From pasm-yolov3-Android with GNU General Public License v3.0 5 votes vote down vote up
@Override
protected List<Classifier.Recognition> doInBackground(Mat... mats) {
    Mat mRgbaTemp = mats[0];
    if (myBitmap != null){
        smallBitmap = Bitmap.createScaledBitmap(myBitmap, INPUT_WIDTH, INPUT_HEIGHT, false);
    }else{

        smallBitmap = Bitmap.createBitmap(INPUT_WIDTH, INPUT_HEIGHT, Bitmap.Config.RGB_565);
        Bitmap bigBitmap = Bitmap.createBitmap(mRgbaF.width(), mRgbaF.height(), Bitmap.Config.RGB_565);
        Mat mRgbaFixedSize = new Mat(INPUT_WIDTH, INPUT_HEIGHT, CvType.CV_8UC4);

        Core.transpose(mRgbaTemp, mRgbaT);
        Imgproc.resize(mRgbaT, mRgbaF, mRgbaF.size(), 0,0, 0);
        Core.flip(mRgbaF, mRgbaTemp, 1 );

        Imgproc.resize(mRgbaTemp, mRgbaFixedSize, new Size(INPUT_WIDTH, INPUT_HEIGHT), 0,0, 0);

        Utils.matToBitmap(mRgbaFixedSize, smallBitmap);
        Utils.matToBitmap(mRgbaTemp, bigBitmap);

        this.publishProgress(bigBitmap);


        //OLD Toast.makeText(getApplicationContext(), "Nessuna immagine caricata", Toast.LENGTH_SHORT).show();
    }

    List<Classifier.Recognition> recognitions = classifier.recognizeImage(smallBitmap);
    return  recognitions;
}
 
Example #27
Source File: OpenCVTestCase.java    From OpenTLDAndroid with Apache License 2.0 5 votes vote down vote up
/**
 * Loads a test image from /mnt/sdcard/TLDtest/&lt;name&gt;.png as a Mat.
 *
 * @param name file name without extension
 * @return the decoded image as a Mat
 * @throws IllegalStateException if the file is missing or cannot be decoded
 */
protected static Mat readMatFromFile(final String name){
	// BitmapFactory.decodeResource scales the image which is not great... Getting the image from resources is a BIG PAIN !
	//final Bitmap image = new BitmapDrawable(getContext().getResources(), getContext().getResources().openRawResource(R.drawable.test_grid_frame)).getBitmap();
	final Bitmap image = BitmapFactory.decodeFile("/mnt/sdcard/TLDtest/" + name + ".png");
	// Fix: decodeFile returns null on failure, which previously surfaced as an
	// opaque NPE inside bitmapToMat — fail fast with a clear message instead.
	if (image == null) {
		throw new IllegalStateException("Could not decode /mnt/sdcard/TLDtest/" + name + ".png");
	}
	final Mat img = new Mat();
	Utils.bitmapToMat(image, img);
	
	return img;		
}
 
Example #28
Source File: MainActivity.java    From pasm-yolov3-Android with GNU General Public License v3.0 5 votes vote down vote up
@Override
protected void onPostExecute(Mat result) {
    ImageView ivGallery = findViewById(R.id.ivGallery);
    ivGallery.setVisibility(View.GONE);
    ImageView iv = findViewById(R.id.ivPreview);
    Bitmap bigBitmap = Bitmap.createBitmap(result.width(), result.height(), Bitmap.Config.RGB_565);
    Utils.matToBitmap(result, bigBitmap);
    iv.setImageBitmap(bigBitmap);
    iv.setVisibility(View.VISIBLE);
    TextView tv = findViewById(R.id.textView);
    tv.setText("Done!");

}
 
Example #29
Source File: Image.java    From RobotHelper with GNU Affero General Public License v3.0 5 votes vote down vote up
/**
 * Template matching: finds the best match of a template image inside a source
 * image using normalized cross-correlation.
 *
 * @param srcImg      source bitmap
 * @param templateImg template bitmap
 * @param threshold   similarity threshold in (0, 1]; lowering it can help
 *                    tolerate different device resolutions. Non-positive
 *                    values fall back to the default 0.5.
 * @return top-left corner of the best match, or (-1, -1) if the best score
 *         is below the threshold
 */
public static Point matchTemplate(Bitmap srcImg, Bitmap templateImg, double threshold) {

    if (threshold <= 0) {
        threshold = 0.5;
    }

    Mat tpl = new Mat();
    Mat src = new Mat();
    Utils.bitmapToMat(srcImg, src);
    Utils.bitmapToMat(templateImg, tpl);

    // Result map is (src - tpl + 1) in each dimension.
    int height = src.rows() - tpl.rows() + 1;
    int width = src.cols() - tpl.cols() + 1;
    Mat result = new Mat(height, width, CvType.CV_32FC1);
    int method = Imgproc.TM_CCOEFF_NORMED;
    Imgproc.matchTemplate(src, tpl, result, method);

    // For TM_CCOEFF_NORMED the best match is the global maximum.
    Core.MinMaxLocResult minMaxResult = Core.minMaxLoc(result);
    if (minMaxResult.maxVal < threshold) {
        return new Point(-1, -1);
    }
    // Cleanup: the unused minLoc lookup and the redundant null-then-assign of
    // matchloc were removed; behavior is unchanged.
    org.opencv.core.Point matchLoc = minMaxResult.maxLoc;
    return new Point((int) matchLoc.x, (int) matchLoc.y);

}
 
Example #30
Source File: ProcessHelper.java    From OpenCV-android with Apache License 2.0 5 votes vote down vote up
/**
 * Grayscale conversion.
 *
 * @param origin   source bitmap
 * @param callback receives the grayscale bitmap, or the error message on failure
 */
public void gray(Bitmap origin, ProcessCallback callback) {
    if (origin == null) {
        return;
    }
    try {
        Bitmap output = Bitmap.createBitmap(origin.getWidth(), origin.getHeight(), Bitmap.Config.RGB_565);
        Utils.bitmapToMat(origin, rgbMat);
        // Convert RGB -> single-channel gray, then back into a bitmap.
        Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);
        Utils.matToBitmap(grayMat, output);
        callback.onSuccess(output);
    } catch (Exception e) {
        callback.onFailed(e.getMessage());
    }
}