Java Code Examples for org.opencv.android.Utils

The following examples show how to use org.opencv.android.Utils. They are extracted from open source projects; the source project, file, and license are noted above each example.
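Most of the examples below revolve around the two conversion helpers, Utils.bitmapToMat and Utils.matToBitmap, which copy pixel data between an android.graphics.Bitmap and an OpenCV Mat. The following is a minimal sketch of that round trip; the class name, method name, and the blur step are illustrative rather than taken from any of the projects below, and it assumes OpenCV has already been initialized (e.g. via OpenCVLoader).

import android.graphics.Bitmap;

import org.opencv.android.Utils;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;

public class BitmapMatRoundTrip {

    // Blurs a Bitmap by converting it to a Mat, processing it, and converting the result back.
    public static Bitmap blur(Bitmap input) {
        // bitmapToMat expects an ARGB_8888 (or RGB_565) bitmap and fills an RGBA Mat.
        Bitmap argb = input.copy(Bitmap.Config.ARGB_8888, true);
        Mat rgba = new Mat(argb.getHeight(), argb.getWidth(), CvType.CV_8UC4);
        Utils.bitmapToMat(argb, rgba);

        // Any OpenCV processing goes here; a Gaussian blur keeps the 4-channel layout intact.
        Imgproc.GaussianBlur(rgba, rgba, new Size(5, 5), 0);

        // matToBitmap needs a target bitmap whose dimensions match the Mat.
        Bitmap output = Bitmap.createBitmap(rgba.cols(), rgba.rows(), Bitmap.Config.ARGB_8888);
        Utils.matToBitmap(rgba, output);
        rgba.release();
        return output;
    }
}

The same pattern (bitmapToMat, process, matToBitmap) appears in almost every example that follows; only the processing step in the middle changes.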
Example 1
Source Project: MOAAP   Source File: MainActivity.java    License: MIT License
public void DifferenceOfGaussian() {
    Mat grayMat = new Mat();
    Mat blur1 = new Mat();
    Mat blur2 = new Mat();

    //Converting the image to grayscale
    Imgproc.cvtColor(originalMat, grayMat, Imgproc.COLOR_BGR2GRAY);

    Imgproc.GaussianBlur(grayMat, blur1, new Size(15, 15), 5);
    Imgproc.GaussianBlur(grayMat, blur2, new Size(21, 21), 5);

    //Subtracting the two blurred images
    Mat DoG = new Mat();
    Core.absdiff(blur1, blur2, DoG);

    //Inverse Binary Thresholding
    Core.multiply(DoG, new Scalar(100), DoG);
    Imgproc.threshold(DoG, DoG, 50, 255, Imgproc.THRESH_BINARY_INV);

    //Converting Mat back to Bitmap
    Utils.matToBitmap(DoG, currentBitmap);
    imageView.setImageBitmap(currentBitmap);
}
 
Example 2
Source Project: MOAAP   Source File: MainActivity.java    License: MIT License
void Contours() {
    Mat grayMat = new Mat();
    Mat cannyEdges = new Mat();
    Mat hierarchy = new Mat();

    List<MatOfPoint> contourList = new ArrayList<MatOfPoint>(); //A list to store all the contours

    //Converting the image to grayscale
    Imgproc.cvtColor(originalMat, grayMat, Imgproc.COLOR_BGR2GRAY);

    Imgproc.Canny(grayMat, cannyEdges, 10, 100);

    //finding contours
    Imgproc.findContours(cannyEdges, contourList, hierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);

    //Drawing contours on a new image
    Mat contours = new Mat();
    contours.create(cannyEdges.rows(), cannyEdges.cols(), CvType.CV_8UC3);
    Random r = new Random();
    for (int i = 0; i < contourList.size(); i++) {
        Imgproc.drawContours(contours, contourList, i, new Scalar(r.nextInt(255), r.nextInt(255), r.nextInt(255)), -1);
    }
    //Converting Mat back to Bitmap
    Utils.matToBitmap(contours, currentBitmap);
    imageView.setImageBitmap(currentBitmap);
}
 
Example 3
Source Project: OCR-Test   Source File: MainActivity.java    License: Apache License 2.0
public Bitmap convertToBlackWhite(Bitmap compressImage)
{
    Log.d("CV", "Before converting to black");
    Mat imageMat = new Mat();
    Utils.bitmapToMat(compressImage, imageMat);
    Imgproc.cvtColor(imageMat, imageMat, Imgproc.COLOR_BGR2GRAY);
    Imgproc.GaussianBlur(imageMat, imageMat, new Size(3, 3), 0);
    //Imgproc.adaptiveThreshold(imageMat, imageMat, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C, Imgproc.THRESH_BINARY_INV, 5, 4);
    //Imgproc.medianBlur(imageMat, imageMat, 3);
    Imgproc.threshold(imageMat, imageMat, 0, 255, Imgproc.THRESH_OTSU);

    Bitmap newBitmap = compressImage;
    Utils.matToBitmap(imageMat, newBitmap);
    imageView.setImageBitmap(newBitmap);
    Log.d("CV", "After converting to black");


    return newBitmap;

}
 
Example 4
Source Project: OpenCV-android   Source File: ProcessHelper.java    License: Apache License 2.0
/**
 * Edge detection (Canny).
 *
 * @param origin   original bitmap
 * @param callback result callback
 */
public void canny(Bitmap origin, ProcessCallback callback) {
    if (origin == null) {
        return;
    }
    try {
        Bitmap bitmap = Bitmap.createBitmap(origin.getWidth(), origin.getHeight(), Bitmap.Config.RGB_565);
        Utils.bitmapToMat(origin, rgbMat);
        Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);
        Mat edges = new Mat();
        // lower and upper Canny thresholds
        Imgproc.Canny(grayMat, edges, 50, 300);
        Utils.matToBitmap(edges, bitmap);
        callback.onSuccess(bitmap);
    } catch (Exception e) {
        callback.onFailed(e.getMessage());
    }
}
 
Example 5
Source Project: MOAAP   Source File: PyramidActivity.java    License: MIT License
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent imageReturnedIntent) {
    super.onActivityResult(requestCode, resultCode, imageReturnedIntent);

    switch(requestCode) {
        case SELECT_PHOTO:
            if(resultCode == RESULT_OK){
                try {
                    final Uri imageUri = imageReturnedIntent.getData();
                    final InputStream imageStream = getContentResolver().openInputStream(imageUri);
                    final Bitmap selectedImage = BitmapFactory.decodeStream(imageStream);
                    src = new Mat(selectedImage.getHeight(), selectedImage.getWidth(), CvType.CV_8UC4);
                    Utils.bitmapToMat(selectedImage, src);
                    srcSelected = true;
                    bGaussianPyrUp.setEnabled(true);
                    bGaussianPyrDown.setEnabled(true);
                    bLaplacianPyr.setEnabled(true);
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                }
            }
            break;
    }
}
 
Example 6
Source Project: MOAAP   Source File: MainActivity.java    License: MIT License
void Sobel() {
    Mat grayMat = new Mat();
    Mat sobel = new Mat(); //Mat to store the final result

    //Matrices to store gradient and absolute gradient respectively
    Mat grad_x = new Mat();
    Mat abs_grad_x = new Mat();

    Mat grad_y = new Mat();
    Mat abs_grad_y = new Mat();

    //Converting the image to grayscale
    Imgproc.cvtColor(originalMat, grayMat, Imgproc.COLOR_BGR2GRAY);

    //Calculating gradient in horizontal direction
    Imgproc.Sobel(grayMat, grad_x, CvType.CV_16S, 1, 0, 3, 1, 0);

    //Calculating gradient in vertical direction
    Imgproc.Sobel(grayMat, grad_y, CvType.CV_16S, 0, 1, 3, 1, 0);

    //Calculating absolute value of gradients in both the direction
    Core.convertScaleAbs(grad_x, abs_grad_x);
    Core.convertScaleAbs(grad_y, abs_grad_y);

    //Calculating the resultant gradient
    Core.addWeighted(abs_grad_x, 0.5, abs_grad_y, 0.5, 1, sobel);

    //Converting Mat back to Bitmap
    Utils.matToBitmap(sobel, currentBitmap);
    imageView.setImageBitmap(currentBitmap);
}
 
Example 7
Source Project: MOAAP   Source File: MainActivity.java    License: MIT License
void HOGDescriptor() {
    Mat grayMat = new Mat();
    Mat people = new Mat();

    //Converting the image to grayscale
    Imgproc.cvtColor(originalMat, grayMat, Imgproc.COLOR_BGR2GRAY);

    HOGDescriptor hog = new HOGDescriptor();
    hog.setSVMDetector(HOGDescriptor.getDefaultPeopleDetector());

    MatOfRect faces = new MatOfRect();
    MatOfDouble weights = new MatOfDouble();

    hog.detectMultiScale(grayMat, faces, weights);
    originalMat.copyTo(people);
    //Draw the detections (people) on the image
    Rect[] facesArray = faces.toArray();
    for (int i = 0; i < facesArray.length; i++)
        Imgproc.rectangle(people, facesArray[i].tl(), facesArray[i].br(), new Scalar(100), 3);

    //Converting Mat back to Bitmap
    Utils.matToBitmap(people, currentBitmap);
    imageView.setImageBitmap(currentBitmap);
}
 
Example 8
Source Project: FtcSamples   Source File: FtcTestOpenCv.java    License: MIT License
/**
 * This method is called when the camera view is started. It will allocate and initialize
 * some global resources.
 *
 * @param width specifies the width of the camera view.
 * @param height specifies the height of the camera view.
 */
@Override
public void onCameraViewStarted(int width, int height)
{
    faceRects = new MatOfRect();
    totalProcessingTime = 0;
    framesProcessed = 0;

    overlayImage = new Mat();
    Bitmap overlayBitmap =
            BitmapFactory.decodeResource(activity.getResources(), R.drawable.mustache);
    Utils.bitmapToMat(overlayBitmap, overlayImage);
    //
    // Don't allow overlay unless overlay image has the rgba channels.
    //
    if (overlayImage.channels() < 4) doOverlayImage = false;
}
 
Example 9
Source Project: OpenCV-Android-Object-Detection   Source File: MainActivity.java    License: MIT License
private void initializeOpenCVDependencies() throws IOException {
    mOpenCvCameraView.enableView();
    detector = FeatureDetector.create(FeatureDetector.ORB);
    descriptor = DescriptorExtractor.create(DescriptorExtractor.ORB);
    matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
    img1 = new Mat();
    AssetManager assetManager = getAssets();
    InputStream istr = assetManager.open("a.jpeg");
    Bitmap bitmap = BitmapFactory.decodeStream(istr);
    Utils.bitmapToMat(bitmap, img1);
    Imgproc.cvtColor(img1, img1, Imgproc.COLOR_RGB2GRAY);
    img1.convertTo(img1, 0); //convert to 8-bit (CV_8U) to match the type of the camera image
    descriptors1 = new Mat();
    keypoints1 = new MatOfKeyPoint();
    detector.detect(img1, keypoints1);
    descriptor.compute(img1, keypoints1, descriptors1);

}
 
Example 10
private float[] getPixels(Mat img){
    Bitmap bmp = Bitmap.createBitmap(inputSize, inputSize, Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(img, bmp);
    int[] intValues = new int[inputSize * inputSize];
    bmp.getPixels(intValues, 0, inputSize, 0, 0, inputSize, inputSize);

    float[] floatValues = new float[inputSize * inputSize * channels];
    for (int i = 0; i < intValues.length; ++i) {
        final int val = intValues[i];
        floatValues[i * 3 + 0] = (((float)((val >> 16) & 0xFF)) - imageMean) / imageStd;
        floatValues[i * 3 + 1] = (((float)((val >> 8) & 0xFF)) - imageMean) / imageStd;
        floatValues[i * 3 + 2] = (((float)(val & 0xFF)) - imageMean) / imageStd;
    }

    return floatValues;
}
 
Example 11
Source Project: CVScanner   Source File: DocumentDetector.java    License: GNU General Public License v3.0
Document detectDocument(Frame frame){
    Size imageSize = new Size(frame.getMetadata().getWidth(), frame.getMetadata().getHeight());
    Mat src = new Mat();
    Utils.bitmapToMat(frame.getBitmap(), src);
    List<MatOfPoint> contours = CVProcessor.findContours(src);
    src.release();

    if(!contours.isEmpty()){
        CVProcessor.Quadrilateral quad = CVProcessor.getQuadrilateral(contours, imageSize);

        if(quad != null){
            quad.points = CVProcessor.getUpscaledPoints(quad.points, CVProcessor.getScaleRatio(imageSize));
            return new Document(frame, quad);
        }
    }

    return null;
}
 
Example 12
Source Project: AndroidFaceRecognizer   Source File: FaceRecognitionActivity.java    License: MIT License
private void onFaceCaptured(Mat faceMat){
	capturingImage = false;
	final boolean willRecognizeButtonAppear = capturedMat == null;
	capturedMat = faceMat;
	final Bitmap bmp = Bitmap.createBitmap(faceMat.cols(), faceMat.rows(), Bitmap.Config.RGB_565);
	Utils.matToBitmap(faceMat, bmp);
	FaceRecognitionActivity.this.runOnUiThread(new Runnable() {
		
		@Override
		public void run() {
			capturedImage.setImageBitmap(bmp);
			captureButton.setBackgroundResource(R.drawable.capturestart);
			captureButton.setText("Start Capturing");
			if(willRecognizeButtonAppear) {
				bringRecognizeButtonAnimatedly();
			}
		}
	});
}
 
Example 13
Source Project: Camdroid   Source File: OCRProcessor.java    License: Apache License 2.0
@Override
protected void draw() {
    Utils.matToBitmap(out, this.bmp);
    Canvas canvas = new Canvas(this.bmp);

    int y = bounds.height();
    int c = 1;
    for (String line : simpleText.split("\n")) {
        canvas.drawText(line, bounds.width(), y, paint);
        y = y + bounds.height();
        // stop after drawing at most `lines` lines of text
        if (c >= lines)
            break;
        c++;
    }

    this.drawer.drawBitmap(this.bmp);

}
 
Example 14
Source Project: LPR   Source File: CameraAnalyzer.java    License: Apache License 2.0
private Mat ImagetoMat(ImageProxy imageProxy) {
    ImageProxy.PlaneProxy[] plane = imageProxy.getPlanes();
    ByteBuffer yBuffer = plane[0].getBuffer();  // Y
    ByteBuffer uBuffer = plane[1].getBuffer();  // U
    ByteBuffer vBuffer = plane[2].getBuffer();  // V

    int ySize = yBuffer.remaining();
    int uSize = uBuffer.remaining();
    int vSize = vBuffer.remaining();

    byte[] nv21 = new byte[ySize + uSize + vSize];

    //U and V are swapped
    yBuffer.get(nv21, 0, ySize);
    vBuffer.get(nv21, ySize, vSize);
    uBuffer.get(nv21, ySize + vSize, uSize);
    try {
        YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, imageProxy.getWidth(), imageProxy.getHeight(), null);
        ByteArrayOutputStream stream = new ByteArrayOutputStream(nv21.length);
        yuvImage.compressToJpeg(new Rect(0, 0, yuvImage.getWidth(), yuvImage.getHeight()), 90, stream);
        Bitmap bitmap = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());
        Matrix matrix = new Matrix();
        matrix.postRotate(90);
        Rect rect = scannerView.getFramingRectInPreview(bitmap.getWidth(), bitmap.getHeight());
        bitmap = Bitmap.createBitmap(bitmap, rect.top, rect.left, rect.height(), rect.width(), matrix, true);
        stream.close();
        Mat mat = new Mat(bitmap.getHeight(), bitmap.getWidth(), CvType.CV_8UC4); // Mat takes (rows, cols)
        Utils.bitmapToMat(bitmap, mat);
        return mat;
    } catch (IOException e) {
        e.printStackTrace();
    }
    return null;
}
 
Example 15
@Override
protected List<Classifier.Recognition> doInBackground(Mat... mats) {
    Mat mRgbaTemp = mats[0];
    if (myBitmap != null){
        smallBitmap = Bitmap.createScaledBitmap(myBitmap, INPUT_WIDTH, INPUT_HEIGHT, false);
    }else{

        smallBitmap = Bitmap.createBitmap(INPUT_WIDTH, INPUT_HEIGHT, Bitmap.Config.RGB_565);
        Bitmap bigBitmap = Bitmap.createBitmap(mRgbaF.width(), mRgbaF.height(), Bitmap.Config.RGB_565);
        Mat mRgbaFixedSize = new Mat(INPUT_WIDTH, INPUT_HEIGHT, CvType.CV_8UC4);

        Core.transpose(mRgbaTemp, mRgbaT);
        Imgproc.resize(mRgbaT, mRgbaF, mRgbaF.size(), 0,0, 0);
        Core.flip(mRgbaF, mRgbaTemp, 1 );

        Imgproc.resize(mRgbaTemp, mRgbaFixedSize, new Size(INPUT_WIDTH, INPUT_HEIGHT), 0,0, 0);

        Utils.matToBitmap(mRgbaFixedSize, smallBitmap);
        Utils.matToBitmap(mRgbaTemp, bigBitmap);

        this.publishProgress(bigBitmap);


        //OLD Toast.makeText(getApplicationContext(), "No image loaded", Toast.LENGTH_SHORT).show();
    }

    List<Classifier.Recognition> recognitions = classifier.recognizeImage(smallBitmap);
    return  recognitions;
}
 
Example 16
@Override
protected void onPostExecute(Mat result) {
    ImageView ivGallery = findViewById(R.id.ivGallery);
    ivGallery.setVisibility(View.GONE);
    ImageView iv = findViewById(R.id.ivPreview);
    Bitmap bigBitmap = Bitmap.createBitmap(result.width(), result.height(), Bitmap.Config.RGB_565);
    Utils.matToBitmap(result, bigBitmap);
    iv.setImageBitmap(bigBitmap);
    iv.setVisibility(View.VISIBLE);
    TextView tv = findViewById(R.id.textView);
    tv.setText("Done!");

}
 
Example 17
public void loadImage(Bitmap loadedImage, int yoloWidth, int yoloHeight) {
    this.widthRatio = (float)loadedImage.getWidth() / yoloWidth;
    this.heightRatio = (float)loadedImage.getHeight() / yoloHeight;

    Bitmap bmp32 = loadedImage.copy(Bitmap.Config.ARGB_8888, true);
    Utils.bitmapToMat(bmp32, rgbImage);

}
 
Example 18
Source Project: RobotHelper   Source File: Image.java    License: GNU Affero General Public License v3.0
/**
 * Template matching.
 *
 * @param srcImg      source image
 * @param templateImg template image
 * @param threshold   similarity threshold; lowering it can partly compensate for different screen resolutions
 * @return the best match location, or the point (-1, -1) if no match reaches the threshold
 */
public static Point matchTemplate(Bitmap srcImg, Bitmap templateImg, double threshold) {

    if (threshold <= 0) {
        threshold = 0.5;
    }


    Mat tpl = new Mat();
    Mat src = new Mat();
    Utils.bitmapToMat(srcImg, src);
    Utils.bitmapToMat(templateImg, tpl);


    int height = src.rows() - tpl.rows() + 1;
    int width = src.cols() - tpl.cols() + 1;
    Mat result = new Mat(height, width, CvType.CV_32FC1);
    int method = Imgproc.TM_CCOEFF_NORMED;
    Imgproc.matchTemplate(src, tpl, result, method);
    Core.MinMaxLocResult minMaxResult = Core.minMaxLoc(result);
    if (minMaxResult.maxVal < threshold) {
        return new Point(-1, -1);
    }
    org.opencv.core.Point matchLoc = minMaxResult.maxLoc;
    return new Point((int) matchLoc.x, (int) matchLoc.y);

}
 
Example 19
Source Project: OpenCV-android   Source File: ProcessHelper.java    License: Apache License 2.0
/**
 * Grayscale conversion.
 *
 * @param origin   original bitmap
 * @param callback result callback
 */
public void gray(Bitmap origin, ProcessCallback callback) {
    if (origin == null) {
        return;
    }
    try {
        Bitmap bitmap = Bitmap.createBitmap(origin.getWidth(), origin.getHeight(), Bitmap.Config.RGB_565);
        Utils.bitmapToMat(origin, rgbMat);
        Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);
        Utils.matToBitmap(grayMat, bitmap);
        callback.onSuccess(bitmap);
    } catch (Exception e) {
        callback.onFailed(e.getMessage());
    }
}
 
Example 20
Source Project: OpenCV-android   Source File: ProcessHelper.java    License: Apache License 2.0
/**
 * Corner detection (Harris).
 *
 * @param origin   original bitmap
 * @param callback result callback
 */
public void harris(Bitmap origin, ProcessCallback callback) {
    if (origin == null) {
        return;
    }
    try {
        Bitmap bitmap = Bitmap.createBitmap(origin.getWidth(), origin.getHeight(), Bitmap.Config.RGB_565);
        Utils.bitmapToMat(origin, rgbMat);
        Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);
        Mat corners = new Mat();
        Mat tempDst = new Mat();
        Mat tempDstNorm = new Mat();
        // Find the corners
        Imgproc.cornerHarris(grayMat, tempDst, 2, 3, 0.04);
        // Normalize the Harris corner output
        Core.normalize(tempDst, tempDstNorm, 0, 255, Core.NORM_MINMAX);
        Core.convertScaleAbs(tempDstNorm, corners);
        // Draw the corners
        Random random = new Random();
        for (int i = 0; i < tempDstNorm.cols(); i++) {
            for (int j = 0; j < tempDstNorm.rows(); j++) {
                double[] value = tempDstNorm.get(j, i);
                if (value[0] > 250) {
                    // Determines which corners get drawn; the higher the cutoff, the fewer points are drawn
                    Imgproc.circle(corners, new Point(i, j), 5, new Scalar(random.nextInt(255)), 2);
                }
            }
        }
        Utils.matToBitmap(corners, bitmap);
        callback.onSuccess(bitmap);
    } catch (Exception e) {
        callback.onFailed(e.getMessage());
    }
}
 
Example 21
private Bitmap scaleImage(Bitmap bitmap, int width, int height)
{

    Mat src = new Mat();
    Mat dst = new Mat();
    Utils.bitmapToMat(bitmap, src);
    //new Size(width, height)
    Imgproc.resize(src, dst, new Size(width,height),0,0,Imgproc.INTER_AREA);
    Bitmap bitmap1 = Bitmap.createBitmap(dst.cols(),dst.rows(),Bitmap.Config.RGB_565);
    Utils.matToBitmap(dst, bitmap1);
    return bitmap1;
}
 
Example 22
@Override
public void onBitmapSelect(final Uri uri) {
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            mProgressBar.setVisibility(View.VISIBLE);
            mContainer.setVisibility(View.INVISIBLE);
        }
    });

    new Thread(new Runnable() {
        @Override
        public void run() {
            try {
                Bitmap bitmap = MediaStore.Images.Media.getBitmap(getContentResolver(), uri);
                //First convert Bitmap to Mat
                Mat ImageMat = new Mat(bitmap.getHeight(), bitmap.getWidth(), CvType.CV_8U, new Scalar(4));
                Bitmap myBitmap32 = bitmap.copy(Bitmap.Config.ARGB_8888, true);
                Utils.bitmapToMat(myBitmap32, ImageMat);

                doWithMat(ImageMat.getNativeObjAddr());

                //Then convert the processed Mat to Bitmap
                mBitmap = Bitmap.createBitmap(ImageMat.cols(), ImageMat.rows(), Bitmap.Config.ARGB_8888);
                Utils.matToBitmap(ImageMat, mBitmap);
                Intent data = new Intent();
                setResult(Activity.RESULT_OK, data);
                finish();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }).start();
}
 
Example 23
Source Project: MOAAP   Source File: MainActivity.java    License: MIT License
void Canny() {
    Mat grayMat = new Mat();
    Mat cannyEdges = new Mat();
    //Converting the image to grayscale
    Imgproc.cvtColor(originalMat, grayMat, Imgproc.COLOR_BGR2GRAY);

    Imgproc.Canny(grayMat, cannyEdges, 10, 100);

    //Converting Mat back to Bitmap
    Utils.matToBitmap(cannyEdges, currentBitmap);
    imageView.setImageBitmap(currentBitmap);
}
 
Example 24
Source Project: MOAAP   Source File: MainActivity.java    License: MIT License
void HoughLines() {

    Mat grayMat = new Mat();
    Mat cannyEdges = new Mat();
    Mat lines = new Mat();

    //Converting the image to grayscale
    Imgproc.cvtColor(originalMat, grayMat, Imgproc.COLOR_BGR2GRAY);

    Imgproc.Canny(grayMat, cannyEdges, 10, 100);

    Imgproc.HoughLinesP(cannyEdges, lines, 1, Math.PI / 180, 50, 20, 20);

    Mat houghLines = new Mat();
    houghLines.create(cannyEdges.rows(), cannyEdges.cols(), CvType.CV_8UC1);

    //Drawing lines on the image
    for (int i = 0; i < lines.cols(); i++) {
        double[] points = lines.get(0, i);
        double x1, y1, x2, y2;

        x1 = points[0];
        y1 = points[1];
        x2 = points[2];
        y2 = points[3];

        Point pt1 = new Point(x1, y1);
        Point pt2 = new Point(x2, y2);

        //Drawing lines on an image
        Imgproc.line(houghLines, pt1, pt2, new Scalar(255, 0, 0), 1);
    }

    //Converting Mat back to Bitmap
    Utils.matToBitmap(houghLines, currentBitmap);
    imageView.setImageBitmap(currentBitmap);
}
 
Example 25
Source Project: MOAAP   Source File: MainActivity.java    License: MIT License
void HoughCircles() {
    Mat grayMat = new Mat();
    Mat cannyEdges = new Mat();
    Mat circles = new Mat();

    //Converting the image to grayscale
    Imgproc.cvtColor(originalMat, grayMat, Imgproc.COLOR_BGR2GRAY);

    Imgproc.Canny(grayMat, cannyEdges, 10, 100);

    Imgproc.HoughCircles(cannyEdges, circles, Imgproc.CV_HOUGH_GRADIENT, 1, cannyEdges.rows() / 15);//, grayMat.rows() / 8);

    Mat houghCircles = new Mat();
    houghCircles.create(cannyEdges.rows(), cannyEdges.cols(), CvType.CV_8UC1);

    //Drawing circles on the image
    for (int i = 0; i < circles.cols(); i++) {
        double[] parameters = circles.get(0, i);
        double x, y;
        int r;

        x = parameters[0];
        y = parameters[1];
        r = (int) parameters[2];

        Point center = new Point(x, y);

        //Drawing circles on an image
        Imgproc.circle(houghCircles, center, r, new Scalar(255, 0, 0), 1);
    }

    //Converting Mat back to Bitmap
    Utils.matToBitmap(houghCircles, currentBitmap);
    imageView.setImageBitmap(currentBitmap);
}
 
Example 26
Source Project: MOAAP   Source File: MainActivity.java    License: MIT License
void HarrisCorner() {
    Mat grayMat = new Mat();
    Mat corners = new Mat();

    //Converting the image to grayscale
    Imgproc.cvtColor(originalMat, grayMat, Imgproc.COLOR_BGR2GRAY);

    Mat tempDst = new Mat();
    //Calculating the Harris corner response
    Imgproc.cornerHarris(grayMat, tempDst, 2, 3, 0.04);

    //Normalizing harris corner's output
    Mat tempDstNorm = new Mat();
    Core.normalize(tempDst, tempDstNorm, 0, 255, Core.NORM_MINMAX);
    Core.convertScaleAbs(tempDstNorm, corners);

    //Drawing corners on a new image
    Random r = new Random();
    for (int i = 0; i < tempDstNorm.cols(); i++) {
        for (int j = 0; j < tempDstNorm.rows(); j++) {
            double[] value = tempDstNorm.get(j, i);
            if (value[0] > 150)
                Imgproc.circle(corners, new Point(i, j), 5, new Scalar(r.nextInt(255)), 2);
        }
    }

    //Converting Mat back to Bitmap
    Utils.matToBitmap(corners, currentBitmap);
    imageView.setImageBitmap(currentBitmap);
}
 
Example 27
Source Project: FtcSamples   Source File: FtcVuforia.java    License: MIT License
/**
 * This method gets a frame from the frame queue and returns the image that matches the format specified by the
 * configVideoSource method.
 *
 * @param frame specifies the frame object to hold image.
 * @return true if success, false otherwise.
 */
@Override
public boolean getFrame(Mat frame)
{
    boolean success = false;

    try
    {
        VuforiaLocalizer.CloseableFrame closeableFrame = localizer.getFrameQueue().take();

        for (int i = 0; i < closeableFrame.getNumImages(); i++)
        {
            Image image = closeableFrame.getImage(i);
            if (image.getWidth() == imageWidth && image.getHeight() == imageHeight &&
                    image.getFormat() == PIXEL_FORMAT.RGB565)
            {
                Bitmap bm = Bitmap.createBitmap(image.getWidth(), image.getHeight(), Bitmap.Config.RGB_565);
                bm.copyPixelsFromBuffer(image.getPixels());
                Utils.bitmapToMat(bm, frame);
                break;
            }
        }

        closeableFrame.close();
        success = true;
    }
    catch (InterruptedException e)
    {
        e.printStackTrace();
    }

    return success;
}
 
Example 28
public void setFaces(PreProcessorFactory.PreprocessingMode preprocessingMode) {
    List<Mat> images = getImages();

    PreferencesHelper preferencesHelper = new PreferencesHelper(context);
    if (preferencesHelper.getDetectionMethod()){
        faces = faceDetection.getFaces(images.get(0));
        angle = faceDetection.getAngle();
    } else {
        Mat img = images.get(0);
        FaceDetector faceDetector = new FaceDetector(img.cols(), img.rows(), 1);
        Bitmap bmp = Bitmap.createBitmap(img.cols(), img.rows(), Bitmap.Config.RGB_565);
        Utils.matToBitmap(img, bmp);
        FaceDetector.Face[] facesAndroid = new FaceDetector.Face[1];
        if (faceDetector.findFaces(bmp, facesAndroid) > 0){
            faces = new Rect[facesAndroid.length];
            for (int i=0; i<facesAndroid.length; i++){
                PointF pointF = new PointF();
                facesAndroid[i].getMidPoint(pointF);
                int xWidth = (int) (1.34 * facesAndroid[i].eyesDistance());
                int yWidth = (int) (1.12 * facesAndroid[i].eyesDistance());
                int dist = (int) (2.77 * facesAndroid[i].eyesDistance());
                Rect face = new Rect((int) pointF.x - xWidth, (int) pointF.y - yWidth, dist, dist);
                faces[i] = face;
            }
        }
    }

    if (preprocessingMode == PreProcessorFactory.PreprocessingMode.RECOGNITION && preferencesHelper.getDetectionMethod()){
        // Change the image rotation to the angle where the face was detected
        images.remove(0);
        images.add(faceDetection.getImg());
        setImages(images);
    }
}
 
Example 29
Source Project: CVScanner   Source File: ImageSaveTask.java    License: GNU General Public License v3.0
/**
 * Override this method to perform a computation on a background thread. The
 * specified parameters are the parameters passed to {@link #execute}
 * by the caller of this task.
 * <p/>
 * This method can call {@link #publishProgress} to publish updates
 * on the UI thread.
 *
 * @param params The parameters of the task.
 * @return A result, defined by the subclass of this task.
 * @see #onPreExecute()
 * @see #onPostExecute
 * @see #publishProgress
 */
@Override
protected String doInBackground(Void... params) {
    Size imageSize = new Size(image.getWidth(), image.getHeight());
    Mat imageMat = new Mat(imageSize, CvType.CV_8UC4);
    Utils.bitmapToMat(image, imageMat);

    image.recycle();

    Mat croppedImage = CVProcessor.fourPointTransform(imageMat, points);
    imageMat.release();

    Mat enhancedImage = CVProcessor.adjustBirghtnessAndContrast(croppedImage, 1);
    croppedImage.release();

    enhancedImage = CVProcessor.sharpenImage(enhancedImage);

    String imagePath = null;
    try {
        imagePath = Util.saveImage(mContext,
                "IMG_CVScanner_" + System.currentTimeMillis(), enhancedImage, false);
        enhancedImage.release();
        Util.setExifRotation(mContext, Util.getUriFromPath(imagePath), rotation);
    } catch (IOException e) {
        e.printStackTrace();
    }

    return imagePath;
}
 
Example 30
Source Project: AndroidFaceRecognizer   Source File: FaceDetectionActivity.java    License: MIT License
private void onFaceCaptured(Mat faceMat){
	for(int i = 0; i < faceImages.size(); i++) {
		if(!capturedMats.containsKey(i)) {
			capturedMats.put(i, faceMat);
			final Bitmap bmp = Bitmap.createBitmap(faceMat.cols(), faceMat.rows(), Bitmap.Config.RGB_565);
			Utils.matToBitmap(faceMat, bmp);
			final int index = i;
			FaceDetectionActivity.this.runOnUiThread(new Runnable() {
				
				@Override
				public void run() {
					faceImages.get(index).setImageBitmap(bmp);
					deleteIcons.get(index).setVisibility(View.VISIBLE);
				}
			});
			break;
		}
	}
	if(capturedMats.size() == 10) {
		FaceDetectionActivity.this.runOnUiThread(new Runnable() {
			
			@Override
			public void run() {
				nameEdit.setVisibility(View.VISIBLE);
				saveButton.setVisibility(View.VISIBLE);
				capturingImage = false;
				captureButton.setImageResource(R.drawable.capturestart);
				captureText.setText("Start Capturing");
				if(!isTraining) {
					yTranslateAnimation(deleteButton, 0, deleteButtonSecondPos - deleteButtonFirstPos, true, true);
				}
				yTranslateAnimation(nameEdit, screenHeight, 0, true, false);
				yTranslateAnimation(saveButton, 3*screenHeight, 0, true, false);
			}
		});
	}
}