Java Code Examples for org.opencv.android.Utils

The following are top-voted examples showing how to use org.opencv.android.Utils. These examples are extracted from open-source projects. You can vote up the examples you find useful; your votes help our system surface more high-quality examples.
Example 1
Project: MOAAP   File: MainActivity.java   Source Code and License 9 votes vote down vote up
/**
 * Detects people in {@code originalMat} using a HOG descriptor with the
 * built-in pedestrian SVM, draws a box around each detection on a copy of
 * the original frame, and shows the result via {@code currentBitmap}.
 */
void HOGDescriptor() {
    // HOG detection runs on a single-channel image.
    Mat gray = new Mat();
    Imgproc.cvtColor(originalMat, gray, Imgproc.COLOR_BGR2GRAY);

    HOGDescriptor hog = new HOGDescriptor();
    hog.setSVMDetector(HOGDescriptor.getDefaultPeopleDetector());

    MatOfRect detections = new MatOfRect();
    MatOfDouble confidences = new MatOfDouble();
    hog.detectMultiScale(gray, detections, confidences);

    // Annotate a copy so the source frame stays untouched.
    Mat annotated = new Mat();
    originalMat.copyTo(annotated);
    for (Rect box : detections.toArray()) {
        Imgproc.rectangle(annotated, box.tl(), box.br(), new Scalar(100), 3);
    }

    // Hand the annotated frame back to the UI.
    Utils.matToBitmap(annotated, currentBitmap);
    imageView.setImageBitmap(currentBitmap);
}
 
Example 2
Project: MOAAP   File: MainActivity.java   Source Code and License 8 votes vote down vote up
/**
 * Detects line segments in {@code originalMat} via Canny + probabilistic
 * Hough transform and draws them on a blank single-channel canvas shown in
 * {@code imageView}.
 *
 * Fix: modern OpenCV Java returns HoughLinesP results as an N x 1 Mat (one
 * segment per row), while the original code read {@code lines.get(0, i)}
 * across columns — which only worked with the old 1 x N layout. The loop
 * now handles either layout.
 */
void HoughLines() {

        Mat grayMat = new Mat();
        Mat cannyEdges = new Mat();
        Mat lines = new Mat();

        //Converting the image to grayscale
        Imgproc.cvtColor(originalMat, grayMat, Imgproc.COLOR_BGR2GRAY);

        Imgproc.Canny(grayMat, cannyEdges, 10, 100);

        // rho = 1 px, theta = 1 degree, threshold 50, minLineLength 20, maxLineGap 20
        Imgproc.HoughLinesP(cannyEdges, lines, 1, Math.PI / 180, 50, 20, 20);

        Mat houghLines = new Mat();
        houghLines.create(cannyEdges.rows(), cannyEdges.cols(), CvType.CV_8UC1);

        // Segments may be stored row-major (N x 1) or column-major (1 x N)
        // depending on the OpenCV version; iterate over the longer axis.
        boolean rowMajor = lines.rows() >= lines.cols();
        int count = rowMajor ? lines.rows() : lines.cols();
        for (int i = 0; i < count; i++) {
            double[] points = rowMajor ? lines.get(i, 0) : lines.get(0, i);
            if (points == null || points.length < 4) {
                continue; // defensive: skip malformed entries
            }

            Point pt1 = new Point(points[0], points[1]);
            Point pt2 = new Point(points[2], points[3]);

            //Drawing lines on an image
            Imgproc.line(houghLines, pt1, pt2, new Scalar(255, 0, 0), 1);
        }

        //Converting Mat back to Bitmap
        Utils.matToBitmap(houghLines, currentBitmap);
        imageView.setImageBitmap(currentBitmap);

    }
 
Example 3
Project: fingerblox   File: ImageProcessing.java   Source Code and License 7 votes vote down vote up
/**
 * Decodes a JPEG byte array into an OpenCV Mat, first scaling the decoded
 * bitmap down to at most 1200 px wide for performance.
 *
 * Fixes: the Mat constructor takes (rows, cols) — i.e. height first — but
 * the original passed width/height swapped. Also, Utils.bitmapToMat always
 * produces an RGBA (CV_8UC4) Mat regardless of the destination's declared
 * type, so the Mat is created as CV_8UC4 and no longer misnamed "BGR".
 *
 * @param data JPEG-encoded image bytes
 * @return the decoded image as an RGBA Mat
 */
private Mat bytesToMat(byte[] data) {
    // Scale down the image for performance
    Bitmap bmp = BitmapFactory.decodeByteArray(data, 0, data.length);
    int targetWidth = 1200;
    if (bmp.getWidth() > targetWidth) {
        float scaleDownFactor = (float) targetWidth / bmp.getWidth();
        bmp = Bitmap.createScaledBitmap(bmp,
                (int) (bmp.getWidth() * scaleDownFactor),
                (int) (bmp.getHeight() * scaleDownFactor),
                true);
    }
    // Mat(rows, cols, type): height comes first.
    Mat image = new Mat(bmp.getHeight(), bmp.getWidth(), CvType.CV_8UC4);
    Utils.bitmapToMat(bmp, image);

    return image;
}
 
Example 4
Project: MOAAP   File: MainActivity.java   Source Code and License 7 votes vote down vote up
/**
 * Computes a Sobel edge map of {@code originalMat}: signed 16-bit gradients
 * are taken along x and y on the grayscale image, converted to absolute
 * 8-bit magnitudes, blended 50/50, and pushed into {@code currentBitmap}.
 */
void Sobel() {
    Mat gray = new Mat();
    Imgproc.cvtColor(originalMat, gray, Imgproc.COLOR_BGR2GRAY);

    // Gradients along each axis (kernel 3, scale 1, delta 0); CV_16S avoids
    // clipping negative responses before the absolute-value step.
    Mat gradX = new Mat();
    Mat gradY = new Mat();
    Imgproc.Sobel(gray, gradX, CvType.CV_16S, 1, 0, 3, 1, 0);
    Imgproc.Sobel(gray, gradY, CvType.CV_16S, 0, 1, 3, 1, 0);

    // Back to unsigned 8-bit magnitudes.
    Mat absGradX = new Mat();
    Mat absGradY = new Mat();
    Core.convertScaleAbs(gradX, absGradX);
    Core.convertScaleAbs(gradY, absGradY);

    // Blend both directions (gamma of 1, as in the original).
    Mat edgeMap = new Mat();
    Core.addWeighted(absGradX, 0.5, absGradY, 0.5, 1, edgeMap);

    // Show the result.
    Utils.matToBitmap(edgeMap, currentBitmap);
    imageView.setImageBitmap(currentBitmap);
}
 
Example 5
Project: MOAAP   File: MainActivity.java   Source Code and License 7 votes vote down vote up
/**
 * Finds contours in {@code originalMat} and draws each one, filled with a
 * random colour, on a blank canvas shown in {@code imageView}.
 *
 * Fixes: Canny now runs on the grayscale image (the original computed
 * grayMat but then ran Canny on the colour frame, leaving grayMat unused),
 * and the random colour bound is 256 so full intensity (255) is reachable.
 */
void Contours() {
    Mat grayMat = new Mat();
    Mat cannyEdges = new Mat();
    Mat hierarchy = new Mat();

    List<MatOfPoint> contourList = new ArrayList<MatOfPoint>(); //A list to store all the contours

    //Converting the image to grayscale
    Imgproc.cvtColor(originalMat, grayMat, Imgproc.COLOR_BGR2GRAY);

    // Edge detection on the grayscale image, not the colour frame.
    Imgproc.Canny(grayMat, cannyEdges, 10, 100);

    //finding contours
    Imgproc.findContours(cannyEdges, contourList, hierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);

    //Drawing contours on a new image
    Mat contours = new Mat();
    contours.create(cannyEdges.rows(), cannyEdges.cols(), CvType.CV_8UC3);
    Random r = new Random();
    for (int i = 0; i < contourList.size(); i++) {
        // nextInt(256) is inclusive of 255; thickness -1 fills the contour.
        Imgproc.drawContours(contours, contourList, i, new Scalar(r.nextInt(256), r.nextInt(256), r.nextInt(256)), -1);
    }
    //Converting Mat back to Bitmap
    Utils.matToBitmap(contours, currentBitmap);
    imageView.setImageBitmap(currentBitmap);
}
 
Example 6
Project: fingerblox   File: ImageProcessing.java   Source Code and License 6 votes vote down vote up
/**
 * Converts {@code src} to an ARGB_8888 bitmap after applying the colour
 * conversion given by {@code code} (forcing a 4-channel output).
 *
 * Fix: Mat's constructor takes (rows, cols) — height first — but the
 * original passed width/height swapped. Harmless only because cvtColor
 * reallocates the destination; corrected for clarity.
 *
 * @param src  source image
 * @param code an Imgproc.COLOR_* conversion code producing RGBA
 * @return the converted image as a bitmap
 */
private Bitmap mat2Bitmap(Mat src, int code) {
    Mat rgbaMat = new Mat(src.height(), src.width(), CvType.CV_8UC4);
    Imgproc.cvtColor(src, rgbaMat, code, 4);
    Bitmap bmp = Bitmap.createBitmap(rgbaMat.cols(), rgbaMat.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(rgbaMat, bmp);
    return bmp;
}
 
Example 7
Project: SudoCAM-Ku   File: CameraView.java   Source Code and License 6 votes vote down vote up
/**
 * Camera picture callback: restarts the preview, decodes the JPEG payload,
 * and stores a grayscale copy of the shot in {@code mImage}.
 */
@Override
public void onPictureTaken(byte[] data, Camera camera) {
    Log.i(TAG, "Saving a bitmap to file");
    // The camera preview was automatically stopped. Start it again.
    mCamera.startPreview();
    mCamera.setPreviewCallback(this);

    // Write the image in a file (in jpeg format)
    try {
        /*FileOutputStream fos = new FileOutputStream(mPictureFileName);

        fos.write(data);
        fos.close();*/

        // Decode the JPEG bytes. Note: bitmapToMat replaces the CV_8UC3 Mat
        // with an RGBA one built from the ARGB_8888 copy, so the declared
        // type here is effectively ignored.
        Bitmap bmp = BitmapFactory.decodeByteArray(data , 0, data.length);
        Mat orig = new Mat(bmp.getHeight(),bmp.getWidth(),CvType.CV_8UC3);
        Bitmap myBitmap32 = bmp.copy(Bitmap.Config.ARGB_8888, true);
        Utils.bitmapToMat(myBitmap32, orig);
        mImage = new Mat();
        // COLOR_RGB2GRAY also accepts 4-channel input (alpha is ignored).
        Imgproc.cvtColor(orig,mImage,Imgproc.COLOR_RGB2GRAY);
        /*Imgproc.cvtColor(orig, orig, Imgproc.COLOR_BGR2RGB,4);
        Mat frame = new Mat(mFrameHeight+mFrameHeight/2,mFrameWidth, CvType.CV_8UC1);
        frame.put(0,0,data);
        //Imgcodecs.imdecode(frame,0);
        Imgproc.cvtColor(frame,mImage,Imgproc.COLOR_YUV2RGBA_NV21);//frame.submat(0, mFrameHeight, 0, mFrameWidth);*/

    } catch (Exception e) {
        Log.e("PictureDemo", "Exception in photoCallback", e);
    }

}
 
Example 8
Project: DNNLibrary   File: MainActivity.java   Source Code and License 6 votes vote down vote up
/**
 * Converts a bitmap into the LeNet input vector: a 28x28 grayscale image
 * scaled into [0, 1] and inverted, so darker strokes map to larger values.
 */
private float[] getInputDataLeNet(Bitmap bitmap) {
    final int INPUT_LENGTH = 28;

    Mat imageMat = new Mat();
    Utils.bitmapToMat(bitmap, imageMat);

    // Grayscale, square-crop to 28x28, then rescale pixel values to [0, 1].
    Imgproc.cvtColor(imageMat, imageMat, Imgproc.COLOR_RGBA2GRAY);
    imageMat = centerCropAndScale(imageMat, INPUT_LENGTH);
    imageMat.convertTo(imageMat, CvType.CV_32F, 1. / 255);

    // Invert: smaller pixel values mean whiter, so feed the network 1 - p.
    Mat inputMat = new Mat();
    Core.subtract(Mat.ones(imageMat.size(), CvType.CV_32F), imageMat, inputMat);

    // Flatten into the float buffer expected by the model.
    float[] inputData = new float[inputMat.width() * inputMat.height()];
    inputMat.get(0, 0, inputData);
    return inputData;
}
 
Example 9
Project: MOAAP   File: PyramidActivity.java   Source Code and License 6 votes vote down vote up
/**
 * Handles the photo-picker result: loads the chosen image into {@code src}
 * and enables the pyramid operation buttons.
 *
 * Fixes: guards against a null result Intent (e.g. cancelled picker) and
 * closes the input stream, which the original leaked.
 */
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent imageReturnedIntent) {
    super.onActivityResult(requestCode, resultCode, imageReturnedIntent);

    switch(requestCode) {
        case SELECT_PHOTO:
            if(resultCode == RESULT_OK && imageReturnedIntent != null){
                InputStream imageStream = null;
                try {
                    final Uri imageUri = imageReturnedIntent.getData();
                    imageStream = getContentResolver().openInputStream(imageUri);
                    final Bitmap selectedImage = BitmapFactory.decodeStream(imageStream);
                    src = new Mat(selectedImage.getHeight(), selectedImage.getWidth(), CvType.CV_8UC4);
                    Utils.bitmapToMat(selectedImage, src);
                    srcSelected = true;
                    bGaussianPyrUp.setEnabled(true);
                    bGaussianPyrDown.setEnabled(true);
                    bLaplacianPyr.setEnabled(true);
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                } finally {
                    // Always release the stream; the original never closed it.
                    if (imageStream != null) {
                        try {
                            imageStream.close();
                        } catch (java.io.IOException ignored) {
                            // best-effort close; nothing useful to do here
                        }
                    }
                }
            }
            break;
    }
}
 
Example 10
Project: MOAAP   File: MainActivity.java   Source Code and License 6 votes vote down vote up
/**
 * Difference-of-Gaussians band-pass filter: blurs the grayscale image with
 * two different kernel sizes, takes the absolute difference, amplifies it,
 * and applies an inverse binary threshold before displaying the result.
 */
public void DifferenceOfGaussian() {
    Mat gray = new Mat();
    Imgproc.cvtColor(originalMat, gray, Imgproc.COLOR_BGR2GRAY);

    // Two blurs with different kernel sizes but the same sigma.
    Mat narrowBlur = new Mat();
    Mat wideBlur = new Mat();
    Imgproc.GaussianBlur(gray, narrowBlur, new Size(15, 15), 5);
    Imgproc.GaussianBlur(gray, wideBlur, new Size(21, 21), 5);

    // The band-pass response is the absolute difference of the two blurs.
    Mat dog = new Mat();
    Core.absdiff(narrowBlur, wideBlur, dog);

    // Amplify the weak response, then inverse-binary threshold at 50.
    Core.multiply(dog, new Scalar(100), dog);
    Imgproc.threshold(dog, dog, 50, 255, Imgproc.THRESH_BINARY_INV);

    // Show the result.
    Utils.matToBitmap(dog, currentBitmap);
    imageView.setImageBitmap(currentBitmap);
}
 
Example 11
Project: FtcSamples   File: FtcTestOpenCv.java   Source Code and License 6 votes vote down vote up
/**
 * This method is called when the camera view is started. It allocates and
 * initializes per-session resources and loads the mustache overlay image.
 *
 * @param width specifies the width of the camera view.
 * @param height specifies the height of the camera view.
 */
@Override
public void onCameraViewStarted(int width, int height)
{
    faceRects = new MatOfRect();
    totalProcessingTime = 0;
    framesProcessed = 0;

    // Decode the overlay resource straight into a Mat.
    overlayImage = new Mat();
    Utils.bitmapToMat(
            BitmapFactory.decodeResource(activity.getResources(), R.drawable.mustache),
            overlayImage);

    //
    // Don't allow overlay unless overlay image has the rgba channels.
    //
    if (overlayImage.channels() < 4)
    {
        doOverlayImage = false;
    }
}
 
Example 12
Project: PerfectShow   File: Feature.java   Source Code and License 6 votes vote down vote up
/**
 * Exports the bundled Haar cascade resources to app-private storage and
 * returns the directory that now contains them.
 */
private static String loadClassifier(Context context)
{
	if (BuildConfig.DEBUG)
	{
		// In debug builds, log where the raw resource lives and where the
		// exported files will land.
		String fullname = context.getResources().getResourceName(R.raw.haarcascade_frontalface_alt2);
		Log.i(TAG, "fullname: " + fullname);
		String resName = fullname.substring(fullname.lastIndexOf("/") + 1);
		Log.i(TAG, "resName: " + resName);

		// getDir("OpenCV_data", ...) maps to
		// /data/data/<PACKAGE_NAME>/app_OpenCV_data — the framework adds the
		// "app_" prefix automatically.
		File resDir = context.getDir("OpenCV_data", Context.MODE_PRIVATE);
		Log.i(TAG, "resDir: " + resDir.getAbsolutePath());
	}

	// Export every cascade; they all land in the same directory, so the
	// first exported path is enough to derive it.
	String path = Utils.exportResource(context, R.raw.haarcascade_frontalface_alt2);
	Utils.exportResource(context, R.raw.haarcascade_mcs_lefteye);
	Utils.exportResource(context, R.raw.haarcascade_mcs_mouth);
	Utils.exportResource(context, R.raw.haarcascade_mcs_righteye);

	String classifierDir = path.substring(0, path.lastIndexOf('/'));
	Log.d(TAG, "cascade data directory: " + classifierDir);
	return classifierDir;
}
 
Example 13
Project: DigitalImageProcessing   File: DetectLightActivity.java   Source Code and License 6 votes vote down vote up
/**
 * Finds the brightest spot in {@code bitmap} by blurring a grayscale copy
 * and taking the global maximum, then circles it on the original image and
 * displays both the annotated image and the blurred intermediate.
 *
 * Fix: Utils.bitmapToMat yields an RGBA Mat, so the grayscale conversion
 * must use COLOR_RGBA2GRAY — the original used COLOR_BGR2GRAY, which
 * applies the red and blue luminance weights to the wrong channels.
 */
private void detectLight(Bitmap bitmap, double gaussianBlurValue) {
    Mat rgba = new Mat();
    Utils.bitmapToMat(bitmap, rgba);

    Mat grayScaleGaussianBlur = new Mat();
    Imgproc.cvtColor(rgba, grayScaleGaussianBlur, Imgproc.COLOR_RGBA2GRAY);
    Imgproc.GaussianBlur(grayScaleGaussianBlur, grayScaleGaussianBlur, new Size(gaussianBlurValue, gaussianBlurValue), 0);

    // The global maximum of the blurred image marks the light source.
    Core.MinMaxLocResult minMaxLocResultBlur = Core.minMaxLoc(grayScaleGaussianBlur);
    Imgproc.circle(rgba, minMaxLocResultBlur.maxLoc, 30, new Scalar(255), 3);

    // For visualization purposes only — not production-quality display code.
    Bitmap resultBitmap = Bitmap.createBitmap(rgba.cols(), rgba.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(rgba, resultBitmap);
    BitmapHelper.showBitmap(this, resultBitmap, detectLightImageView);

    Bitmap blurryBitmap = Bitmap.createBitmap(grayScaleGaussianBlur.cols(), grayScaleGaussianBlur.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(grayScaleGaussianBlur, blurryBitmap);
    BitmapHelper.showBitmap(this, blurryBitmap, gaussianBlurImageView);

}
 
Example 14
Project: Android-Face-Recognition-with-Deep-Learning-Library   File: TensorFlow.java   Source Code and License 6 votes vote down vote up
/**
 * Renders {@code img} into an inputSize x inputSize bitmap and unpacks its
 * RGB channels into a float array normalised by imageMean / imageStd.
 *
 * NOTE(review): the output array is sized with {@code channels}, but the
 * fill loop uses a hard-coded stride of 3 (R, G, B). If {@code channels}
 * is ever not 3, the two disagree — confirm channels == 3 at the call site.
 */
private float[] getPixels(Mat img){
    Bitmap bmp = Bitmap.createBitmap(inputSize, inputSize, Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(img, bmp);
    int[] intValues = new int[inputSize * inputSize];
    bmp.getPixels(intValues, 0, inputSize, 0, 0, inputSize, inputSize);

    float[] floatValues = new float[inputSize * inputSize * channels];
    for (int i = 0; i < intValues.length; ++i) {
        final int val = intValues[i];
        // Unpack the ARGB int into normalised R, G, B floats.
        floatValues[i * 3 + 0] = (((float)((val >> 16) & 0xFF)) - imageMean) / imageStd;
        floatValues[i * 3 + 1] = (((float)((val >> 8) & 0xFF)) - imageMean) / imageStd;
        floatValues[i * 3 + 2] = (((float)(val & 0xFF)) - imageMean) / imageStd;
    }

    return floatValues;
}
 
Example 15
Project: CVScanner   File: DocumentDetector.java   Source Code and License 6 votes vote down vote up
/**
 * Runs contour-based document detection on a camera frame and returns the
 * detected quadrilateral wrapped in a Document, or null when no usable
 * quad is found.
 */
Document detectDocument(Frame frame){
    Size imageSize = new Size(frame.getMetadata().getWidth(), frame.getMetadata().getHeight());

    // Pull the frame into OpenCV, extract contours, then free the Mat early.
    Mat src = new Mat();
    Utils.bitmapToMat(frame.getBitmap(), src);
    List<MatOfPoint> contours = CVProcessor.findContours(src);
    src.release();

    if (contours.isEmpty()) {
        return null;
    }

    CVProcessor.Quadrilateral quad = CVProcessor.getQuadrilateral(contours, imageSize);
    if (quad == null) {
        return null;
    }

    // Scale the quad's corners back up to the original image resolution.
    quad.points = CVProcessor.getUpscaledPoints(quad.points, CVProcessor.getScaleRatio(imageSize));
    return new Document(frame, quad);
}
 
Example 16
Project: CVScanner   File: StepByStepTestActivity.java   Source Code and License 6 votes vote down vote up
/**
 * Receives the image picked by the user, loads it into {@code mData}
 * (releasing any previous Mat first), and kicks off the test pipeline.
 */
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);

    // Only proceed for a successful pick with a payload.
    boolean picked = requestCode == REQ_PICK_IMAGE && resultCode == RESULT_OK && data != null;
    if (!picked) {
        return;
    }

    try {
        Bitmap image = BitmapFactory.decodeStream(getContentResolver().openInputStream(data.getData()));

        // Drop the previous frame before loading the new one.
        if (mData != null) {
            mData.release();
            mData = null;
        }
        mData = new Mat();
        Utils.bitmapToMat(image, mData);
        image.recycle();

        startTests();
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    }
}
 
Example 17
Project: Handy-Meter   File: MeasurementsActivity.java   Source Code and License 6 votes vote down vote up
/**
 * Builds the measurement screen: renders the two captured scene images into
 * bitmaps and wires the first one into the touchable preview.
 *
 * Fix: reuses the local Mat references instead of calling the static
 * getters a second time for each matToBitmap call.
 */
@Override
public void onCreate(Bundle savedInstanceState) {
	super.onCreate(savedInstanceState);
	setContentView(R.layout.measurements_layout);

	Mat firstImage = MainActivity.getFirstImage();
	currentScene = Bitmap.createBitmap(firstImage.cols(),
			firstImage.rows(), Bitmap.Config.ARGB_8888);
	Utils.matToBitmap(firstImage, currentScene);

	Mat secondImage = MainActivity.getSecondImage();
	currentScene2 = Bitmap.createBitmap(secondImage.cols(),
			secondImage.rows(), Bitmap.Config.ARGB_8888);
	Utils.matToBitmap(secondImage, currentScene2);

	ImageView view = (ImageView) findViewById(R.id.current_scene);
	view.setImageBitmap(currentScene);
	view.setOnTouchListener(this);
}
 
Example 18
Project: second_eyes   File: toPathRigidTransformationTest.java   Source Code and License 6 votes vote down vote up
/**
 * Verifies that withRigidTransformation returns the same frame instance
 * when the current and previous frames are identical.
 *
 * Fix: the original wrote the second bitmap into {@code currFrame} again
 * (copy-paste error), leaving {@code prevFrame} empty and making the
 * equality assertion vacuous; the second conversion now fills prevFrame.
 */
@Test
public void object_to_left() {
    Assert.assertTrue(true);
    ExtractPath mExtractPath = new ExtractPath();

    Path myPath = new Path();
    Bitmap bmp = null;

    Mat currFrame = new Mat();
    Utils.bitmapToMat(bmp, currFrame);

    Mat prevFrame = new Mat();
    Utils.bitmapToMat(bmp, prevFrame);
    Assert.assertEquals(currFrame, prevFrame);
    Assert.assertSame(currFrame, mExtractPath.withRigidTransformation(currFrame, prevFrame, myPath));

}
 
Example 19
Project: Pixtern-Library   File: CardRealtimeActivity.java   Source Code and License 6 votes vote down vote up
/**
 * Draws the outline of the Pattern to the preview screen.
 *
 * Loads the card-frame drawable, draws green outline rectangles for each
 * text pattern and for the face region (the stored coordinates are
 * normalised with their axes swapped and the x axis mirrored), and pushes
 * the result into the overlay ImageView.
 */
public void drawTemplateOutline() {
	//Load the template outline 
	cardType = DataHolder.getInstance().getData();
	if(!cardType.equals("-DETECT-")) getTemplate();
	Bitmap icon = BitmapFactory.decodeResource(this.getResources(),
			R.drawable.card_frame);
	// NOTE(review): bitmapToMat replaces this Mat with an RGBA (CV_8UC4)
	// one, so the CV_8U type and zero Scalar given here are irrelevant.
	Mat outline = new Mat ( icon.getHeight(), icon.getWidth(), CvType.CV_8U, new Scalar(0, 0, 0));
	Utils.bitmapToMat(icon, outline);
	Imgproc.cvtColor(outline, outline, Imgproc.COLOR_BGRA2RGBA);
	if(showOutline) {
		// Pattern coordinates are normalised with (x, y) swapped: the
		// stored y scales with image width (mirrored via cols - y*cols),
		// the stored x scales with image height.
		for (String key : card.getPatternMap().keySet()) {
			if(card.getPattern(key).getResource().matches("[t][e][x][t].*")) Core.rectangle(outline, new Point(Math.abs(outline.cols() - (card.getPattern(key).getTl().y * outline.cols())), card.getPattern(key).getTl().x * outline.rows()),new Point(Math.abs(outline.cols() - (card.getPattern(key).getBr().y * outline.cols())), card.getPattern(key).getBr().x * outline.rows()), new Scalar(0, 255, 0, 255), 1);
			//Core.rectangle(outline, new Point(Math.abs(outline.cols() - (card.getPattern(key).getTl().y * outline.cols())), card.getPattern(key).getTl().x * outline.rows()),new Point(Math.abs(outline.cols() - (card.getPattern(key).getBr().y * outline.cols())), card.getPattern(key).getBr().x * outline.rows()), new Scalar(255, 0, 0, 0), 1);
		}
		// Face region rectangle, same coordinate convention as above.
		Core.rectangle(outline, new Point(Math.abs(outline.cols() - (facetl.y * outline.cols())), facetl.x * outline.rows()),new Point(Math.abs(outline.cols() - (facebr.y * outline.cols())),facebr.x * outline.rows()), new Scalar(0, 255, 0, 255), 1);
	}
	// Convert back to BGRA for matToBitmap and hand off to the ImageView.
	Bitmap bimage = Bitmap.createBitmap(outline.cols(),  outline.rows(),Bitmap.Config.ARGB_8888);
	Imgproc.cvtColor(outline, outline, Imgproc.COLOR_RGBA2BGRA);
	Utils.matToBitmap(outline, bimage);
	ImageView imgV = (ImageView )findViewById(R.id.frame_det);
	imgV.setImageBitmap(bimage);
}
 
Example 20
Project: Pixtern-Library   File: CardValidationActivity.java   Source Code and License 6 votes vote down vote up
/**
 * Runs on the UI thread after the background task finishes: on success it
 * swaps the progress wheel for the processed image and starts text
 * extraction; on failure it returns an error payload to the caller and
 * finishes the activity.
 */
@Override
protected void onPostExecute(String result) {
	if (!result.equals("success")) {
		// Report the failure back to the calling activity and bail out.
		Log.w("Error", "Loading image error");
		theText.put("Error", "Image Loading");
		Intent theResult = new Intent();
		theResult.putExtra("theValidation", theText);
		setResult(Activity.RESULT_OK, theResult);
		finish();
		return;
	}

	// Hide the spinner and reveal the processed image.
	ProgressWheel pBar = (ProgressWheel) findViewById(R.id.progressBar);
	pBar.setProgress(0);
	pBar.setVisibility(View.GONE);

	ImageView imageView = (ImageView) findViewById(R.id.imgView);
	imageView.setVisibility(View.VISIBLE);
	Utils.matToBitmap(showBit, bimage);
	imageView.setImageBitmap(bimage);

	incremented = 0;
	new TextOperation().execute("");
}
 
Example 21
Project: AndroidFaceRecognizer   File: FaceRecognitionActivity.java   Source Code and License 6 votes vote down vote up
/**
 * Stores the newly captured face, renders it into the preview thumbnail,
 * and restores the capture button; the recognize button is animated in the
 * first time a face is captured.
 */
private void onFaceCaptured(Mat faceMat){
	capturingImage = false;

	// The recognize button only appears on the very first capture.
	final boolean willRecognizeButtonAppear = capturedMat == null;
	capturedMat = faceMat;

	// RGB_565 keeps the thumbnail small; matToBitmap handles conversion.
	final Bitmap bmp = Bitmap.createBitmap(faceMat.cols(), faceMat.rows(), Bitmap.Config.RGB_565);
	Utils.matToBitmap(faceMat, bmp);

	FaceRecognitionActivity.this.runOnUiThread(new Runnable() {
		@Override
		public void run() {
			capturedImage.setImageBitmap(bmp);
			captureButton.setBackgroundResource(R.drawable.capturestart);
			captureButton.setText("Start Capturing");
			if (willRecognizeButtonAppear) {
				bringRecognizeButtonAnimatedly();
			}
		}
	});
}
 
Example 22
Project: cvRecognition   File: PersonRecognizer.java   Source Code and License 6 votes vote down vote up
/**
 * Saves the given face Mat as "&lt;mPath&gt;&lt;description&gt;-&lt;count&gt;.jpg",
 * scaled to WIDTH x HEIGHT, and bumps the sample counter.
 *
 * Fixes: the output stream is now closed on every path via
 * try-with-resources (the original leaked it when compress threw), and the
 * exception is logged properly instead of concatenating getCause() and
 * getMessage(), which could print "null null".
 */
void add(Mat m, String description) {
	Bitmap bmp = Bitmap.createBitmap(m.width(), m.height(), Bitmap.Config.ARGB_8888);
	Utils.matToBitmap(m, bmp);
	bmp = Bitmap.createScaledBitmap(bmp, WIDTH, HEIGHT, false);

	try (FileOutputStream f =
			new FileOutputStream(mPath + description + "-" + count + ".jpg", true)) {
		count++;
		bmp.compress(Bitmap.CompressFormat.JPEG, 100, f);
	} catch (Exception e) {
		Log.e("error", "Failed to save face sample", e);
		e.printStackTrace();
	}
}
 
Example 23
Project: Camdroid   File: OCRProcessor.java   Source Code and License 6 votes vote down vote up
/**
 * Renders the OCR output mat into the bitmap, then overlays up to
 * {@code lines} lines of the recognised text before handing the bitmap to
 * the drawer.
 *
 * Fix: the line counter {@code c} was never incremented, so the cap either
 * fired after the first line (lines &lt;= 1) or never at all; it now counts
 * drawn lines. Also removes a stray double semicolon.
 */
@Override
protected void draw() {
    Utils.matToBitmap(out, this.bmp);
    Canvas canvas = new Canvas(this.bmp);

    int y = bounds.height();
    int c = 1;
    for (String line : simpleText.split("\n")) {
        canvas.drawText(line, bounds.width(), y, paint);
        y = y + bounds.height();
        if (c >= lines)
            break;
        c++;
    }

    this.drawer.drawBitmap(this.bmp);

}
 
Example 24
Project: EyeDroid   File: DetectAndDrawPupilFilter.java   Source Code and License 6 votes vote down vote up
/**
 * Pipeline filter step: runs native pupil detection/drawing on the RGB and
 * gray mats taken from the incoming bundle and emits a new bundle holding
 * the annotated frame as a Bitmap.
 */
@Override
protected Bundle execute(Bundle arg0) {
	Mat rgba = (Mat) arg0.get(Constants.SOURCE_MAT_RGB);
	Mat gray = (Mat) arg0.get(Constants.SOURCE_MAT_GRAY);
	long pupilRoiRect = (Long) arg0.get(Constants.PUPIL_ROI_RECT);
	long detectedCircles = (Long) arg0.get(Constants.DETECTED_CIRCLES);
	arg0 = null; // drop the reference so the input bundle can be collected

	// The native call draws the detected pupil directly into rgba.
	detectPupilAndDraw(rgba.getNativeObjAddr(), gray.getNativeObjAddr(),
			pupilRoiRect, detectedCircles);

	Log.i(RGB2GRAYFilter.TAG, this.getFilterName() + "start");

	Bitmap bitmap = Bitmap.createBitmap(rgba.cols(), rgba.rows(),
			Config.ARGB_8888);
	Utils.matToBitmap(rgba, bitmap);

	Bundle newBundle = new Bundle();
	newBundle.put(Constants.SINK_BITMAP, bitmap);
	Log.i(RGB2GRAYFilter.TAG, this.getFilterName());
	return newBundle;
}
 
Example 25
Project: EyeDroid   File: InputStreamUSBCamera.java   Source Code and License 6 votes vote down vote up
/**
 * Read frame and create a bundle.
 *
 * Fix: the success log message had "Could not open" pasted onto it from the
 * error-path string; it now reports only the new-frame event.
 *
 * @return Frame Bundle
 * @throws IOException if the USB camera is not open
 */
@Override
public Bundle read() throws IOException {
	// obtaining a camera image (pixel data are stored in an array in JNI).
	if (cameraExists) {
		processCamera();
		// camera image to bmp
		pixeltobmp(bmp);

		Mat mat = new Mat();
		Utils.bitmapToMat(bmp, mat);

		Bundle bundle = new Bundle();
		Log.i(TAG, "Usb camera got new frame " + cameraId);
		bundle.put(Constants.SOURCE_MAT_RGB, mat);
		bundle.put(Constants.SOURCE_BITMAP, bmp);
		return bundle;
	}
	throw new IOException("Usb camera is not open. Could not read frame");
}
 
Example 26
Project: opencv-documentscanner-android   File: ScanActivity.java   Source Code and License 5 votes vote down vote up
/**
 * Loads the selected image off the UI thread, runs the native document
 * scan on it, converts the result back to a bitmap, and finishes with
 * RESULT_OK. The progress bar is shown while the container is hidden.
 *
 * Fix: Mat's constructor takes (rows, cols) — height first — but the
 * original passed width/height swapped (masked only because bitmapToMat
 * reallocates the destination).
 */
@Override
public void onBitmapSelect(final Uri uri) {
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            mProgressBar.setVisibility(View.VISIBLE);
            mContainer.setVisibility(View.INVISIBLE);
        }
    });

    new Thread(new Runnable() {
        @Override
        public void run() {
            try {
                Bitmap bitmap = MediaStore.Images.Media.getBitmap(getContentResolver(), uri);
                //First convert Bitmap to Mat
                Mat imageMat = new Mat(bitmap.getHeight(), bitmap.getWidth(), CvType.CV_8U, new Scalar(4));
                Bitmap myBitmap32 = bitmap.copy(Bitmap.Config.ARGB_8888, true);
                Utils.bitmapToMat(myBitmap32, imageMat);

                doWithMat(imageMat.getNativeObjAddr());

                //Then convert the processed Mat to Bitmap
                mBitmap = Bitmap.createBitmap(imageMat.cols(), imageMat.rows(), Bitmap.Config.ARGB_8888);
                Utils.matToBitmap(imageMat, mBitmap);
                Intent data = new Intent();
                setResult(Activity.RESULT_OK, data);
                finish();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }).start();
}
 
Example 27
Project: fingerblox   File: ImageProcessing.java   Source Code and License 5 votes vote down vote up
/**
 * Prepares a camera frame for fingerprint processing: grayscale, rotate
 * clockwise (transpose + 180 flip), resize to the surface view, elliptical
 * crop, histogram equalisation, then RGBA with a transparent background
 * outside the ellipse.
 *
 * Fix: the histogram-equalisation destination was sized from the original
 * frame's rows instead of the cropped frame's (harmless only because
 * equalizeHist reallocates, but misleading); it now uses frameCropped.
 *
 * @param frame  input BGR camera frame
 * @param width  target width of the surface view
 * @param height target height of the surface view
 * @return an ARGB_4444 bitmap of the processed, elliptically-masked frame
 */
static Bitmap preprocess(Mat frame, int width, int height) {
    // convert to grayscale
    Mat frameGrey = new Mat(height, width, CvType.CV_8UC1);
    Imgproc.cvtColor(frame, frameGrey, Imgproc.COLOR_BGR2GRAY, 1);

    // rotate
    Mat rotatedFrame = new Mat(width, height, frameGrey.type());
    Core.transpose(frameGrey, rotatedFrame);
    Core.flip(rotatedFrame, rotatedFrame, Core.ROTATE_180);

    // resize to match the surface view
    Mat resizedFrame = new Mat(width, height, rotatedFrame.type());
    Imgproc.resize(rotatedFrame, resizedFrame, new Size(width, height));

    // crop to the ellipse mask
    Mat ellipseMask = getEllipseMask(width, height);
    Mat frameCropped = new Mat(resizedFrame.rows(), resizedFrame.cols(), resizedFrame.type(), new Scalar(0));
    resizedFrame.copyTo(frameCropped, ellipseMask);

    // histogram equalisation (destination sized from the cropped frame)
    Mat frameHistEq = new Mat(frameCropped.rows(), frameCropped.cols(), frameCropped.type());
    Imgproc.equalizeHist(frameCropped, frameHistEq);

    // convert back to rgba
    Mat frameRgba = new Mat(frameHistEq.rows(), frameHistEq.cols(), CvType.CV_8UC4);
    Imgproc.cvtColor(frameHistEq, frameRgba, Imgproc.COLOR_GRAY2RGBA);

    // crop again so pixels outside the ellipse stay fully transparent
    Mat frameAlpha = new Mat(frameRgba.rows(), frameRgba.cols(), CvType.CV_8UC4, new Scalar(0, 0, 0, 0));
    frameRgba.copyTo(frameAlpha, ellipseMask);

    // convert to bitmap
    Bitmap bmp = Bitmap.createBitmap(frameAlpha.cols(), frameAlpha.rows(), Bitmap.Config.ARGB_4444);
    Utils.matToBitmap(frameAlpha, bmp);

    return bmp;
}
 
Example 28
Project: Android-Code-Demos   File: MainActivity.java   Source Code and License 5 votes vote down vote up
/**
 * Converts the currently selected bitmap to grayscale in place and shows it.
 */
private void convertGray() {
    Mat src = new Mat();
    Utils.bitmapToMat(selectbp, src);

    // Drop the alpha channel first, then collapse to a single channel.
    Mat bgr = new Mat();
    Imgproc.cvtColor(src, bgr, Imgproc.COLOR_BGRA2BGR);
    Log.i("CV", "image type:" + (bgr.type() == CvType.CV_8UC3));

    Mat gray = new Mat();
    Imgproc.cvtColor(bgr, gray, Imgproc.COLOR_BGR2GRAY);

    // Write the result back into the selected bitmap and display it.
    Utils.matToBitmap(gray, selectbp);
    myImageView.setImageBitmap(selectbp);
}
 
Example 29
Project: android-imaging-utils   File: ImagingUtils.java   Source Code and License 5 votes vote down vote up
/**
 * Resize, crop and rotate the camera preview frame.
 *
 * @param bytes  raw NV21 preview data
 * @param width  original preview width
 * @param height original preview height
 * @param params image processing parameters (rotation, crop rect, target size)
 * @return the processed preview as an ARGB_8888 bitmap
 */
public static Bitmap rotateCropAndResizePreview(byte[] bytes, int width, int height, PreviewResizeParams params)
{
    // NV21 layout: full-resolution Y plane plus interleaved UV at half
    // resolution, hence height * 3/2 rows of single-channel data.
    Mat yuv = new Mat(height * 3 / 2, width, CvType.CV_8UC1);
    yuv.put(0, 0, bytes);

    Mat rgba = new Mat(height, width, CvType.CV_8UC4);
    Imgproc.cvtColor(yuv, rgba, Imgproc.COLOR_YUV2RGBA_NV21);

    // Rotate clockwise, then take the crop rectangle as a submat view.
    Mat rotated = rotateFrame(rgba, params.rotation);
    Mat cropped = new Mat(rotated, new Rect(params.cropX, params.cropY, params.cropWidth, params.cropHeight));

    // Only resize when a non-empty target size was requested.
    Size targetSize = new Size(params.newWidth, params.newHeight);
    if (targetSize.area() > 0)
        Imgproc.resize(cropped, cropped, targetSize);


    Bitmap result = Bitmap.createBitmap(cropped.cols(), cropped.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(cropped, result);

    return result;
}
 
Example 30
Project: TinyPlanetMaker   File: PlanetMaker.java   Source Code and License 5 votes vote down vote up
/**
 * Loads a bitmap as the planet-maker input image, converting it to a
 * bitmapToMat-compatible config (ARGB_8888 or RGB_565) when necessary, and
 * (re)initialises the derived images.
 */
public void setInputImage(Bitmap bitmap, boolean isPano) {

        if (bitmap == null)
            return;

        mIsFaded = !isPano;
        mIsImageLoaded = true;
        mInputImage = new Mat();

        // Utils.bitmapToMat only accepts ARGB_8888 or RGB_565. getConfig()
        // returns null for non-public formats, which also need a copy.
        Bitmap.Config config = bitmap.getConfig();
        boolean needsCopy = config == null
                || (config != Bitmap.Config.ARGB_8888 && config != Bitmap.Config.RGB_565);
        if (needsCopy)
            bitmap = bitmap.copy(Bitmap.Config.ARGB_8888, false);

        Utils.bitmapToMat(bitmap, mInputImage);

        mOriginalImage = mInputImage.clone();

        // Output size follows the larger input dimension, capped at the max.
        mFullOutputSize = Math.min(
                Math.max(mInputImage.width(), mInputImage.height()),
                MainActivity.MAX_IMG_SIZE);

        initImages();

    }
 
Example 31
Project: Ftc2018RelicRecovery   File: FtcVuforia.java   Source Code and License 5 votes vote down vote up
/**
 * This method gets a frame from the frame queue and returns the image that matches the format specified by the
 * configVideoSource method.
 *
 * Fix: on InterruptedException the thread's interrupt status is restored
 * via Thread.currentThread().interrupt() instead of being swallowed.
 *
 * @param frame specifies the frame object to hold image.
 * @return true if success, false otherwise.
 */
@Override
public boolean getFrame(Mat frame)
{
    boolean success = false;

    try
    {
        VuforiaLocalizer.CloseableFrame closeableFrame = localizer.getFrameQueue().take();

        // Scan the frame's images for the RGB565 one at the expected size.
        for (int i = 0; i < closeableFrame.getNumImages(); i++)
        {
            Image image = closeableFrame.getImage(i);
            if (image.getWidth() == imageWidth && image.getHeight() == imageHeight &&
                    image.getFormat() == PIXEL_FORMAT.RGB565)
            {
                Bitmap bm = Bitmap.createBitmap(image.getWidth(), image.getHeight(), Bitmap.Config.RGB_565);
                bm.copyPixelsFromBuffer(image.getPixels());
                Utils.bitmapToMat(bm, frame);
                break;
            }
        }

        closeableFrame.close();
        success = true;
    }
    catch (InterruptedException e)
    {
        // Preserve the interrupt so callers can observe it.
        Thread.currentThread().interrupt();
        e.printStackTrace();
    }

    return success;
}
 
Example 32
Project: Paper-Melody   File: ImageUtil.java   Source Code and License 5 votes vote down vote up
/**
 * Converts a BGR Mat to an ARGB_8888 Bitmap.
 *
 * Fixes: the bitmap is now filled from the RGBA-converted mat — the
 * original passed the untouched BGR mat to matToBitmap, discarding the
 * conversion and swapping the red/blue channels. The Mat constructor also
 * now receives (rows, cols) in the correct order.
 *
 * @param bgr a 3-channel BGR image
 * @return the image as an ARGB_8888 bitmap
 */
public static Bitmap imageToBitmap(Mat bgr) {
    Mat rgbaMat = new Mat(bgr.rows(), bgr.cols(), CvType.CV_8UC4);
    Imgproc.cvtColor(bgr, rgbaMat, Imgproc.COLOR_BGR2RGBA, 0);
    Bitmap bmp = Bitmap.createBitmap(rgbaMat.cols(), rgbaMat.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(rgbaMat, bmp);
    return bmp;
}
 
Example 33
Project: MOAAP   File: MainActivity.java   Source Code and License 5 votes vote down vote up
/**
 * Runs Canny edge detection (thresholds 10/100) on the grayscale version of
 * {@code originalMat} and displays the edge map.
 */
void Canny() {
    // Canny expects a single-channel input.
    Mat gray = new Mat();
    Imgproc.cvtColor(originalMat, gray, Imgproc.COLOR_BGR2GRAY);

    Mat edges = new Mat();
    Imgproc.Canny(gray, edges, 10, 100);

    // Show the edge map.
    Utils.matToBitmap(edges, currentBitmap);
    imageView.setImageBitmap(currentBitmap);
}
 
Example 34
Project: MOAAP   File: MainActivity.java   Source Code and License 5 votes vote down vote up
/** Detects circles via the Hough transform on Canny edges and displays them. */
void HoughCircles() {
    // Grayscale then edge-detect; HoughCircles works on the edge map here.
    Mat gray = new Mat();
    Imgproc.cvtColor(originalMat, gray, Imgproc.COLOR_BGR2GRAY);

    Mat edges = new Mat();
    Imgproc.Canny(gray, edges, 10, 100);

    // Min distance between detected centers: rows / 15.
    Mat detected = new Mat();
    Imgproc.HoughCircles(edges, detected, Imgproc.CV_HOUGH_GRADIENT, 1, edges.rows() / 15);//, grayMat.rows() / 8);

    // Single-channel canvas the same size as the edge map.
    Mat canvas = new Mat();
    canvas.create(edges.rows(), edges.cols(), CvType.CV_8UC1);

    // Each detected circle is a (x, y, radius) triple in column i.
    int numCircles = detected.cols();
    for (int idx = 0; idx < numCircles; idx++) {
        double[] params = detected.get(0, idx);

        double cx = params[0];
        double cy = params[1];
        int radius = (int) params[2];

        // Draw the circle outline onto the canvas.
        Imgproc.circle(canvas, new Point(cx, cy), radius, new Scalar(255, 0, 0), 1);
    }

    // Convert the canvas back to a bitmap and display it.
    Utils.matToBitmap(canvas, currentBitmap);
    imageView.setImageBitmap(currentBitmap);
}
 
Example 35
Project: MOAAP   File: MainActivity.java   Source Code and License 5 votes vote down vote up
/** Finds Harris corners and marks them with randomly-shaded circles. */
void HarrisCorner() {
    // Corner detection runs on a grayscale copy of the source.
    Mat gray = new Mat();
    Imgproc.cvtColor(originalMat, gray, Imgproc.COLOR_BGR2GRAY);

    // Harris response: blockSize=2, apertureSize=3, k=0.04.
    Mat harrisResponse = new Mat();
    Imgproc.cornerHarris(gray, harrisResponse, 2, 3, 0.04);

    // Normalize the response to 0..255 and take its absolute scaled version.
    Mat normalized = new Mat();
    Core.normalize(harrisResponse, normalized, 0, 255, Core.NORM_MINMAX);
    Mat marked = new Mat();
    Core.convertScaleAbs(normalized, marked);

    // Circle every pixel whose normalized response exceeds 150.
    Random rng = new Random();
    for (int col = 0; col < normalized.cols(); col++) {
        for (int row = 0; row < normalized.rows(); row++) {
            double[] response = normalized.get(row, col);
            if (response[0] > 150) {
                Imgproc.circle(marked, new Point(col, row), 5, new Scalar(rng.nextInt(255)), 2);
            }
        }
    }

    // Display the annotated corner image.
    Utils.matToBitmap(marked, currentBitmap);
    imageView.setImageBitmap(currentBitmap);
}
 
Example 36
Project: FtcSamples   File: FtcVuforia.java   Source Code and License 5 votes vote down vote up
/**
 * This method gets a frame from the frame queue and returns the image that matches the format specified by the
 * configVideoSource method.
 *
 * @param frame specifies the frame object to hold image.
 * @return true if success, false otherwise.
 */
@Override
public boolean getFrame(Mat frame)
{
    boolean success = false;

    try
    {
        // Blocks until Vuforia produces the next frame.
        VuforiaLocalizer.CloseableFrame closeableFrame = localizer.getFrameQueue().take();

        try
        {
            for (int i = 0; i < closeableFrame.getNumImages(); i++)
            {
                Image image = closeableFrame.getImage(i);
                // Only accept the image matching the configured size and RGB565 format.
                if (image.getWidth() == imageWidth && image.getHeight() == imageHeight &&
                        image.getFormat() == PIXEL_FORMAT.RGB565)
                {
                    Bitmap bm = Bitmap.createBitmap(image.getWidth(), image.getHeight(), Bitmap.Config.RGB_565);
                    bm.copyPixelsFromBuffer(image.getPixels());
                    Utils.bitmapToMat(bm, frame);
                    break;
                }
            }
            success = true;
        }
        finally
        {
            // Release the frame back to Vuforia even if conversion above throws;
            // previously an exception here leaked the frame.
            closeableFrame.close();
        }
    }
    catch (InterruptedException e)
    {
        // Restore the interrupt status so callers can observe the interruption.
        Thread.currentThread().interrupt();
        e.printStackTrace();
    }

    return success;
}
 
Example 37
Project: snobot-2017   File: JavaVisionAlgorithm.java   Source Code and License 5 votes vote down vote up
/**
 * Processes one camera frame: loads the current tuning thresholds from preferences,
 * optionally records the raw frame, and dispatches to the peg or rope pipeline
 * depending on which camera is active.
 *
 * @param aMat          the frame to process.
 * @param aSystemTimeNs capture timestamp in nanoseconds.
 * @return the processed image from the selected pipeline.
 */
public Mat processImage(Mat aMat, long aSystemTimeNs) {

    Pair<Integer, Integer> hue = mPreferences.getHueThreshold();
    Pair<Integer, Integer> sat = mPreferences.getSatThreshold();
    Pair<Integer, Integer> lum = mPreferences.getLumThreshold();

    Pair<Integer, Integer> filterWidth = mPreferences.getFilterWidthThreshold();
    Pair<Integer, Integer> filterHeight = mPreferences.getFilterHeightThreshold();
    Pair<Integer, Integer> filterVertices = mPreferences.getFilterVerticesThreshold();
    Pair<Float, Float> filterRatio = mPreferences.getFilterRatioRange();

    mFilterParams.minWidth = filterWidth.first;
    mFilterParams.maxWidth = filterWidth.second;
    mFilterParams.minHeight = filterHeight.first;
    mFilterParams.maxHeight = filterHeight.second;
    mFilterParams.minVertices = filterVertices.first;
    mFilterParams.maxVertices = filterVertices.second;
    mFilterParams.minRatio = filterRatio.first;
    mFilterParams.maxRatio = filterRatio.second;

    mPegGripAlgorithm.setHslThreshold(hue.first, hue.second, sat.first, sat.second, lum.first, lum.second);

    // Perf fix: only pay for the full-frame bitmap allocation and Mat->Bitmap
    // conversion when we actually record; previously this ran on every frame.
    if (mRecordingImages && mRobotConnected)
    {
        Bitmap bitmap = Bitmap.createBitmap(aMat.cols(), aMat.rows(), Bitmap.Config.ARGB_8888);
        Utils.matToBitmap(aMat, bitmap);
        writeImage(bitmap);
    }

    // Front camera looks at the peg target; otherwise run the rope pipeline.
    if (cameraDirection == CameraBridgeViewBase.CAMERA_ID_FRONT)
    {
        return processPegImage(aMat, aSystemTimeNs);
    }
    else
    {
        return processRopeImage(aMat, aSystemTimeNs);
    }
}
 
Example 38
Project: snobot-2017   File: SnobotVisionGLSurfaceView.java   Source Code and License 5 votes vote down vote up
@Override
protected void processTexture(int texIn, int texOut, int width, int height, long aSystemTimeNs) {

    // RGBA8888 = 4 bytes per pixel. Replaces the old hard-coded 1228800,
    // which was only correct for a 640x480 frame (640 * 480 * 4).
    int size = width * height * 4;
    byte[] byteBuff = new byte[size];
    ByteBuffer screenData = ByteBuffer.wrap(byteBuff);
    GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, screenData);
    Mat originalMat = new Mat(height, width, CvType.CV_8UC4);
    originalMat.put(0, 0, screenData.array());

    ByteBuffer outputTextureData;
    Mat imageToDisplay;

    if (mVisionAlgorithm != null) {
        // Run the vision pipeline and repackage its output for the GL texture.
        imageToDisplay = mVisionAlgorithm.processImage(originalMat, aSystemTimeNs);
        byte[] byteBuff2 = new byte[size];
        imageToDisplay.get(0, 0, byteBuff2);
        outputTextureData = ByteBuffer.wrap(byteBuff2);
    }
    else
    {
        // No algorithm configured: pass the raw frame straight through.
        imageToDisplay = originalMat;
        outputTextureData = screenData;
    }

    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texOut);
    GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, outputTextureData);


    // Convert to JPEG and sent to server
    Bitmap bitmap = Bitmap.createBitmap(imageToDisplay.cols(), imageToDisplay.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(imageToDisplay, bitmap);

    ByteArrayOutputStream os = new ByteArrayOutputStream();
    bitmap.compress(Bitmap.CompressFormat.JPEG, 90, os);

    MjpgServer.getInstance().update(os.toByteArray());
}
 
Example 39
Project: Face-detect-framework   File: DetectFaceFromVideo.java   Source Code and License 5 votes vote down vote up
/**
 * Extracts frames from the chosen video every 100 ms, runs face detection on each,
 * collects the downscaled results into framePackage, and kicks off video re-encoding.
 *
 * @param mChosenFile absolute path of the video file to analyse.
 */
private void initAnalysisVideo(String mChosenFile) {
    // Re-entrancy guard; previously this flag was never cleared, so the
    // method could only ever run once per process.
    if (carregandoVideo) {
        return;
    }
    carregandoVideo = true;

    FileInputStream inputStream = null;
    MediaMetadataRetriever retriever = null;
    MediaPlayer mp = null;
    ProgressDialog pd = null;
    try {
        File videoFile = new File(mChosenFile);
        Uri videoFileUri = Uri.parse(videoFile.toString());

        retriever = new MediaMetadataRetriever();
        inputStream = new FileInputStream(videoFile.getAbsolutePath());
        retriever.setDataSource(inputStream.getFD());

        ProgressDialog dialog = new ProgressDialog(activity);
        dialog.setMessage("aguarde!");
        dialog.show();
        pd = dialog;

        // Media player is only used to read the video duration.
        mp = MediaPlayer.create(activity, videoFileUri);
        int millis = mp.getDuration();

        // Sample one frame every 100 ms (getFrameAtTime expects microseconds).
        for (int i = 0; i < millis; i += 100) {
            Bitmap bitmap = retriever.getFrameAtTime(i * 1000, MediaMetadataRetriever.OPTION_CLOSEST);
            Mat mat = new Mat();
            Utils.bitmapToMat(bitmap, mat);
            Utils.matToBitmap(faceDetectController.detectface(mat), bitmap);

            framePackage.addImage(Bitmap.createScaledBitmap(bitmap, bitmap.getWidth() / 3, bitmap.getHeight() / 3, false));
        }

        new MyVideoProgressBar(activity, framePackage, Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM) + "/out.mp4", "MOBILE").execute();
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        // Release everything even on failure; previously the dialog, player,
        // retriever and stream all leaked when an exception was thrown.
        if (pd != null) {
            pd.dismiss();
        }
        if (mp != null) {
            mp.release();
        }
        if (retriever != null) {
            retriever.release();
        }
        if (inputStream != null) {
            try {
                inputStream.close();
            } catch (IOException ignored) {
                // Best-effort cleanup; nothing useful to do here.
            }
        }
        carregandoVideo = false;
    }
}
 
Example 40
Project: Face-detect-framework   File: FaceDetect.java   Source Code and License 5 votes vote down vote up
/**
 * Loads the image behind the given content Uri, runs face detection on it, and
 * draws the detection result back into the same bitmap.
 *
 * @param selectedImageUri content Uri of the picture to analyse.
 * @return the annotated bitmap, or null if the image could not be loaded.
 */
public Bitmap analysePicture(Uri selectedImageUri) {
    Bitmap bitmap = null;
    // bitmapToMat (re)allocates the destination to match the bitmap, so the
    // old arbitrary 100x100 pre-sized Mat served no purpose.
    Mat imageMat = new Mat();
    try {
        bitmap = MediaStore.Images.Media.getBitmap(context.getContentResolver(), selectedImageUri);
        Utils.bitmapToMat(bitmap, imageMat);
        Utils.matToBitmap(detectface(imageMat), bitmap);

    } catch (IOException e) {
        e.printStackTrace();
    }
    return bitmap;
}