Java Code Examples for org.opencv.core.CvType#CV_8UC4

The following examples show how to use org.opencv.core.CvType#CV_8UC4. You can go to the original project or source file by following the links above each example.
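Before the examples, here is a minimal sketch of what CV_8UC4 means in practice: a Mat whose elements are 8-bit unsigned values with 4 channels, which matches the RGBA layout that Utils.bitmapToMat produces from an Android Bitmap. The class and method names below are illustrative only, and the snippet assumes the OpenCV library has already been initialized (for example via OpenCVLoader).

import android.graphics.Bitmap;

import org.opencv.android.Utils;
import org.opencv.core.CvType;
import org.opencv.core.Mat;

public class Cv8uc4Sketch {
    /**
     * Wraps an Android Bitmap in an 8-bit, 4-channel (RGBA) Mat.
     */
    public static Mat toRgbaMat(Bitmap bitmap) {
        // Mat takes rows (height) first, then cols (width); the memory is allocated but not filled yet.
        Mat rgba = new Mat(bitmap.getHeight(), bitmap.getWidth(), CvType.CV_8UC4);
        // Copy the bitmap's pixels into the Mat in RGBA order.
        Utils.bitmapToMat(bitmap, rgba);
        return rgba;
    }
}

Most of the examples below follow this same pattern before handing the Mat to Imgproc or Core routines.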
Example 1
Source File: CVProcessor.java    From CVScanner with GNU General Public License v3.0
/**
 * Applies a four-point perspective transform to obtain a top-down view.
 *
 * @param src - actual image
 * @param pts - corner points (top-left, top-right, bottom-right, bottom-left), scaled up with respect to the actual image
 * @return the perspective-corrected image as a new Mat
 */
public static Mat fourPointTransform( Mat src , Point[] pts ) {
    Point tl = pts[0];
    Point tr = pts[1];
    Point br = pts[2];
    Point bl = pts[3];

    double widthA = Math.sqrt(Math.pow(br.x - bl.x, 2) + Math.pow(br.y - bl.y, 2));
    double widthB = Math.sqrt(Math.pow(tr.x - tl.x, 2) + Math.pow(tr.y - tl.y, 2));

    double dw = Math.max(widthA, widthB);
    int maxWidth = Double.valueOf(dw).intValue();


    double heightA = Math.sqrt(Math.pow(tr.x - br.x, 2) + Math.pow(tr.y - br.y, 2));
    double heightB = Math.sqrt(Math.pow(tl.x - bl.x, 2) + Math.pow(tl.y - bl.y, 2));

    double dh = Math.max(heightA, heightB);
    int maxHeight = Double.valueOf(dh).intValue();

    Mat doc = new Mat(maxHeight, maxWidth, CvType.CV_8UC4);

    Mat src_mat = new Mat(4, 1, CvType.CV_32FC2);
    Mat dst_mat = new Mat(4, 1, CvType.CV_32FC2);

    src_mat.put(0, 0, tl.x, tl.y, tr.x, tr.y, br.x, br.y, bl.x, bl.y);
    dst_mat.put(0, 0, 0.0, 0.0, dw, 0.0, dw, dh, 0.0, dh);

    Mat m = Imgproc.getPerspectiveTransform(src_mat, dst_mat);

    Imgproc.warpPerspective(src, doc, m, doc.size());

    return doc;
}
 
Example 2
Source File: PyramidActivity.java    From MOAAP with MIT License
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent imageReturnedIntent) {
    super.onActivityResult(requestCode, resultCode, imageReturnedIntent);

    switch(requestCode) {
        case SELECT_PHOTO:
            if(resultCode == RESULT_OK){
                try {
                    final Uri imageUri = imageReturnedIntent.getData();
                    final InputStream imageStream = getContentResolver().openInputStream(imageUri);
                    final Bitmap selectedImage = BitmapFactory.decodeStream(imageStream);
                    src = new Mat(selectedImage.getHeight(), selectedImage.getWidth(), CvType.CV_8UC4);
                    Utils.bitmapToMat(selectedImage, src);
                    srcSelected = true;
                    bGaussianPyrUp.setEnabled(true);
                    bGaussianPyrDown.setEnabled(true);
                    bLaplacianPyr.setEnabled(true);
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                }
            }
            break;
    }
}
 
Example 3
Source File: Puzzle15Processor.java    From OpenCV-AndroidSamples with MIT License
public synchronized void prepareGameSize(int width, int height) {
    mRgba15 = new Mat(height, width, CvType.CV_8UC4);
    mCells15 = new Mat[GRID_AREA];

    for (int i = 0; i < GRID_SIZE; i++) {
        for (int j = 0; j < GRID_SIZE; j++) {
            int k = i * GRID_SIZE + j;
            mCells15[k] = mRgba15.submat(i * height / GRID_SIZE, (i + 1) * height / GRID_SIZE, j * width / GRID_SIZE, (j + 1) * width / GRID_SIZE);
        }
    }

    for (int i = 0; i < GRID_AREA; i++) {
        Size s = Imgproc.getTextSize(Integer.toString(i + 1), 3/* CV_FONT_HERSHEY_COMPLEX */, 1, 2, null);
        mTextHeights[i] = (int) s.height;
        mTextWidths[i] = (int) s.width;
    }
}
 
Example 4
Source File: MainActivity.java    From MOAAP with MIT License
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent imageReturnedIntent) {
    //Put it there, just in case:)
    super.onActivityResult(requestCode, resultCode, imageReturnedIntent);

    switch(requestCode) {
        case SELECT_PHOTO:
            if(resultCode == RESULT_OK && read_external_storage_granted){
                try {
                    final Uri imageUri = imageReturnedIntent.getData();
                    final InputStream imageStream = getContentResolver().openInputStream(imageUri);
                    final Bitmap selectedImage = BitmapFactory.decodeStream(imageStream);
                    src = new Mat(selectedImage.getHeight(), selectedImage.getWidth(), CvType.CV_8UC4);
                    Utils.bitmapToMat(selectedImage, src);
                    src_gray = new Mat(selectedImage.getHeight(), selectedImage.getWidth(), CvType.CV_8UC1);
                    switch (ACTION_MODE) {
                        case HomeActivity.GAUSSIAN_BLUR:
                            Imgproc.GaussianBlur(src, src, new Size(9, 9), 0);
                            break;
                        case HomeActivity.MEAN_BLUR:
                            Imgproc.blur(src, src, new Size(9, 9));
                            break;
                        case HomeActivity.MEDIAN_BLUR:
                            Imgproc.medianBlur(src, src, 9);
                            break;
                        case HomeActivity.SHARPEN:
                            Mat kernel = new Mat(3, 3, CvType.CV_16SC1);
                            //int[] values = {0, -1, 0, -1, 5, -1, 0, -1, 0};
                            Log.d("imageType", CvType.typeToString(src.type()) + "");
                            kernel.put(0, 0, 0, -1, 0, -1, 5, -1, 0, -1, 0);
                            Imgproc.filter2D(src, src, src_gray.depth(), kernel);
                            break;
                        case HomeActivity.DILATE:
                            Imgproc.cvtColor(src, src_gray, Imgproc.COLOR_BGR2GRAY);
                            Imgproc.threshold(src_gray, src_gray, 100, 255, Imgproc.THRESH_BINARY);
                            Mat kernelDilate = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(3, 3));
                            Imgproc.dilate(src_gray, src_gray, kernelDilate);
                            Imgproc.cvtColor(src_gray, src, Imgproc.COLOR_GRAY2RGBA, 4);
                            break;
                        case HomeActivity.ERODE:
                            Imgproc.cvtColor(src, src_gray, Imgproc.COLOR_BGR2GRAY);
                            Imgproc.threshold(src_gray, src_gray, 100, 255, Imgproc.THRESH_BINARY);
                            Mat kernelErode = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(5, 5));
                            Imgproc.erode(src_gray, src_gray, kernelErode);
                            Imgproc.cvtColor(src_gray, src, Imgproc.COLOR_GRAY2RGBA, 4);
                            break;
                        case HomeActivity.THRESHOLD:
                            Imgproc.cvtColor(src, src_gray, Imgproc.COLOR_BGR2GRAY);
                            Imgproc.threshold(src_gray, src_gray, 100, 255, Imgproc.THRESH_BINARY);
                            Imgproc.cvtColor(src_gray, src, Imgproc.COLOR_GRAY2RGBA, 4);
                            break;
                        case HomeActivity.ADAPTIVE_THRESHOLD:
                            Imgproc.cvtColor(src, src_gray, Imgproc.COLOR_BGR2GRAY);
                            Imgproc.adaptiveThreshold(src_gray, src_gray, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C, Imgproc.THRESH_BINARY, 3, 0);
                            Imgproc.cvtColor(src_gray, src, Imgproc.COLOR_GRAY2RGBA, 4);
                            break;
                    }
                    Bitmap processedImage = Bitmap.createBitmap(src.cols(), src.rows(), Bitmap.Config.ARGB_8888);
                    Log.i("imageType", CvType.typeToString(src.type()) + "");
                    Utils.matToBitmap(src, processedImage);
                    ivImage.setImageBitmap(selectedImage);
                    ivImageProcessed.setImageBitmap(processedImage);
                    Log.i("process", "process done");
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                }
            }
            break;
    }
}
 
Example 5
Source File: OldMainActivity.java    From pasm-yolov3-Android with GNU General Public License v3.0
@Override
public void onCameraViewStarted(int width, int height) {
    mRgba = new Mat(height, width, CvType.CV_8UC4);
    mRgbaF = new Mat(width, height, CvType.CV_8UC4);
    mRgbaT = new Mat(width, height, CvType.CV_8UC4);
    Log.i(TAG, "height : " + height);
    Log.i(TAG, "width : " + width);
    //Log.i(TAG, "mOpenCvCameraView size (w,h):" + mOpenCvCameraView.getWidth() +  " - " + mOpenCvCameraView.getHeight());
}
 
Example 6
Source File: OldMainActivity.java    From pasm-yolov3-Android with GNU General Public License v3.0
@Override
protected List<Classifier.Recognition> doInBackground(Mat... mats) {
    Mat mRgbaTemp = mats[0];
    if (myBitmap != null){
        smallBitmap = Bitmap.createScaledBitmap(myBitmap, INPUT_WIDTH, INPUT_HEIGHT, false);
    }else{

        smallBitmap = Bitmap.createBitmap(INPUT_WIDTH, INPUT_HEIGHT, Bitmap.Config.RGB_565);
        Bitmap bigBitmap = Bitmap.createBitmap(mRgbaF.width(), mRgbaF.height(), Bitmap.Config.RGB_565);
        Mat mRgbaFixedSize = new Mat(INPUT_WIDTH, INPUT_HEIGHT, CvType.CV_8UC4);

        Core.transpose(mRgbaTemp, mRgbaT);
        Imgproc.resize(mRgbaT, mRgbaF, mRgbaF.size(), 0,0, 0);
        Core.flip(mRgbaF, mRgbaTemp, 1 );

        Imgproc.resize(mRgbaTemp, mRgbaFixedSize, new Size(INPUT_WIDTH, INPUT_HEIGHT), 0,0, 0);

        Utils.matToBitmap(mRgbaFixedSize, smallBitmap);
        Utils.matToBitmap(mRgbaTemp, bigBitmap);

        this.publishProgress(bigBitmap);


        //OLD Toast.makeText(getApplicationContext(), "Nessuna immagine caricata", Toast.LENGTH_SHORT).show();
    }

    List<Classifier.Recognition> recognitions = classifier.recognizeImage(smallBitmap);
    return  recognitions;
}
 
Example 7
Source File: MainActivity.java    From SimpleDocumentScanner-Android with MIT License
/**
 * NOTE:
 * Based on http://www.pyimagesearch.com/2014/08/25/4-point-opencv-getperspective-transform-example/
 *
 * @param src the source image
 * @param pts the four corner points (upper-left, upper-right, lower-right, lower-left)
 * @return the perspective-corrected image as a new Mat
 */
private Mat fourPointTransform(Mat src, Point[] pts) {
    double ratio = src.size().height / (double) MAX_HEIGHT;

    Point ul = pts[0];
    Point ur = pts[1];
    Point lr = pts[2];
    Point ll = pts[3];

    double widthA = Math.sqrt(Math.pow(lr.x - ll.x, 2) + Math.pow(lr.y - ll.y, 2));
    double widthB = Math.sqrt(Math.pow(ur.x - ul.x, 2) + Math.pow(ur.y - ul.y, 2));
    double maxWidth = Math.max(widthA, widthB) * ratio;

    double heightA = Math.sqrt(Math.pow(ur.x - lr.x, 2) + Math.pow(ur.y - lr.y, 2));
    double heightB = Math.sqrt(Math.pow(ul.x - ll.x, 2) + Math.pow(ul.y - ll.y, 2));
    double maxHeight = Math.max(heightA, heightB) * ratio;

    Mat resultMat = new Mat(Double.valueOf(maxHeight).intValue(), Double.valueOf(maxWidth).intValue(), CvType.CV_8UC4);

    Mat srcMat = new Mat(4, 1, CvType.CV_32FC2);
    Mat dstMat = new Mat(4, 1, CvType.CV_32FC2);
    srcMat.put(0, 0, ul.x * ratio, ul.y * ratio, ur.x * ratio, ur.y * ratio, lr.x * ratio, lr.y * ratio, ll.x * ratio, ll.y * ratio);
    dstMat.put(0, 0, 0.0, 0.0, maxWidth, 0.0, maxWidth, maxHeight, 0.0, maxHeight);

    Mat M = Imgproc.getPerspectiveTransform(srcMat, dstMat);
    Imgproc.warpPerspective(src, resultMat, M, resultMat.size());

    srcMat.release();
    dstMat.release();
    M.release();

    return resultMat;
}
 
Example 8
Source File: ColorBlobDetectionActivity.java    From OpenCV-AndroidSamples with MIT License
public void onCameraViewStarted(int width, int height) {
    mRgba = new Mat(height, width, CvType.CV_8UC4);
    mDetector = new ColorBlobDetector();
    mSpectrum = new Mat();
    mBlobColorRgba = new Scalar(255);
    mBlobColorHsv = new Scalar(255);
    SPECTRUM_SIZE = new Size(200, 64);
    CONTOUR_COLOR = new Scalar(255,0,0,255);
}
 
Example 9
Source File: ImageSaveTask.java    From CVScanner with GNU General Public License v3.0
/**
 * Override this method to perform a computation on a background thread. The
 * specified parameters are the parameters passed to {@link #execute}
 * by the caller of this task.
 * <p/>
 * This method can call {@link #publishProgress} to publish updates
 * on the UI thread.
 *
 * @param params The parameters of the task.
 * @return A result, defined by the subclass of this task.
 * @see #onPreExecute()
 * @see #onPostExecute
 * @see #publishProgress
 */
@Override
protected String doInBackground(Void... params) {
    Size imageSize = new Size(image.getWidth(), image.getHeight());
    Mat imageMat = new Mat(imageSize, CvType.CV_8UC4);
    Utils.bitmapToMat(image, imageMat);

    image.recycle();

    Mat croppedImage = CVProcessor.fourPointTransform(imageMat, points);
    imageMat.release();

    Mat enhancedImage = CVProcessor.adjustBirghtnessAndContrast(croppedImage, 1);
    croppedImage.release();

    enhancedImage = CVProcessor.sharpenImage(enhancedImage);

    String imagePath = null;
    try {
        imagePath = Util.saveImage(mContext,
                "IMG_CVScanner_" + System.currentTimeMillis(), enhancedImage, false);
        enhancedImage.release();
        Util.setExifRotation(mContext, Util.getUriFromPath(imagePath), rotation);
    } catch (IOException e) {
        e.printStackTrace();
    }

    return imagePath;
}
 
Example 10
Source File: MainActivity.java    From open-quartz with Apache License 2.0
public void onCameraViewStarted(int width, int height) {
    mGray = new Mat(height, width, CvType.CV_8UC4);
    mRgba = new Mat(height, width, CvType.CV_8UC4);
    mIntermediateMat = new Mat(height, width, CvType.CV_8UC4);
    mSize0 = new Size();

    //		mResolutionList = mOpenCvCameraView.getResolutionList();
    //		for (int i = 0; i < mResolutionList.size(); i++)
    //		{
    //			Log.e("TAG",  i + " " + mResolutionList.get(i).height + " " + mResolutionList.get(i).width);
    //		}
}
 
Example 11
Source File: MainActivity.java    From MOAAP with MIT License
@Override
public void onCameraViewStarted(int width, int height) {
    mRgba = new Mat(height, width, CvType.CV_8UC4);
}
 
Example 12
Source File: CVProcessor.java    From CVScanner with GNU General Public License v3.0
public static List<MatOfPoint> findContours(Mat src){
    Mat img = src.clone();

    //find contours
    double ratio = getScaleRatio(img.size());
    int width = (int) (img.size().width / ratio);
    int height = (int) (img.size().height / ratio);
    Size newSize = new Size(width, height);
    Mat resizedImg = new Mat(newSize, CvType.CV_8UC4);
    Imgproc.resize(img, resizedImg, newSize);
    img.release();

    Imgproc.medianBlur(resizedImg, resizedImg, 7);

    Mat cannedImg = new Mat(newSize, CvType.CV_8UC1);
    Imgproc.Canny(resizedImg, cannedImg, 70, 200, 3, true);
    resizedImg.release();

    Imgproc.threshold(cannedImg, cannedImg, 70, 255, Imgproc.THRESH_OTSU);

    Mat dilatedImg = new Mat(newSize, CvType.CV_8UC1);
    Mat morph = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(3, 3));
    Imgproc.dilate(cannedImg, dilatedImg, morph, new Point(-1, -1), 2, 1, new Scalar(1));
    cannedImg.release();
    morph.release();

    ArrayList<MatOfPoint> contours = new ArrayList<>();
    Mat hierarchy = new Mat();
    Imgproc.findContours(dilatedImg, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
    hierarchy.release();
    dilatedImg.release();

    Log.d(TAG, "contours found: " + contours.size());

    Collections.sort(contours, new Comparator<MatOfPoint>() {
        @Override
        public int compare(MatOfPoint o1, MatOfPoint o2) {
            return Double.valueOf(Imgproc.contourArea(o2)).compareTo(Imgproc.contourArea(o1));
        }
    });

    return contours;
}
 
Example 13
Source File: CameraViewerFragment.java    From PixaToon with GNU General Public License v3.0
@Override
public void onCameraViewStarted(int width, int height) {
    mInputMat = new Mat(height, width, CvType.CV_8UC4);
    mOutputMat = new Mat(height, width, CvType.CV_8UC4);
    mStarting = true;
}
 
Example 14
Source File: DocumentScannerActivity.java    From Document-Scanner with GNU General Public License v3.0
@Override
public void onPreviewFrame(byte[] data, Camera camera) {

    android.hardware.Camera.Size pictureSize = camera.getParameters().getPreviewSize();

    Log.d(TAG, "onPreviewFrame - received image " + pictureSize.width + "x" + pictureSize.height
            + " focused: " + mFocused + " imageprocessor: " + (imageProcessorBusy ? "busy" : "available"));

    if (mFocused && !imageProcessorBusy) {
        setImageProcessorBusy(true);
        Mat yuv = new Mat(new Size(pictureSize.width, pictureSize.height * 1.5), CvType.CV_8UC1);
        yuv.put(0, 0, data);

        Mat mat = new Mat(new Size(pictureSize.width, pictureSize.height), CvType.CV_8UC4);
        Imgproc.cvtColor(yuv, mat, Imgproc.COLOR_YUV2RGBA_NV21, 4);

        yuv.release();

        sendImageProcessorMessage("previewFrame", new PreviewFrame(mat, autoMode, !(autoMode || scanClicked)));
    }

}
 
Example 15
Source File: Tutorial2Activity.java    From OpenCV-AndroidSamples with MIT License
public void onCameraViewStarted(int width, int height) {
    mRgba = new Mat(height, width, CvType.CV_8UC4);
    mIntermediateMat = new Mat(height, width, CvType.CV_8UC4);
    mGray = new Mat(height, width, CvType.CV_8UC1);
}
 
Example 16
Source File: ProcessHelper.java    From OpenCV-android with Apache License 2.0
/**
 * Line detection
 *
 * @param origin   original bitmap
 * @param callback result callback
 */
public void hough(Bitmap origin, ProcessCallback callback) {
    if (origin == null) {
        return;
    }
    try {
        Bitmap bitmap = Bitmap.createBitmap(origin.getWidth(), origin.getHeight(), Bitmap.Config.RGB_565);
        Utils.bitmapToMat(origin, rgbMat);
        Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);
        Mat edges = new Mat();
        Mat src = new Mat(origin.getHeight(), origin.getWidth(), CvType.CV_8UC4);
        Utils.bitmapToMat(origin, src);
        Mat lines = new Mat();
        // Copy the source so the original data is left untouched
        Mat origination = new Mat(src.size(), CvType.CV_8UC1);
        src.copyTo(origination);
        // Get the edge map via Canny
        Imgproc.cvtColor(origination, grayMat, Imgproc.COLOR_RGBA2GRAY);
        Imgproc.Canny(grayMat, edges, 50, 200);
        // Detect line segments
        Imgproc.HoughLinesP(edges, lines, 1, Math.PI / 180, 10, 0, 10);
        Mat houghLines = new Mat();
        houghLines.create(edges.rows(), edges.cols(), CvType.CV_8UC1);
        // Draw the detected lines
        for (int i = 0; i < lines.rows(); i++) {
            double[] points = lines.get(i, 0);
            if (null != points) {
                double x1, y1, x2, y2;
                x1 = points[0];
                y1 = points[1];
                x2 = points[2];
                y2 = points[3];
                Point pt1 = new Point(x1, y1);
                Point pt2 = new Point(x2, y2);
                Imgproc.line(houghLines, pt1, pt2, new Scalar(55, 100, 195), 3);
            }
        }
        Utils.matToBitmap(houghLines, bitmap);
        callback.onSuccess(bitmap);
    } catch (Exception e) {
        callback.onFailed(e.getMessage());
    }
}
 
Example 17
Source File: OpenNoteCameraView.java    From react-native-documentscanner-android with MIT License
@Override
public void onPreviewFrame(byte[] data, Camera camera) {

    Camera.Size pictureSize = camera.getParameters().getPreviewSize();

    if ( mFocused && ! imageProcessorBusy ) {
        setImageProcessorBusy(true);
        Mat yuv = new Mat(new org.opencv.core.Size(pictureSize.width, pictureSize.height * 1.5), CvType.CV_8UC1);
        yuv.put(0, 0, data);

        Mat mat = new Mat(new org.opencv.core.Size(pictureSize.width, pictureSize.height), CvType.CV_8UC4);
        Imgproc.cvtColor(yuv, mat, Imgproc.COLOR_YUV2RGBA_NV21, 4);

        yuv.release();

        if(!manualCapture){
            sendImageProcessorMessage("previewFrame", new PreviewFrame( mat, autoMode, !(autoMode) ));
        }

    }

}
 
Example 18
Source File: MainActivity.java    From pasm-yolov3-Android with GNU General Public License v3.0
@Override
protected Mat doInBackground(Mat... mats) {
    Mat mRgbaTemp = mats[0];
    ImageProcessor processor = new ImageProcessor(getApplicationContext(), classifier.getLabels());
    if (myBitmap != null){
        smallBitmap = Bitmap.createScaledBitmap(myBitmap, INPUT_SIZE, INPUT_SIZE, false);
        Display display = getWindowManager().getDefaultDisplay();
        Point size = new Point();
        display.getSize(size);
        int width = size.x;
        int height = size.y;

        float ratio = (float)myBitmap.getWidth() / (float)myBitmap.getHeight();
        Bitmap reducedBitmap = Bitmap.createScaledBitmap(myBitmap, (int) (height * ratio), height, false);

        this.publishProgress(reducedBitmap);
        processor.loadImage(myBitmap, INPUT_SIZE, INPUT_SIZE);
    }else{
        smallBitmap = Bitmap.createBitmap(INPUT_SIZE, INPUT_SIZE, Bitmap.Config.RGB_565);
        Bitmap bigBitmap = Bitmap.createBitmap(mRgbaF.width(), mRgbaF.height(), Bitmap.Config.RGB_565);
        Mat mRgbaFixedSize = new Mat(INPUT_SIZE, INPUT_SIZE, CvType.CV_8UC4);

        Core.transpose(mRgbaTemp, mRgbaT);
        Imgproc.resize(mRgbaT, mRgbaF, mRgbaF.size(), 0,0, 0);
        Core.flip(mRgbaF, mRgbaTemp, 1 );

        Imgproc.resize(mRgbaTemp, mRgbaFixedSize, new Size(INPUT_SIZE, INPUT_SIZE), 0,0, 0);

        Utils.matToBitmap(mRgbaFixedSize, smallBitmap);
        Utils.matToBitmap(mRgbaTemp, bigBitmap);

        this.publishProgress(bigBitmap);
        processor.loadImage(bigBitmap, INPUT_SIZE, INPUT_SIZE);
        //OLD Toast.makeText(getApplicationContext(), "Nessuna immagine caricata", Toast.LENGTH_SHORT).show();
    }

    List<Classifier.Recognition> recognitions = classifier.recognizeImage(smallBitmap);
    Mat mat = processor.drawBoxes(recognitions, 0.2);
    imageSaver.save(mat); // remove for realtime processing!
    return mat;
}
 
Example 19
Source File: MainActivity.java    From effective_android_sample with Apache License 2.0
/**
 * Extract the object using OpenCV
 * @param bmpOrig original bitmap
 */
private void extractObject(Bitmap bmpOrig) {

    // First, display the original bitmap
    mImageView1.setImageBitmap(bmpOrig);
    // Get the height and width
    int height = bmpOrig.getHeight();
    int width = bmpOrig.getWidth();
    
    // Prepare the OpenCV Mat
    Mat matOrig = new Mat(height,width,CvType.CV_8UC4); 
    // Convert the bitmap into the OpenCV Mat
    Utils.bitmapToMat(bmpOrig, matOrig);
    
    /**
     * Convert to grayscale
     */
    Mat matGray = new Mat(height,width,CvType.CV_8UC1);
    Imgproc.cvtColor(matOrig, matGray, Imgproc.COLOR_RGB2GRAY);
    // Display
    Bitmap bmpGray = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(matGray, bmpGray);
    mImageView2.setImageBitmap(bmpGray);

    /**
     * Grayscale → binarization
     */
    Mat matBlack = new Mat(height,width,CvType.CV_8UC1);
    // Binarize
    Imgproc.threshold(matGray, matBlack, sTH, 255, Imgproc.THRESH_BINARY);
    // Display
    Bitmap bmpBlack = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(matBlack, bmpBlack);
    mImageView3.setImageBitmap(bmpBlack);

    /**
     * Grayscale → binarization → filled contours
     */
    // Extract the contours
    ArrayList<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Mat hierarchy = new Mat(matBlack.height(),matBlack.width(),CvType.CV_8UC1);
    int mode = Imgproc.RETR_EXTERNAL;
    int method = Imgproc.CHAIN_APPROX_SIMPLE;

    // Extract the contours
    Imgproc.findContours(matBlack, contours, hierarchy, mode, method);
    // Draw the contours
    Scalar color = new Scalar(255.f, 0.f, 0.f, 0.f);
    Imgproc.drawContours(matBlack, contours, -1, color, 2);
    // Display
    Bitmap bmpContour = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    org.opencv.android.Utils.matToBitmap(matBlack, bmpContour);
    mImageView4.setImageBitmap(bmpContour);

    // Fill the interior of the extracted contours
    Imgproc.drawContours(matBlack, contours, -1, color, -1);
    // Display
    Bitmap bmpContour2 = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    org.opencv.android.Utils.matToBitmap(matBlack, bmpContour2);
    mImageView5.setImageBitmap(bmpContour2);

    
    /**
     * Use the binarized mask to pull out only the object
     */
    // Start from a zeroed Mat so unmasked pixels stay black instead of holding uninitialized data
    Mat matObject = Mat.zeros(height, width, CvType.CV_8UC4);
    Core.add(matObject, matOrig, matObject, matBlack);
    // Display
    Bitmap bmpObject = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    org.opencv.android.Utils.matToBitmap(matObject, bmpObject);
    mImageView6.setImageBitmap(bmpObject);

    /**
     * Crop just the bounding rectangle of the extracted object into a bitmap
     */
    Rect rect = Imgproc.boundingRect(contours.get(0));
    Mat matCut = new Mat(matObject, rect);
    // Display
    Bitmap bmpCut = Bitmap.createBitmap(matCut.cols(), matCut.rows(), Bitmap.Config.ARGB_8888);
    org.opencv.android.Utils.matToBitmap(matCut, bmpCut);
    mImageView7.setImageBitmap(bmpCut);
}
 
Example 20
Source File: Color.java    From FTCVision with MIT License
/**
 * Create a blank RGBA matrix (8-bit color info * 4 channels)
 *
 * @param width  Matrix width
 * @param height Matrix height
 * @return RGBA image matrix
 */
public static Mat createMatRGBA(int width, int height) {
    return new Mat(height, width, CvType.CV_8UC4);
}