Java Code Examples for org.opencv.imgproc.Imgproc#getStructuringElement()

The following examples show how to use org.opencv.imgproc.Imgproc#getStructuringElement(). Each snippet comes from an open-source project; the source file, project, and license are noted above it.
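
Before the project examples, a minimal sketch of the call itself: getStructuringElement(shape, ksize) returns a kernel Mat that erode, dilate, and morphologyEx accept as their structuring-element argument; an optional third argument sets the anchor (default: the kernel center). The variable names and the final morphologyEx call below are illustrative only.

Mat rect = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(3, 3));       // 3x3 all-ones rectangle
Mat ellipse = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(5, 5)); // 5x5 elliptical kernel
Mat cross = Imgproc.getStructuringElement(Imgproc.MORPH_CROSS, new Size(3, 3), new Point(1, 1)); // cross with an explicit center anchor
// any of these kernels can then be passed to the morphology functions, e.g.
Imgproc.morphologyEx(src, dst, Imgproc.MORPH_OPEN, ellipse);
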
Example 1
Source File: GeneralUtils.java    From super-cloudops with Apache License 2.0
/**
 * Erode/dilate the image. Erosion and dilation work well on images without noise; use with caution.
 */
public static Mat erodeDilateImg(Mat src) {
	Mat outImage = new Mat();

	// The smaller the size, the smaller the erosion step and the closer the result stays to the original image
	Mat structImage = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(2, 2));

	/**
	 * Image erosion: each region of the image is scanned with the given kernel and the
	 * minimum value under the kernel is written to the anchor position. Erosion shrinks
	 * the bright regions: bright pixels are eaten away by their darker neighbors.
	 */
	Imgproc.erode(src, outImage, structImage, new Point(-1, -1), 2);
	src = outImage;

	/**
	 * Dilation: each region of the image is scanned with the given kernel and the
	 * maximum value under the kernel is written to the anchor position. Dilation grows
	 * the bright regions: bright pixels expand into their darker neighbors.
	 */
	Imgproc.dilate(src, outImage, structImage, new Point(-1, -1), 1);
	src = outImage;

	return src;
}
 
Example 2
Source File: GeneralUtils.java    From super-cloudops with Apache License 2.0
/**
 * Erode the image.
 * 
 * @param src source image
 * @return eroded image
 */
public static Mat erode(Mat src) {
	Mat outImage = new Mat();

	// The smaller the size, the smaller the erosion step and the closer the result stays to the original image
	Mat structImage = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(2, 2));

	/**
	 * Image erosion: each region of the image is scanned with the given kernel and the
	 * minimum value under the kernel is written to the anchor position. Erosion shrinks
	 * the bright regions: bright pixels are eaten away by their darker neighbors.
	 */
	Imgproc.erode(src, outImage, structImage, new Point(-1, -1), 1);
	src = outImage;

	return src;
}
 
Example 3
Source File: MainActivity.java    From MOAAP with MIT License
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent imageReturnedIntent) {
    //Put it there, just in case:)
    super.onActivityResult(requestCode, resultCode, imageReturnedIntent);

    switch(requestCode) {
        case SELECT_PHOTO:
            if(resultCode == RESULT_OK && read_external_storage_granted){
                try {
                    final Uri imageUri = imageReturnedIntent.getData();
                    final InputStream imageStream = getContentResolver().openInputStream(imageUri);
                    final Bitmap selectedImage = BitmapFactory.decodeStream(imageStream);
                    src = new Mat(selectedImage.getHeight(), selectedImage.getWidth(), CvType.CV_8UC4);
                    Utils.bitmapToMat(selectedImage, src);
                    src_gray = new Mat(selectedImage.getHeight(), selectedImage.getWidth(), CvType.CV_8UC1);
                    switch (ACTION_MODE) {
                        case HomeActivity.GAUSSIAN_BLUR:
                            Imgproc.GaussianBlur(src, src, new Size(9, 9), 0);
                            break;
                        case HomeActivity.MEAN_BLUR:
                            Imgproc.blur(src, src, new Size(9, 9));
                            break;
                        case HomeActivity.MEDIAN_BLUR:
                            Imgproc.medianBlur(src, src, 9);
                            break;
                        case HomeActivity.SHARPEN:
                            Mat kernel = new Mat(3, 3, CvType.CV_16SC1);
                            //int[] values = {0, -1, 0, -1, 5, -1, 0, -1, 0};
                            Log.d("imageType", CvType.typeToString(src.type()) + "");
                            kernel.put(0, 0, 0, -1, 0, -1, 5, -1, 0, -1, 0);
                            Imgproc.filter2D(src, src, src_gray.depth(), kernel);
                            break;
                        case HomeActivity.DILATE:
                            Imgproc.cvtColor(src, src_gray, Imgproc.COLOR_BGR2GRAY);
                            Imgproc.threshold(src_gray, src_gray, 100, 255, Imgproc.THRESH_BINARY);
                            Mat kernelDilate = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(3, 3));
                            Imgproc.dilate(src_gray, src_gray, kernelDilate);
                            Imgproc.cvtColor(src_gray, src, Imgproc.COLOR_GRAY2RGBA, 4);
                            break;
                        case HomeActivity.ERODE:
                            Imgproc.cvtColor(src, src_gray, Imgproc.COLOR_BGR2GRAY);
                            Imgproc.threshold(src_gray, src_gray, 100, 255, Imgproc.THRESH_BINARY);
                            Mat kernelErode = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(5, 5));
                            Imgproc.erode(src_gray, src_gray, kernelErode);
                            Imgproc.cvtColor(src_gray, src, Imgproc.COLOR_GRAY2RGBA, 4);
                            break;
                        case HomeActivity.THRESHOLD:
                            Imgproc.cvtColor(src, src_gray, Imgproc.COLOR_BGR2GRAY);
                            Imgproc.threshold(src_gray, src_gray, 100, 255, Imgproc.THRESH_BINARY);
                            Imgproc.cvtColor(src_gray, src, Imgproc.COLOR_GRAY2RGBA, 4);
                            break;
                        case HomeActivity.ADAPTIVE_THRESHOLD:
                            Imgproc.cvtColor(src, src_gray, Imgproc.COLOR_BGR2GRAY);
                            Imgproc.adaptiveThreshold(src_gray, src_gray, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C, Imgproc.THRESH_BINARY, 3, 0);
                            Imgproc.cvtColor(src_gray, src, Imgproc.COLOR_GRAY2RGBA, 4);
                            break;
                    }
                    Bitmap processedImage = Bitmap.createBitmap(src.cols(), src.rows(), Bitmap.Config.ARGB_8888);
                    Log.i("imageType", CvType.typeToString(src.type()) + "");
                    Utils.matToBitmap(src, processedImage);
                    ivImage.setImageBitmap(selectedImage);
                    ivImageProcessed.setImageBitmap(processedImage);
                    Log.i("process", "process done");
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                }
            }
            break;
    }
}
 
Example 4
Source File: OpenCVoperation.java    From Human-hair-detection with Apache License 2.0
public void skinSegmentation()
{
    matrix3_skindetection = new Mat(matrix2_grabcut.size(),matrix2_grabcut.type()); 
    matrix3_skindetection.setTo(new Scalar(0,0,255));
    Mat skinMask = new Mat();
    Mat hsvMatrix = new Mat();
    
    Scalar lower = new Scalar(0,48,80);
    Scalar upper = new Scalar(20,255,255);
    
    Imgproc.cvtColor(matrix2_grabcut, hsvMatrix, Imgproc.COLOR_BGR2HSV);
    Core.inRange(hsvMatrix, lower, upper, skinMask);
    
    Mat kernel = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(11, 11));
    Imgproc.erode(skinMask, skinMask, kernel);
    Imgproc.dilate(skinMask, skinMask, kernel);  
    
    Imgproc.GaussianBlur(skinMask,skinMask, new Size(3,3), 0);
    
    Core.bitwise_and(matrix2_grabcut, matrix2_grabcut, matrix3_skindetection, skinMask);
    Imgcodecs.imwrite(resultDirectory + skinDetectionOutput, matrix3_skindetection);
}
 
Example 5
Source File: OpenCVoperation.java    From Human-hair-detection with Apache License 2.0
public void performErosion_Dilution()
{
        erosion_dilutionMatrix = new Mat(this.matrix7_output.size(),this.matrix7_output.type());
        int erosion_size=2;
       
        //erosion (Imgproc.MORPH_ERODE has the same value, 0, as Imgproc.MORPH_RECT, so this yields a rectangular kernel)
        Mat element1 = Imgproc.getStructuringElement(Imgproc.MORPH_ERODE,  new Size(2*erosion_size + 1, 2*erosion_size+1));
        Imgproc.erode(matrix7_output, erosion_dilutionMatrix, element1);
        Imgcodecs.imwrite(resultDirectory+erosionOutput,erosion_dilutionMatrix);
        
        /*
        //dilation
        Mat element2 = Imgproc.getStructuringElement(Imgproc.MORPH_DILATE,  new Size(2*erosion_size + 1, 2*erosion_size+1));
        Imgproc.dilate(erosion_dilutionMatrix, erosion_dilutionMatrix, element2);
        Imgcodecs.imwrite(resultDirectory+this.dilationOutput,erosion_dilutionMatrix);
        */
}
 
Example 6
Source File: StepByStepTestActivity.java    From CVScanner with GNU General Public License v3.0
Mat buildSkeleton(Mat img){
    Mat morph = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_CROSS, new Size(3, 3));
    Mat skel = new Mat(img.size(), CvType.CV_8UC1, Scalar.all(0));
    Mat eroded = new Mat();
    Mat temp = new Mat();

    boolean done = false;

    do{
        Imgproc.morphologyEx(img, eroded, Imgproc.MORPH_ERODE, morph);
        Imgproc.morphologyEx(eroded, temp, Imgproc.MORPH_DILATE, morph);
        Core.subtract(img, temp, temp);
        Core.bitwise_or(skel, temp, skel);
        eroded.copyTo(img);

        done = Core.countNonZero(img) == 0;
    }while (!done);

    return skel;
}
 
Example 7
Source File: Finder.java    From SikuliNG with MIT License
public static List<Element> detectChanges(Mat base, Mat mChanged) {
  int PIXEL_DIFF_THRESHOLD = 3;
  int IMAGE_DIFF_THRESHOLD = 5;
  Mat mBaseGray = Element.getNewMat();
  Mat mChangedGray = Element.getNewMat();
  Mat mDiffAbs = Element.getNewMat();
  Mat mDiffTresh = Element.getNewMat();
  Mat mChanges = Element.getNewMat();
  List<Element> rectangles = new ArrayList<>();

  Imgproc.cvtColor(base, mBaseGray, toGray);
  Imgproc.cvtColor(mChanged, mChangedGray, toGray);
  Core.absdiff(mBaseGray, mChangedGray, mDiffAbs);
  Imgproc.threshold(mDiffAbs, mDiffTresh, PIXEL_DIFF_THRESHOLD, 0.0, Imgproc.THRESH_TOZERO);
  if (Core.countNonZero(mDiffTresh) > IMAGE_DIFF_THRESHOLD) {
    Imgproc.threshold(mDiffAbs, mDiffAbs, PIXEL_DIFF_THRESHOLD, 255, Imgproc.THRESH_BINARY);
    Imgproc.dilate(mDiffAbs, mDiffAbs, Element.getNewMat());
    Mat se = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(5, 5));
    Imgproc.morphologyEx(mDiffAbs, mDiffAbs, Imgproc.MORPH_CLOSE, se);

    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Mat mHierarchy = Element.getNewMat();
    Imgproc.findContours(mDiffAbs, contours, mHierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);
    rectangles = contoursToRectangle(contours);

    Core.subtract(mDiffAbs, mDiffAbs, mChanges);
    Imgproc.drawContours(mChanges, contours, -1, new Scalar(255));
    //logShow(mDiffAbs);
  }
  return rectangles;
}
 
Example 8
Source File: Finder.java    From SikuliX1 with MIT License
public static List<Region> findChanges(FindInput2 findInput) {
  findInput.setAttributes();
  Mat previousGray = SXOpenCV.newMat();
  Mat nextGray = SXOpenCV.newMat();
  Mat mDiffAbs = SXOpenCV.newMat();
  Mat mDiffThresh = SXOpenCV.newMat();
  Imgproc.cvtColor(findInput.getBase(), previousGray, toGray);
  Imgproc.cvtColor(findInput.getTarget(), nextGray, toGray);
  Core.absdiff(previousGray, nextGray, mDiffAbs);
  Imgproc.threshold(mDiffAbs, mDiffThresh, PIXEL_DIFF_THRESHOLD, 0.0, Imgproc.THRESH_TOZERO);

  List<Region> rectangles = new ArrayList<>();
  if (Core.countNonZero(mDiffThresh) > IMAGE_DIFF_THRESHOLD) {
    Imgproc.threshold(mDiffAbs, mDiffAbs, PIXEL_DIFF_THRESHOLD, 255, Imgproc.THRESH_BINARY);
    Imgproc.dilate(mDiffAbs, mDiffAbs, SXOpenCV.newMat());
    Mat se = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(5, 5));
    Imgproc.morphologyEx(mDiffAbs, mDiffAbs, Imgproc.MORPH_CLOSE, se);
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Mat mHierarchy = SXOpenCV.newMat();
    Imgproc.findContours(mDiffAbs, contours, mHierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);
    for (MatOfPoint contour : contours) {
      int x1 = 99999;
      int y1 = 99999;
      int x2 = 0;
      int y2 = 0;
      List<org.opencv.core.Point> points = contour.toList();
      for (Point point : points) {
        int x = (int) point.x;
        int y = (int) point.y;
        if (x < x1) x1 = x;
        if (x > x2) x2 = x;
        if (y < y1) y1 = y;
        if (y > y2) y2 = y;
      }
      Region rect = new Region(x1, y1, x2 - x1, y2 - y1);
      rectangles.add(rect);
    }
  }
  return rectangles;
}
 
Example 9
Source File: MovementDetectionProcessor.java    From Camdroid with Apache License 2.0
protected void execute() {
    out = gray();

    Imgproc.equalizeHist(out, out);

    synchronized (mog) {
        mog.apply(out, this.mask, (double) (-10 + learning_rate) / 10);
    }

    // Imgproc.MORPH_DILATE has the same value, 1, as Imgproc.MORPH_CROSS, so this yields a cross-shaped 3x3 kernel
    Mat kernel = Imgproc.getStructuringElement(Imgproc.MORPH_DILATE,
            new Size(3, 3));
    Imgproc.dilate(mask, mask, kernel);

    ArrayList<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Imgproc.findContours(this.mask, contours, new Mat(),
            Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

    double maxheight = object_max_size * this.in.height() / 100;
    double minheight = object_min_size * this.in.height() / 100;

    Iterator<MatOfPoint> each = contours.iterator();
    each = contours.iterator();
    while (each.hasNext()) {
        MatOfPoint contour = each.next();
        Rect rect = Imgproc.boundingRect(contour);
        if (rect.height > minheight && rect.height < maxheight) {
            Imgproc.rectangle(out, rect.tl(), rect.br(), new Scalar(255,
                    0, 0), 1);
        }
    }
}
 
Example 10
Source File: Filter.java    From FTCVision with MIT License
/**
 * Dilate the image using morphological transformations
 *
 * @param img    Image matrix
 * @param amount Amount to dilate (>= 0); 0 produces a 1x1 kernel and leaves the image unchanged
 */
public static void dilate(Mat img, int amount) {
    Mat kernel = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_RECT,
            new Size(2 * amount + 1, 2 * amount + 1),
            new Point(amount, amount));
    Imgproc.dilate(img, img, kernel);
}
 
Example 11
Source File: Filter.java    From FTCVision with MIT License
/**
 * Erode the image using morphological transformations
 *
 * @param img    Image matrix
 * @param amount Amount to erode (>= 0); 0 produces a 1x1 kernel and leaves the image unchanged
 */
public static void erode(Mat img, int amount) {
    Mat kernel = Imgproc.getStructuringElement(Imgproc.CV_SHAPE_RECT,
            new Size(2 * amount + 1, 2 * amount + 1),
            new Point(amount, amount));
    Imgproc.erode(img, img, kernel);
}
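
These two helpers are often chained: erosion followed by dilation with the same kernel is morphological opening (it removes small bright specks), while dilation followed by erosion is closing (it fills small dark holes). A minimal sketch using morphologyEx, assuming img is a binary Mat:

Mat kernel = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(5, 5));
// opening: erode then dilate; removes small bright noise
Imgproc.morphologyEx(img, img, Imgproc.MORPH_OPEN, kernel);
// closing: dilate then erode; fills small dark gaps
Imgproc.morphologyEx(img, img, Imgproc.MORPH_CLOSE, kernel);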
 
Example 12
Source File: CVProcessor.java    From CVScanner with GNU General Public License v3.0
public static List<MatOfPoint> findContoursForMRZ(Mat src){
    Mat img = src.clone();
    src.release();
    double ratio = getScaleRatio(img.size());
    int width = (int) (img.size().width / ratio);
    int height = (int) (img.size().height / ratio);
    Size newSize = new Size(width, height);
    Mat resizedImg = new Mat(newSize, CvType.CV_8UC4);
    Imgproc.resize(img, resizedImg, newSize);

    Mat gray = new Mat();
    Imgproc.cvtColor(resizedImg, gray, Imgproc.COLOR_BGR2GRAY);
    Imgproc.medianBlur(gray, gray, 3);
    //Imgproc.blur(gray, gray, new Size(3, 3));

    Mat morph = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(13, 5));
    Mat dilatedImg = new Mat();
    Imgproc.morphologyEx(gray, dilatedImg, Imgproc.MORPH_BLACKHAT, morph);
    gray.release();

    Mat gradX = new Mat();
    Imgproc.Sobel(dilatedImg, gradX, CvType.CV_32F, 1, 0);
    dilatedImg.release();
    Core.convertScaleAbs(gradX, gradX, 1, 0);
    Core.MinMaxLocResult minMax = Core.minMaxLoc(gradX);
    Core.convertScaleAbs(gradX, gradX, (255/(minMax.maxVal - minMax.minVal)),
            - ((minMax.minVal * 255) / (minMax.maxVal - minMax.minVal)));
    Imgproc.morphologyEx(gradX, gradX, Imgproc.MORPH_CLOSE, morph);

    Mat thresh = new Mat();
    Imgproc.threshold(gradX, thresh, 0, 255, Imgproc.THRESH_OTSU);
    gradX.release();
    morph.release();

    morph = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(21, 21));
    Imgproc.morphologyEx(thresh, thresh, Imgproc.MORPH_CLOSE, morph);
    Imgproc.erode(thresh, thresh, new Mat(), new Point(-1, -1), 4);
    morph.release();

    int col = (int) resizedImg.size().width;
    int p = (int) (resizedImg.size().width * 0.05);
    int row = (int) resizedImg.size().height;
    for(int i = 0; i < row; i++)
    {
        for(int j = 0; j < p; j++){
            thresh.put(i, j, 0);
            thresh.put(i, col-j, 0);
        }
    }

    List<MatOfPoint> contours = new ArrayList<>();
    Mat hierarchy = new Mat();
    Imgproc.findContours(thresh, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
    hierarchy.release();

    Log.d(TAG, "contours found: " + contours.size());

    Collections.sort(contours, new Comparator<MatOfPoint>() {
        @Override
        public int compare(MatOfPoint o1, MatOfPoint o2) {
            return Double.valueOf(Imgproc.contourArea(o2)).compareTo(Imgproc.contourArea(o1));
        }
    });

    return contours;
}
 
Example 13
Source File: ALTMRetinex.java    From OptimizedImageEnhance with MIT License
private static Mat localAdaptation(Mat Lg, int rows, int cols, int r, double eps, double krnlRatio) {
	int krnlSz = Stream.of(3.0, rows * krnlRatio, cols * krnlRatio).max(Double::compare).orElse(3.0).intValue();
	// maximum filter: dilation takes the local maximum over an image block, so it acts as the maximum filter;
	// a minimum filter can likewise be obtained with the erode function
	Mat Lg_ = new Mat();
	Mat kernel = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(krnlSz, krnlSz), new Point(-1, -1));
	Imgproc.dilate(Lg, Lg_, kernel);
	// guided image filter
	return Filters.GuidedImageFilter(Lg, Lg_, r, eps);
}
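
As the comment in localAdaptation notes, the matching minimum filter is simply erosion with the same kernel. A minimal sketch, with Lmin as a hypothetical output Mat:

Mat Lmin = new Mat();
// erode writes the local minimum under the kernel to each pixel, i.e. a minimum filter
Imgproc.erode(Lg, Lmin, kernel);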
 
Example 14
Source File: GeneralUtils.java    From super-cloudops with Apache License 2.0
/**
 * Dilate the image.
 * 
 * @param src source image
 * @return dilated image
 */
public static Mat dialate(Mat src) {
	Mat outImage = new Mat();

	// The smaller the size, the smaller the dilation step and the closer the result stays to the original image
	Mat structImage = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(2, 2));

	Imgproc.dilate(src, outImage, structImage, new Point(-1, -1), 2);
	src = outImage;

	return src;
}
 
Example 15
Source File: SXOpenCV.java    From SikuliX1 with MIT License
public static List<Match> doFindChanges(Image original, Image changed) {
  List<Match> changes = new ArrayList<>();
  if (changed.isValid()) {
    int PIXEL_DIFF_THRESHOLD = 3;
    int IMAGE_DIFF_THRESHOLD = 5;
    Mat previousGray = SXOpenCV.newMat();
    Mat nextGray = SXOpenCV.newMat();
    Mat mDiffAbs = SXOpenCV.newMat();
    Mat mDiffTresh = SXOpenCV.newMat();

    Imgproc.cvtColor(original.getContent(), previousGray, toGray);
    Imgproc.cvtColor(changed.getContent(), nextGray, toGray);
    Core.absdiff(previousGray, nextGray, mDiffAbs);
    Imgproc.threshold(mDiffAbs, mDiffTresh, PIXEL_DIFF_THRESHOLD, 0.0, Imgproc.THRESH_TOZERO);

    if (Core.countNonZero(mDiffTresh) > IMAGE_DIFF_THRESHOLD) {
      Imgproc.threshold(mDiffAbs, mDiffAbs, PIXEL_DIFF_THRESHOLD, 255, Imgproc.THRESH_BINARY);
      Imgproc.dilate(mDiffAbs, mDiffAbs, SXOpenCV.newMat());
      Mat se = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(5, 5));
      Imgproc.morphologyEx(mDiffAbs, mDiffAbs, Imgproc.MORPH_CLOSE, se);

      List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
      Mat mHierarchy = SXOpenCV.newMat();
      Imgproc.findContours(mDiffAbs, contours, mHierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);
      changes = contoursToRectangle(contours);
    }
  }
  return changes;
}
 
Example 16
Source File: CVProcessor.java    From CVScanner with GNU General Public License v3.0
public static List<MatOfPoint> findContours(Mat src){
    Mat img = src.clone();

    //find contours
    double ratio = getScaleRatio(img.size());
    int width = (int) (img.size().width / ratio);
    int height = (int) (img.size().height / ratio);
    Size newSize = new Size(width, height);
    Mat resizedImg = new Mat(newSize, CvType.CV_8UC4);
    Imgproc.resize(img, resizedImg, newSize);
    img.release();

    Imgproc.medianBlur(resizedImg, resizedImg, 7);

    Mat cannedImg = new Mat(newSize, CvType.CV_8UC1);
    Imgproc.Canny(resizedImg, cannedImg, 70, 200, 3, true);
    resizedImg.release();

    Imgproc.threshold(cannedImg, cannedImg, 70, 255, Imgproc.THRESH_OTSU);

    Mat dilatedImg = new Mat(newSize, CvType.CV_8UC1);
    Mat morph = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(3, 3));
    Imgproc.dilate(cannedImg, dilatedImg, morph, new Point(-1, -1), 2, 1, new Scalar(1));
    cannedImg.release();
    morph.release();

    ArrayList<MatOfPoint> contours = new ArrayList<>();
    Mat hierarchy = new Mat();
    Imgproc.findContours(dilatedImg, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
    hierarchy.release();
    dilatedImg.release();

    Log.d(TAG, "contours found: " + contours.size());

    Collections.sort(contours, new Comparator<MatOfPoint>() {
        @Override
        public int compare(MatOfPoint o1, MatOfPoint o2) {
            return Double.valueOf(Imgproc.contourArea(o2)).compareTo(Imgproc.contourArea(o1));
        }
    });

    return contours;
}
 
Example 17
Source File: DarkChannelPriorDehaze.java    From OptimizedImageEnhance with MIT License
public static Mat enhance(Mat image, double krnlRatio, double minAtmosLight, double eps) {
	image.convertTo(image, CvType.CV_32F);
	// extract each color channel
	List<Mat> rgb = new ArrayList<>();
	Core.split(image, rgb);
	Mat rChannel = rgb.get(0);
	Mat gChannel = rgb.get(1);
	Mat bChannel = rgb.get(2);
	int rows = rChannel.rows();
	int cols = rChannel.cols();
	// derive the dark channel from original image
	Mat dc = rChannel.clone();
	for (int i = 0; i < image.rows(); i++) {
		for (int j = 0; j < image.cols(); j++) {
			double min = Math.min(rChannel.get(i, j)[0], Math.min(gChannel.get(i, j)[0], bChannel.get(i, j)[0]));
			dc.put(i, j, min);
		}
	}
	// minimum filter
	int krnlSz = Double.valueOf(Math.max(Math.max(rows * krnlRatio, cols * krnlRatio), 3.0)).intValue();
	Mat kernel = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(krnlSz, krnlSz), new Point(-1, -1));
	Imgproc.erode(dc, dc, kernel);
	// get coarse transmission map
	Mat t = dc.clone();
	Core.subtract(t, new Scalar(255.0), t);
	Core.multiply(t, new Scalar(-1.0), t);
	Core.divide(t, new Scalar(255.0), t);
	// obtain gray scale image
	Mat gray = new Mat();
	Imgproc.cvtColor(image, gray, Imgproc.COLOR_RGB2GRAY);
	Core.divide(gray, new Scalar(255.0), gray);
	// refine transmission map
	int r = krnlSz * 4;
	t = Filters.GuidedImageFilter(gray, t, r, eps);
	// get minimum atmospheric light
	minAtmosLight = Math.min(minAtmosLight, Core.minMaxLoc(dc).maxVal);
	// dehaze each color channel
	rChannel = dehaze(rChannel, t, minAtmosLight);
	gChannel = dehaze(gChannel, t, minAtmosLight);
	bChannel = dehaze(bChannel, t, minAtmosLight);
	// merge the three color channels back into one image
	Mat outval = new Mat();
	Core.merge(new ArrayList<>(Arrays.asList(rChannel, gChannel, bChannel)), outval);
	outval.convertTo(outval, CvType.CV_8UC1);
	return outval;
}