org.opencv.imgproc.Imgproc Java Examples

The following examples show how to use org.opencv.imgproc.Imgproc. Each example is taken from an open-source project; the source file, project, and license are listed in the header above each snippet.
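Before the individual examples, here is a minimal, self-contained sketch of the typical call pattern: load the native library, read an image, run a few Imgproc operations, and write the result. The class name and file paths are illustrative only and are not taken from any of the projects below.

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

public class ImgprocQuickStart {
    public static void main(String[] args) {
        // The native OpenCV library must be loaded before any OpenCV call
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        Mat src = Imgcodecs.imread("input.jpg");              // hypothetical input path
        Mat gray = new Mat();
        Imgproc.cvtColor(src, gray, Imgproc.COLOR_BGR2GRAY);  // convert to grayscale
        Imgproc.GaussianBlur(gray, gray, new Size(5, 5), 0);  // smooth to reduce noise
        Imgproc.Canny(gray, gray, 50, 150);                   // detect edges
        Imgcodecs.imwrite("edges.png", gray);                 // hypothetical output path
    }
}
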
Example #1
Source File: Masking.java    From Android-Face-Recognition-with-Deep-Learning-Library with Apache License 2.0
public PreProcessor preprocessImage(PreProcessor preProcessor) {
    List<Mat> images = preProcessor.getImages();
    List<Mat> processed = new ArrayList<Mat>();
    for (Mat img : images){
        preProcessor.normalize0255(img);

        /***************************************************************************************
         *    Title: Automatic calculation of low and high thresholds for the Canny operation in opencv
         *    Author: VP
         *    Date: 16.04.2013
         *    Code version: -
         *    Availability: http://stackoverflow.com
         *
         ***************************************************************************************/

        double otsu_thresh_val = Imgproc.threshold(img, img, 0, 255, Imgproc.THRESH_OTSU);
        Imgproc.Canny(img, img, otsu_thresh_val * 0.5, otsu_thresh_val);
        processed.add(img);
    }
    preProcessor.setImages(processed);
    return preProcessor;
}
 
Example #2
Source File: WeightCalculate.java    From ImageEnhanceViaFusion with MIT License
public static Mat LaplacianContrast(Mat img) {
	Mat laplacian = new Mat();
	Imgproc.Laplacian(img, laplacian, img.depth());
	//Imgproc.Laplacian(img, laplacian, img.depth(), 3, 1, 0);
	Core.convertScaleAbs(laplacian, laplacian);
	return laplacian;
}
 
Example #3
Source File: RemoveBackScatter.java    From OptimizedImageEnhance with MIT License
private static Mat calWeight(Mat img) {
	Mat L = new Mat();
	img.convertTo(img, CvType.CV_8UC1);
	Imgproc.cvtColor(img, L, Imgproc.COLOR_BGR2GRAY);
	L.convertTo(L, CvType.CV_32F);
	Core.divide(L, new Scalar(255.0), L);
	// calculate Luminance weight
	Mat WC = FeatureWeight.LuminanceWeight(img, L);
	WC.convertTo(WC, L.type());
	// calculate the Saliency weight
	Mat WS = FeatureWeight.Saliency(img);
	WS.convertTo(WS, L.type());
	// calculate the Exposedness weight
	Mat WE = FeatureWeight.Exposedness(L);
	WE.convertTo(WE, L.type());
	// sum
	Mat weight = WC.clone();
	Core.add(weight, WS, weight);
	Core.add(weight, WE, weight);
	return weight;
}
 
Example #4
Source File: MainActivity.java    From SimpleDocumentScanner-Android with MIT License
/**
 * Find the largest 4-point contour in the given Mat.
 *
 * @param src A valid Mat (a single-channel binary image, e.g. an edge map)
 * @return The largest 4-point contour as a MatOfPoint2f, or null if none is found
 */
private MatOfPoint2f findLargestContour(Mat src) {
    List<MatOfPoint> contours = new ArrayList<>();
    Imgproc.findContours(src, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);

    // Sort the contours by area, descending, and keep only the 5 largest
    Collections.sort(contours, new Comparator<MatOfPoint>() {
        public int compare(MatOfPoint o1, MatOfPoint o2) {
            double area1 = Imgproc.contourArea(o1);
            double area2 = Imgproc.contourArea(o2);
            return Double.compare(area2, area1);
        }
    });
    if (contours.size() > 5) contours.subList(5, contours.size()).clear();

    MatOfPoint2f largest = null;
    for (MatOfPoint contour : contours) {
        MatOfPoint2f approx = new MatOfPoint2f();
        MatOfPoint2f c = new MatOfPoint2f();
        contour.convertTo(c, CvType.CV_32FC2);
        Imgproc.approxPolyDP(c, approx, Imgproc.arcLength(c, true) * 0.02, true);

        if (approx.total() == 4 && Imgproc.contourArea(contour) > 150) {
            // the contour has 4 points, it's valid
            largest = approx;
            break;
        }
    }

    return largest;
}
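
Imgproc.findContours expects a single-channel binary image, so a caller typically builds an edge map before invoking the helper above. A minimal sketch of such a call site, with illustrative variable names that are not taken from the project:

// Hypothetical call site: edge-detect first, then look for the document quad.
Mat edges = new Mat();
Imgproc.cvtColor(srcRgba, edges, Imgproc.COLOR_BGR2GRAY);
Imgproc.GaussianBlur(edges, edges, new Size(5, 5), 0);
Imgproc.Canny(edges, edges, 75, 200);

MatOfPoint2f quad = findLargestContour(edges);
if (quad != null) {
    // quad holds the four corner points of the detected document candidate
}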
 
Example #5
Source File: DetectFaceDemo.java    From Java-for-Data-Science with MIT License
public void run() {
  System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
  String base = "C:/Books in Progress/Java for Data Science/Chapter 10/OpenCVExamples/src/resources";
  CascadeClassifier faceDetector = 
          new CascadeClassifier(base + "/lbpcascade_frontalface.xml");
  
  Mat image = Imgcodecs.imread(base + "/images.jpg");

  MatOfRect faceVectors = new MatOfRect();
  faceDetector.detectMultiScale(image, faceVectors);

  out.println(faceVectors.toArray().length + " faces found");

  for (Rect rect : faceVectors.toArray()) {
      Imgproc.rectangle(image, new Point(rect.x, rect.y), 
              new Point(rect.x + rect.width, rect.y + rect.height), 
              new Scalar(0, 255, 0));
  }
  Imgcodecs.imwrite("faceDetection.png", image);
}
 
Example #6
Source File: Finder.java    From SikuliNG with MIT License
public static Mat detectEdges(Mat mSource) {
  Mat mSourceGray = Element.getNewMat();
  Mat mDetectedEdges = Element.getNewMat();

  int edgeThresh = 1;
  int lowThreshold = 100;
  int ratio = 3;
  int kernelSize = 5;
  int blurFilterSize = 3;

  if (mSource.channels() == 1) {
    mSourceGray = mSource;
  } else {
    Imgproc.cvtColor(mSource, mSourceGray, toGray);
  }
  Imgproc.blur(mSourceGray, mDetectedEdges, new Size(blurFilterSize, blurFilterSize));
  Imgproc.Canny(mDetectedEdges, mDetectedEdges,
          lowThreshold, lowThreshold * ratio, kernelSize, false);
  return mDetectedEdges;
}
 
Example #7
Source File: MainActivity.java    From MOAAP with MIT License
void HOGDescriptor() {
    Mat grayMat = new Mat();
    Mat people = new Mat();

    //Converting the image to grayscale
    Imgproc.cvtColor(originalMat, grayMat, Imgproc.COLOR_BGR2GRAY);

    HOGDescriptor hog = new HOGDescriptor();
    hog.setSVMDetector(HOGDescriptor.getDefaultPeopleDetector());

    MatOfRect faces = new MatOfRect();
    MatOfDouble weights = new MatOfDouble();

    hog.detectMultiScale(grayMat, faces, weights);
    originalMat.copyTo(people);
    //Draw the detected people on the image (the HOG people detector returns full-body detections)
    Rect[] facesArray = faces.toArray();
    for (int i = 0; i < facesArray.length; i++)
        Imgproc.rectangle(people, facesArray[i].tl(), facesArray[i].br(), new Scalar(100), 3);

    //Converting Mat back to Bitmap
    Utils.matToBitmap(people, currentBitmap);
    imageView.setImageBitmap(currentBitmap);
}
 
Example #8
Source File: MathFTC.java    From DogeCV with GNU General Public License v3.0
/**
 * Crops an image to two specified corners by filling everything outside the
 * rectangle they define with black; the Mat keeps its original dimensions.
 * @param image The image to be cropped
 * @param topLeftCorner The top-left corner of the desired final image, in pixel coordinates
 * @param bottomRightCorner The bottom-right corner of the desired final image, in pixel coordinates
 * @return The cropped (masked) image
 */
public static Mat crop(Mat image, Point topLeftCorner, Point bottomRightCorner) {
    if (topLeftCorner != null) {
        if(topLeftCorner.y > 0 && topLeftCorner.y < image.height()-1 && topLeftCorner.x > 0 && topLeftCorner.x < image.width()) {
            Imgproc.rectangle(image, new Point(0,0), new Point(image.width(),topLeftCorner.y), new Scalar(0), -1);
            Imgproc.rectangle(image, new Point(0,0), new Point(topLeftCorner.x, image.height()), new Scalar(0), -1);
        }
    }
    if(bottomRightCorner != null) {
        if(bottomRightCorner.y > 0 && bottomRightCorner.y < image.height()-1 && bottomRightCorner.x > 0 && bottomRightCorner.x < image.width()) {
            Imgproc.rectangle(image, new Point(image.width(),image.height()), new Point(bottomRightCorner.x,0), new Scalar(0), -1);
            Imgproc.rectangle(image, new Point(image.width(),image.height()), new Point(0, bottomRightCorner.y), new Scalar(0), -1);
        }
    }
    return image;
}
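
A hedged usage sketch for the helper above, with illustrative coordinates; note that the returned Mat keeps the original frame size and the excluded regions are simply filled with black:

// Hypothetical usage: keep only the region between (50, 40) and (250, 200).
Mat masked = MathFTC.crop(frame, new Point(50, 40), new Point(250, 200));
// 'masked' has the same dimensions as 'frame'; pixels outside the rectangle are black.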
 
Example #9
Source File: FaceDetection.java    From tutorials with MIT License
public static void detectFace(String sourceImagePath, String targetImagePath) {
    Mat loadedImage = loadImage(sourceImagePath);
    MatOfRect facesDetected = new MatOfRect();
    CascadeClassifier cascadeClassifier = new CascadeClassifier();
    int minFaceSize = Math.round(loadedImage.rows() * 0.1f);
    cascadeClassifier.load("./src/main/resources/haarcascades/haarcascade_frontalface_alt.xml");
    cascadeClassifier.detectMultiScale(loadedImage,
            facesDetected,
            1.1,
            3,
            Objdetect.CASCADE_SCALE_IMAGE,
            new Size(minFaceSize, minFaceSize),
            new Size()
    );
    Rect[] facesArray =  facesDetected.toArray();
    for(Rect face : facesArray) {
        Imgproc.rectangle(loadedImage, face.tl(), face.br(), new Scalar(0, 0, 255), 3 );
    }
    saveImage(loadedImage, targetImagePath);
}
 
Example #10
Source File: VideoFaceTests.java    From super-cloudops with Apache License 2.0
/**
 * OpenCV-4.0.0 face detection
 * 
 * @date: 2019-05-07 12:16:55
 * @param image
 *            the Mat to process (a single frame of the video)
 * @return the processed image
 * @throws IOException
 */
public static Mat getFace(Mat image) throws IOException {
	// 1. Load the face detection cascade XML file shipped with OpenCV
	File faceFile = new ClassPathResourcePatternResolver().getResource("opencv/data/haarcascade_frontalface_alt.xml")
			.getFile();
	CascadeClassifier facebook = new CascadeClassifier(faceFile.getAbsolutePath());
	// 2. Container for the detected face rectangles
	MatOfRect face = new MatOfRect();
	// 3. Run the detection
	facebook.detectMultiScale(image, face);
	Rect[] rects = face.toArray();
	System.out.println("匹配到 " + rects.length + " 个人脸");
	// 4 为每张识别到的人脸画一个圈
	for (int i = 0; i < rects.length; i++) {
		Imgproc.rectangle(image, new Point(rects[i].x, rects[i].y),
				new Point(rects[i].x + rects[i].width, rects[i].y + rects[i].height), new Scalar(0, 255, 0));
		Imgproc.putText(image, "Human", new Point(rects[i].x, rects[i].y), Imgproc.FONT_HERSHEY_SCRIPT_SIMPLEX, 1.0,
				new Scalar(0, 255, 0), 1, Imgproc.LINE_AA, false);
		// Mat dst=image.clone();
		// Imgproc.resize(image, image, new Size(300,300));
	}
	return image;
}
 
Example #11
Source File: ResizeUtils.java    From super-cloudops with Apache License 2.0
public static Mat resize(Mat src, Size dsize) {
	try {
		Mat temp = trimImg(src);
		src = temp;
	} catch (Exception e) {
		System.out.println(e);
	}

	src = clearWhite(src);

	Mat dst = new Mat();
	// Area interpolation (INTER_AREA): behaves like linear interpolation when enlarging and avoids moire artifacts when shrinking.
	Imgproc.resize(src, dst, dsize, 0, 0, Imgproc.INTER_AREA);

	// Erode
	dst = GeneralUtils.erode(dst);
	return dst;
}
 
Example #12
Source File: GripPipeline.java    From FtcSamples with MIT License
/**
 * Softens an image using one of several filters.
 * @param input The image on which to perform the blur.
 * @param type The blurType to perform.
 * @param doubleRadius The radius for the blur.
 * @param output The image in which to store the output.
 */
private void blur(Mat input, BlurType type, double doubleRadius,
	Mat output) {
	int radius = (int)(doubleRadius + 0.5);
	int kernelSize;
	switch(type){
		case BOX:
			kernelSize = 2 * radius + 1;
			Imgproc.blur(input, output, new Size(kernelSize, kernelSize));
			break;
		case GAUSSIAN:
			kernelSize = 6 * radius + 1;
			Imgproc.GaussianBlur(input,output, new Size(kernelSize, kernelSize), radius);
			break;
		case MEDIAN:
			kernelSize = 2 * radius + 1;
			Imgproc.medianBlur(input, output, kernelSize);
			break;
		case BILATERAL:
			Imgproc.bilateralFilter(input, output, -1, radius, radius);
			break;
	}
}
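
A sketch of how this helper might be invoked from within the pipeline, assuming the BlurType constants shown in the switch above; the source Mat and radius are illustrative:

// Hypothetical usage: Gaussian blur with radius 2 (kernel size 6 * 2 + 1 = 13).
Mat blurOutput = new Mat();
blur(blurInput, BlurType.GAUSSIAN, 2.0, blurOutput);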
 
Example #13
Source File: AutoCalibrationManager.java    From ShootOFF with GNU General Public License v3.0
public Optional<MatOfPoint2f> findChessboard(Mat mat) {

		final MatOfPoint2f imageCorners = new MatOfPoint2f();

		final boolean found = Calib3d.findChessboardCorners(mat, boardSize, imageCorners,
				Calib3d.CALIB_CB_ADAPTIVE_THRESH | Calib3d.CALIB_CB_NORMALIZE_IMAGE);

		if (logger.isTraceEnabled()) logger.trace("found chessboard corners {}", found);

		if (found) {
			// optimization
			Imgproc.cornerSubPix(mat, imageCorners, new Size(1, 1), new Size(-1, -1), term);

			return Optional.of(imageCorners);
		}
		return Optional.empty();
	}
 
Example #14
Source File: VideoMotionDetector.java    From video-stream-analytics with Apache License 2.0
private static ArrayList<Rect> getContourArea(Mat mat) {
	Mat hierarchy = new Mat();
	Mat image = mat.clone();
	List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
	Imgproc.findContours(image, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
	Rect rect = null;
	double minArea = 300; // minimum contour area to keep
	ArrayList<Rect> arr = new ArrayList<Rect>();
	for (int i = 0; i < contours.size(); i++) {
		Mat contour = contours.get(i);
		double contourArea = Imgproc.contourArea(contour);
		if (contourArea > minArea) {
			rect = Imgproc.boundingRect(contours.get(i));
			arr.add(rect);
		}
	}
	return arr;
}
 
Example #15
Source File: MainActivity.java    From MOAAP with MIT License
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent imageReturnedIntent) {
    //Put it there, just in case:)
    super.onActivityResult(requestCode, resultCode, imageReturnedIntent);

    switch(requestCode) {
        case SELECT_PHOTO:
            if(resultCode == RESULT_OK && read_external_storage_granted){
                try {
                    final Uri imageUri = imageReturnedIntent.getData();
                    final InputStream imageStream = getContentResolver().openInputStream(imageUri);
                    final Bitmap selectedImage = BitmapFactory.decodeStream(imageStream);
                    src = new Mat(selectedImage.getHeight(), selectedImage.getWidth(), CvType.CV_8UC4);
                    Utils.bitmapToMat(selectedImage, src);
                    src_gray = new Mat(selectedImage.getHeight(), selectedImage.getWidth(), CvType.CV_8UC1);
                    switch (ACTION_MODE) {
                        case HomeActivity.GAUSSIAN_BLUR:
                            Imgproc.GaussianBlur(src, src, new Size(9, 9), 0);
                            break;
                        case HomeActivity.MEAN_BLUR:
                            Imgproc.blur(src, src, new Size(9, 9));
                            break;
                        case HomeActivity.MEDIAN_BLUR:
                            Imgproc.medianBlur(src, src, 9);
                            break;
                        case HomeActivity.SHARPEN:
                            Mat kernel = new Mat(3, 3, CvType.CV_16SC1);
                            //int[] values = {0, -1, 0, -1, 5, -1, 0, -1, 0};
                            Log.d("imageType", CvType.typeToString(src.type()) + "");
                            kernel.put(0, 0, 0, -1, 0, -1, 5, -1, 0, -1, 0);
                            Imgproc.filter2D(src, src, src_gray.depth(), kernel);
                            break;
                        case HomeActivity.DILATE:
                            Imgproc.cvtColor(src, src_gray, Imgproc.COLOR_BGR2GRAY);
                            Imgproc.threshold(src_gray, src_gray, 100, 255, Imgproc.THRESH_BINARY);
                            Mat kernelDilate = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(3, 3));
                            Imgproc.dilate(src_gray, src_gray, kernelDilate);
                            Imgproc.cvtColor(src_gray, src, Imgproc.COLOR_GRAY2RGBA, 4);
                            break;
                        case HomeActivity.ERODE:
                            Imgproc.cvtColor(src, src_gray, Imgproc.COLOR_BGR2GRAY);
                            Imgproc.threshold(src_gray, src_gray, 100, 255, Imgproc.THRESH_BINARY);
                            Mat kernelErode = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(5, 5));
                            Imgproc.erode(src_gray, src_gray, kernelErode);
                            Imgproc.cvtColor(src_gray, src, Imgproc.COLOR_GRAY2RGBA, 4);
                            break;
                        case HomeActivity.THRESHOLD:
                            Imgproc.cvtColor(src, src_gray, Imgproc.COLOR_BGR2GRAY);
                            Imgproc.threshold(src_gray, src_gray, 100, 255, Imgproc.THRESH_BINARY);
                            Imgproc.cvtColor(src_gray, src, Imgproc.COLOR_GRAY2RGBA, 4);
                            break;
                        case HomeActivity.ADAPTIVE_THRESHOLD:
                            Imgproc.cvtColor(src, src_gray, Imgproc.COLOR_BGR2GRAY);
                            Imgproc.adaptiveThreshold(src_gray, src_gray, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C, Imgproc.THRESH_BINARY, 3, 0);
                            Imgproc.cvtColor(src_gray, src, Imgproc.COLOR_GRAY2RGBA, 4);
                            break;
                    }
                    Bitmap processedImage = Bitmap.createBitmap(src.cols(), src.rows(), Bitmap.Config.ARGB_8888);
                    Log.i("imageType", CvType.typeToString(src.type()) + "");
                    Utils.matToBitmap(src, processedImage);
                    ivImage.setImageBitmap(selectedImage);
                    ivImageProcessed.setImageBitmap(processedImage);
                    Log.i("process", "process done");
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                }
            }
            break;
    }
}
 
Example #16
Source File: MainActivity.java    From SimpleDocumentScanner-Android with MIT License
/**
     * Apply a threshold to give the "scanned" look
     *
     * NOTE:
     * See the following link for more info http://docs.opencv.org/3.1.0/d7/d4d/tutorial_py_thresholding.html#gsc.tab=0
     * @param src A valid Mat
     * @return The processed Bitmap
     */
    private Bitmap applyThreshold(Mat src) {
        Imgproc.cvtColor(src, src, Imgproc.COLOR_BGR2GRAY);

        // Some other approaches
//        Imgproc.adaptiveThreshold(src, src, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C, Imgproc.THRESH_BINARY, 15, 15);
//        Imgproc.threshold(src, src, 0, 255, Imgproc.THRESH_BINARY + Imgproc.THRESH_OTSU);

        Imgproc.GaussianBlur(src, src, new Size(5, 5), 0);
        Imgproc.adaptiveThreshold(src, src, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C, Imgproc.THRESH_BINARY, 11, 2);

        Bitmap bm = Bitmap.createBitmap(src.width(), src.height(), Bitmap.Config.ARGB_8888);
        org.opencv.android.Utils.matToBitmap(src, bm);

        return bm;
    }
 
Example #17
Source File: MainActivity.java    From OpenCV-Android-Object-Detection with MIT License
private void initializeOpenCVDependencies() throws IOException {
    mOpenCvCameraView.enableView();
    detector = FeatureDetector.create(FeatureDetector.ORB);
    descriptor = DescriptorExtractor.create(DescriptorExtractor.ORB);
    matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
    img1 = new Mat();
    AssetManager assetManager = getAssets();
    InputStream istr = assetManager.open("a.jpeg");
    Bitmap bitmap = BitmapFactory.decodeStream(istr);
    Utils.bitmapToMat(bitmap, img1);
    Imgproc.cvtColor(img1, img1, Imgproc.COLOR_RGB2GRAY);
    img1.convertTo(img1, 0); // convert to CV_8U so the type matches the camera frames
    descriptors1 = new Mat();
    keypoints1 = new MatOfKeyPoint();
    detector.detect(img1, keypoints1);
    descriptor.compute(img1, keypoints1, descriptors1);

}
 
Example #18
Source File: SXOpenCV.java    From SikuliX1 with MIT License
public static Mat optimize(Mat mimg, float rFactor, Image.Interpolation interpolation) {
  Imgproc.cvtColor(mimg, mimg, Imgproc.COLOR_BGR2GRAY);

  // sharpen original image to primarily get rid of sub pixel rendering artifacts
  mimg = unsharpMask(mimg, 3);

  if (rFactor > 0 && rFactor != 1) {
    resize(mimg, rFactor, interpolation);
  }

  // sharpen the enlarged image again
  mimg = unsharpMask(mimg, 5);

  // invert in case of mainly dark background
  if (Core.mean(mimg).val[0] < 127) {
    Core.bitwise_not(mimg, mimg);
  }
  return mimg;
}
 
Example #19
Source File: AutoCalibrationManager.java    From ShootOFF with GNU General Public License v3.0
private Mat warpPerspective(final Mat frame) {
	if (warpInitialized) {
		final Mat mat = new Mat();
		Imgproc.warpPerspective(frame, mat, perspMat, frame.size(), Imgproc.INTER_LINEAR);

		return mat;
	} else {
		logger.warn("warpPerspective called when warpInitialized is false - {} {} - {}", perspMat, boundingBox,
				isCalibrated);

		return frame;
	}
}
 
Example #20
Source File: Resize.java    From Android-Face-Recognition-with-Deep-Learning-Library with Apache License 2.0
private static List<Mat> preprocessImages(List<Mat> images, Size size){
    List<Mat> processed = new ArrayList<Mat>();
    for (Mat img : images){
        Imgproc.resize(img, img, size);
        processed.add(img);
    }
    return processed;
}
 
Example #21
Source File: JavaCameraView.java    From FaceRecognitionApp with GNU General Public License v2.0
@Override
public Mat rgba() {
    if (mPreviewFormat == ImageFormat.NV21)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);
    else if (mPreviewFormat == ImageFormat.YV12)
        Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGB_I420, 4);  // COLOR_YUV2RGBA_YV12 produces inverted colors
    else
        throw new IllegalArgumentException("Preview Format can be NV21 or YV12");

    return mRgba;
}
 
Example #22
Source File: MainActivity.java    From OpenCvFaceDetect with Apache License 2.0
@Override
public void run() {
    super.run();
    while (isStart && isLoadSuccess) {
        synchronized (mLock) {
            try {
                mCameraRawData = mFrameQueue.poll(20, TimeUnit.MILLISECONDS);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }

            if (mCameraRawData == null) {
                continue;
            }
            frameDatas = mCameraRawData.getRawData();
            mSrcMat.put(0, 0, frameDatas);
            Imgproc.cvtColor(mSrcMat, mDesMat, Imgproc.COLOR_YUV2GRAY_420);
            mFaceCascade.detectMultiScale(mDesMat, matOfRect, 1.1, 5
                    , 2, mMinSize, mMaxSize);
            if (matOfRect.toArray().length != 0) {
                Rect rect = getBiggestFace(matOfRect.toArray());
                mResultView.showFace(rect);
            } else {
                mResultView.clear();
            }
            mFreeQueue.offer(mCameraRawData);
            mCamera.addCallbackBuffer(frameDatas);
        }

    }
}
 
Example #23
Source File: PerspectiveTransform.java    From opencv-fun with GNU Affero General Public License v3.0
private static void doProjection (ImgWindow projWnd) {
	if(corners.size() == 4) {
		Mat cornersMat = Converters.vector_Point2f_to_Mat(corners);
		Mat targetMat = Converters.vector_Point2f_to_Mat(target);
		trans = Imgproc.getPerspectiveTransform(cornersMat, targetMat);
		invTrans = Imgproc.getPerspectiveTransform(targetMat, cornersMat);
		proj = new Mat();
		Imgproc.warpPerspective(img, proj, trans, new Size(img.cols(), img.rows()));
		if(projWnd.isClicked()) {
			perspPoints.add(new Point(projWnd.mouseX, projWnd.mouseY));
		}
	}
}
 
Example #24
Source File: MainActivity.java    From MOAAP with MIT License
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {

        //Get image size and draw a rectangle on the image for reference
        Mat temp = inputFrame.rgba();
        Imgproc.rectangle(temp, new Point(temp.cols()/2 - 200, temp.rows() / 2 - 200), new Point(temp.cols() / 2 + 200, temp.rows() / 2 + 200), new Scalar(255,255,255),1);
        Mat digit = temp.submat(temp.rows()/2 - 180, temp.rows() / 2 + 180, temp.cols() / 2 - 180, temp.cols() / 2 + 180).clone();
        Core.transpose(digit,digit);
        int predict_result = mnist.FindMatch(digit);
        Imgproc.putText(temp, Integer.toString(predict_result), new Point(50, 150), FONT_HERSHEY_SIMPLEX, 3.0, new Scalar(0, 0, 255), 5);

        return temp;
    }
 
Example #25
Source File: ContoursUtils.java    From super-cloudops with Apache License 2.0
/**
 * Find contours and sort them in ascending order of bounding-rect area.
 *
 * @param cannyMat a binary edge image (e.g. Canny output)
 * @return the sorted list of contours
 */
public static List<MatOfPoint> findContours(Mat cannyMat) {
	List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
	Mat hierarchy = new Mat();

	// Find contours
	Imgproc.findContours(cannyMat, contours, hierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0));

	if (contours.size() <= 0) {
		// throw new RuntimeException("No contours found in the image");
	} else {
		// Sort contours in ascending order
		contours.sort(new Comparator<MatOfPoint>() {
			@Override
			public int compare(MatOfPoint o1, MatOfPoint o2) {
				MatOfPoint2f mat1 = new MatOfPoint2f(o1.toArray());
				RotatedRect rect1 = Imgproc.minAreaRect(mat1);
				Rect r1 = rect1.boundingRect();

				MatOfPoint2f mat2 = new MatOfPoint2f(o2.toArray());
				RotatedRect rect2 = Imgproc.minAreaRect(mat2);
				Rect r2 = rect2.boundingRect();

				return Double.compare(r1.area(), r2.area());
			}
		});

	}
	return contours;
}
 
Example #26
Source File: SXOpenCV.java    From SikuliX1 with MIT License
public static List<Match> doFindChanges(Image original, Image changed) {
  List<Match> changes = new ArrayList<>();
  if (changed.isValid()) {
    int PIXEL_DIFF_THRESHOLD = 3;
    int IMAGE_DIFF_THRESHOLD = 5;
    Mat previousGray = SXOpenCV.newMat();
    Mat nextGray = SXOpenCV.newMat();
    Mat mDiffAbs = SXOpenCV.newMat();
    Mat mDiffTresh = SXOpenCV.newMat();

    Imgproc.cvtColor(original.getContent(), previousGray, toGray);
    Imgproc.cvtColor(changed.getContent(), nextGray, toGray);
    Core.absdiff(previousGray, nextGray, mDiffAbs);
    Imgproc.threshold(mDiffAbs, mDiffTresh, PIXEL_DIFF_THRESHOLD, 0.0, Imgproc.THRESH_TOZERO);

    if (Core.countNonZero(mDiffTresh) > IMAGE_DIFF_THRESHOLD) {
      Imgproc.threshold(mDiffAbs, mDiffAbs, PIXEL_DIFF_THRESHOLD, 255, Imgproc.THRESH_BINARY);
      Imgproc.dilate(mDiffAbs, mDiffAbs, SXOpenCV.newMat());
      Mat se = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(5, 5));
      Imgproc.morphologyEx(mDiffAbs, mDiffAbs, Imgproc.MORPH_CLOSE, se);

      List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
      Mat mHierarchy = SXOpenCV.newMat();
      Imgproc.findContours(mDiffAbs, contours, mHierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);
      changes = contoursToRectangle(contours);
    }
  }
  return changes;
}
 
Example #27
Source File: SXOpenCV.java    From SikuliX1 with MIT License
private static Match doFindMatch(Mat where, FindAttributes findAttributes, boolean findAll) {
  Mat result = new Mat();
  Mat finalWhere = where;
  if (findAttributes.gray()) {
    Imgproc.cvtColor(where, finalWhere, Imgproc.COLOR_BGR2GRAY);
  }
  Mat what = findAttributes.what();
  Mat mask = findAttributes.mask();
  if (findAttributes.target().plain()) {
    Mat finalWherePlain = finalWhere;
    Mat finalWhatPlain = what;
    if (findAttributes.target().black()) {
      Core.bitwise_not(finalWhere, finalWherePlain);
      Core.bitwise_not(what, finalWhatPlain);
    }
    if (mask.empty()) {
      Imgproc.matchTemplate(finalWherePlain, finalWhatPlain, result, Imgproc.TM_SQDIFF_NORMED);
    } else {
      Imgproc.matchTemplate(finalWherePlain, what, result, Imgproc.TM_SQDIFF_NORMED, mask);
    }
    Core.subtract(Mat.ones(result.size(), CvType.CV_32F), result, result);
  } else if (mask.empty()) {
    Imgproc.matchTemplate(where, what, result, Imgproc.TM_CCOEFF_NORMED);
  } else {
    Imgproc.matchTemplate(where, what, result, Imgproc.TM_CCORR_NORMED, mask);
  }
  Core.MinMaxLocResult minMax = Core.minMaxLoc(result);
  double maxVal = minMax.maxVal;
  if (maxVal > findAttributes.target().similarity()) {
    Point point = new Point((int) minMax.maxLoc.x, (int) minMax.maxLoc.y);
    if (!findAll) {
      result = null;
    }
    return new Match(point, maxVal, result);
  }
  return null;
}
 
Example #28
Source File: Blur.java    From go-bees with GNU General Public License v3.0
@Override
public Mat process(@NonNull Mat frame) {
    if (frame.empty()) {
        Log.e("Invalid input frame.");
        return null;
    }
    Mat tmp = frame.clone();
    // Apply gaussian blur
    for (int i = 0; i < REPETITIONS; i++) {
        Imgproc.GaussianBlur(tmp, tmp, new Size(KERNEL_SIZE, KERNEL_SIZE), 0);
    }
    return tmp;
}
 
Example #29
Source File: OpenCVNonMavenExamples.java    From Java-for-Data-Science with MIT License
public void noiseRemoval() {
//        Mat Kernel = cv::Mat(cv::Size(Maximum_Width_of_Noise,Maximum_Height_of_noise),CV_8UC1,cv::Scalar(255));        
        Mat Kernel = new Mat(new Size(3, 3), CvType.CV_8U, new Scalar(255));
        Mat source = Imgcodecs.imread("noiseExample.png");
        Mat temp = new Mat();
        Mat topHat = new Mat();
        Mat destination = new Mat();

        Imgproc.morphologyEx(source, temp, Imgproc.MORPH_OPEN, Kernel);
        Imgproc.morphologyEx(temp, destination, Imgproc.MORPH_CLOSE, Kernel);
//        Imgproc.morphologyEx(temp, topHat, Imgproc.MORPH_GRADIENT, Kernel);
//        Imgproc.morphologyEx(topHat, destination, Imgproc.MORPH_CLOSE, Kernel);
        Imgcodecs.imwrite("noiseRemovedExample.png", source);
    }
 
Example #30
Source File: ColorSpace.java    From opencv-fun with GNU Affero General Public License v3.0
public static Mat getChannel(Mat orig, int colorSpace, int channelIdx) {
	Mat hsv = new Mat();
	Imgproc.cvtColor(orig, hsv, colorSpace);
	List<Mat> channels = new ArrayList<Mat>();
	for(int i = 0; i < hsv.channels(); i++) {
		Mat channel = new Mat();
		channels.add(channel);
	}
	Core.split(hsv, channels);
	return channels.get(channelIdx);
}