Java Code Examples for org.opencv.imgproc.Imgproc

The following are top-voted examples showing how to use org.opencv.imgproc.Imgproc. These examples are extracted from open-source projects. You can vote up the examples you find useful; your votes are used by our system to surface more high-quality examples.
Example 1
Project: SudoCAM-Ku   File: Thresholding.java   View source code 17 votes vote down vote up
/**
 * Detects the (sudoku) grid in a binary image: finds the largest contour,
 * approximates its polygon, and stores the approximation, the ordered grid
 * corners, and the minimum-area rectangle into Thresholding's static fields.
 *
 * @param img binary input image (as produced by the thresholding pipeline)
 */
public static void gridDetection(Mat img){
       List<MatOfPoint> contours = new ArrayList<>();
       Imgproc.findContours(img, contours, new Mat(), Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE);

       // Pick the largest contour by area — assumed to be the grid outline.
       double maxArea = 0;
       MatOfPoint maxContour = new MatOfPoint();
       for (MatOfPoint contour : contours) {
           double area = Imgproc.contourArea(contour);
           if (area > maxArea) {
               maxArea = area;
               maxContour = contour;
           }
       }

       // Convert to MatOfPoint2f once instead of three separate conversions.
       MatOfPoint2f contour2f = new MatOfPoint2f(maxContour.toArray());

       // 10% of the perimeter as approximation tolerance — coarse enough to
       // reduce the grid outline to (ideally) its four corners.
       double epsilon = 0.1 * Imgproc.arcLength(contour2f, true);
       MatOfPoint2f approx = new MatOfPoint2f();
       Imgproc.approxPolyDP(contour2f, approx, epsilon, true);

       RotatedRect rect = Imgproc.minAreaRect(contour2f);

       Mat grid = Thresholding.orderPoints(approx);

       // Publish results for the rest of the pipeline.
       Thresholding.approx = approx;
       Thresholding.grid = grid;
       Thresholding.rect = rect;
}
 
Example 2
Project: MOAAP   File: MainActivity.java   View source code 10 votes vote down vote up
/**
 * Camera callback: renders either a dense optical-flow demo or a sparse KLT
 * tracker onto the gray frame, depending on mViewMode. Returns the frame to
 * display. Relies on the instance fields features/prevFeatures/nextFeatures,
 * mGray/mPrevGray, status and err carried across frames.
 */
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
        final int viewMode = mViewMode;
        switch (viewMode) {
            case VIEW_MODE_OPTICAL_FLOW:
                mGray = inputFrame.gray();
                // First frame (or after reset): seed a regular grid of points to track.
                if(features.toArray().length==0){
                    int rowStep = 50, colStep = 100;
                    int nRows = mGray.rows()/rowStep, nCols = mGray.cols()/colStep;

//                    Log.d(TAG, "\nRows: "+nRows+"\nCols: "+nCols+"\n");

                    Point points[] = new Point[nRows*nCols];
                    for(int i=0; i<nRows; i++){
                        for(int j=0; j<nCols; j++){
                            points[i*nCols+j]=new Point(j*colStep, i*rowStep);
//                            Log.d(TAG, "\nRow: "+i*rowStep+"\nCol: "+j*colStep+"\n: ");
                        }
                    }

                    features.fromArray(points);

                    prevFeatures.fromList(features.toList());
                    mPrevGray = mGray.clone();
                    break;
                }

                // Track last frame's points into the current frame.
                nextFeatures.fromArray(prevFeatures.toArray());
                Video.calcOpticalFlowPyrLK(mPrevGray, mGray, prevFeatures, nextFeatures, status, err);

                // NOTE(review): segments are drawn from the ORIGINAL seed grid
                // ('features'), not from 'prevFeatures' — so each line goes from a
                // fixed seed to the latest tracked position. Confirm this is intended.
                List<Point> prevList=features.toList(), nextList=nextFeatures.toList();
                Scalar color = new Scalar(255);

                for(int i = 0; i<prevList.size(); i++){
//                    Core.circle(mGray, prevList.get(i), 5, color);
                    Imgproc.line(mGray, prevList.get(i), nextList.get(i), color);
                }

                mPrevGray = mGray.clone();
                break;
            case VIEW_MODE_KLT_TRACKER:
                mGray = inputFrame.gray();

                // First frame: pick up to 10 strong corners (quality 0.01, min distance 10).
                if(features.toArray().length==0){
                    Imgproc.goodFeaturesToTrack(mGray, features, 10, 0.01, 10);
                    Log.d(TAG, features.toList().size()+"");
                    prevFeatures.fromList(features.toList());
                    mPrevGray = mGray.clone();
//                    prevFeatures.fromList(nextFeatures.toList());
                    break;
                }

//                OpticalFlow(mPrevGray.getNativeObjAddr(), mGray.getNativeObjAddr(), prevFeatures.getNativeObjAddr(), nextFeatures.getNativeObjAddr());
                Video.calcOpticalFlowPyrLK(mPrevGray, mGray, prevFeatures, nextFeatures, status, err);
                List<Point> drawFeature = nextFeatures.toList();
//                Log.d(TAG, drawFeature.size()+"");
                // Mark each tracked corner with a circle of radius 5.
                for(int i = 0; i<drawFeature.size(); i++){
                    Point p = drawFeature.get(i);
                    Imgproc.circle(mGray, p, 5, new Scalar(255));
                }
                // Current frame/points become the reference for the next callback.
                mPrevGray = mGray.clone();
                prevFeatures.fromList(nextFeatures.toList());
                break;
            default: mViewMode = VIEW_MODE_KLT_TRACKER;
        }

        return mGray;
    }
 
Example 3
Project: SudoCAM-Ku   File: Thresholding.java   View source code 8 votes vote down vote up
/**
 * Full adaptive pipeline: truncate bright pixels, adaptive-threshold, denoise,
 * detect the grid, warp it onto a 512x512 canvas, and binarize the result.
 *
 * @param img grayscale source image
 * @return the warped, binarized (and re-inverted) 512x512 grid image
 */
public static Mat adaptativeProcess(Mat img){
	// Clamp bright pixels to 120, then adaptive-threshold and median-denoise.
	Mat work = new Mat();
	Imgproc.threshold(img, work, 120, 255, Imgproc.THRESH_TRUNC);
	work = Thresholding.adaptativeThresholding(work);
	Imgproc.medianBlur(work, work, 7);
	Mat inverted = Thresholding.InvertImageColor(work);
	Thresholding.gridDetection(inverted);

	// Destination corners for the 512x512 warped grid.
	Mat corners = Mat.zeros(4, 2, CvType.CV_32F);
	corners.put(0, 0, 0);   corners.put(0, 1, 512);
	corners.put(1, 0, 0);   corners.put(1, 1, 0);
	corners.put(2, 0, 512); corners.put(2, 1, 0);
	corners.put(3, 0, 512); corners.put(3, 1, 512);

	Mat transform = Imgproc.getPerspectiveTransform(Thresholding.grid, corners);

	Mat warped = new Mat();
	Imgproc.warpPerspective(inverted, warped, transform, new Size(512, 512));

	// Final clean-up: small median blur, then a hard binary threshold.
	Imgproc.medianBlur(warped, warped, 3);
	Imgproc.threshold(warped, warped, 254, 255, Imgproc.THRESH_BINARY);

	return Thresholding.InvertImageColor(warped);
}
 
Example 4
Project: FlashLib   File: CvTemplateMatcher.java   View source code 8 votes vote down vote up
/**
 * Runs OpenCV template matching of {@code templ} against {@code scene} and
 * returns the best match as a MatchResult centered on the template.
 *
 * @param scene  image to search in
 * @param templ  template to search for
 * @param method matching method (its ordinal maps to an Imgproc.TM_* code)
 * @param img    unused (kept for interface compatibility)
 * @return best match location (template center) and score
 */
public MatchResult match(Mat scene, Mat templ, Method method, Mat img) {
	// Result map is (scene - template + 1) in each dimension.
	int resultCols = scene.cols() - templ.cols() + 1;
	int resultRows = scene.rows() - templ.rows() + 1;
	Mat result = new Mat(resultRows, resultCols, CV_32FC1);
	Imgproc.matchTemplate(scene, templ, result, method.ordinal());
	//Core.normalize(result, result, 0, 1, 32,-1,new Mat());

	MinMaxLocResult extrema = Core.minMaxLoc(result);

	// For SQDIFF variants the best match is the MINIMUM; otherwise the maximum.
	final boolean minimumIsBest = method.ordinal() == Imgproc.TM_SQDIFF
			|| method.ordinal() == Imgproc.TM_SQDIFF_NORMED;
	Point bestLoc = minimumIsBest ? extrema.minLoc : extrema.maxLoc;
	double bestVal = minimumIsBest ? extrema.minVal : extrema.maxVal;

	// Report the template's center rather than its top-left corner.
	return new MatchResult(bestLoc.x + (templ.cols() / 2), bestLoc.y + (templ.rows() / 2), 0, bestVal);
}
 
Example 5
Project: mao-android   File: FdActivity.java   View source code 7 votes vote down vote up
/**
 * Camera callback: detects faces (Java cascade or native detector), draws a
 * rectangle around each, then rotates and mirrors the frame for display.
 */
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {

        mRgba = inputFrame.rgba();
        mGray = inputFrame.gray();

        // Lazily derive the minimum face size from the frame height.
        if (mAbsoluteFaceSize == 0) {
            int height = mGray.rows();
            if (Math.round(height * mRelativeFaceSize) > 0) {
                mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
            }
            mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
        }

        MatOfRect faces = new MatOfRect();

        if (mDetectorType == JAVA_DETECTOR) {
            if (mJavaDetector != null)
                mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                        new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
        } else if (mDetectorType == NATIVE_DETECTOR) {
            if (mNativeDetector != null)
                mNativeDetector.detect(mGray, faces);
        } else {
            Log.e(TAG, "Detection method is not selected!");
        }

        // Outline every detected face with a 3-pixel rectangle.
        Rect[] facesArray = faces.toArray();
        for (int i = 0; i < facesArray.length; i++) {
            Imgproc.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);
        }

        // Rotate 90 degrees about the frame center.
        Mat rotateMat = Imgproc.getRotationMatrix2D(new Point(mRgba.rows() / 2, mRgba.cols() / 2), 90, 1);
        Imgproc.warpAffine(mRgba, mRgba, rotateMat, mRgba.size());

        // Mirror around the y axis.
        Core.flip(mRgba, mRgba, 1);

        return mRgba;
    }
 
Example 6
Project: ImageEnhanceViaFusion   File: ImShow.java   View source code 7 votes vote down vote up
// Displays the given Mat in the Swing window, optionally resizing it first.
// Note: resizing mutates the caller's Mat in place.
public void showImage(Mat img) {
	if (SizeCustom) {
		// NOTE(review): OpenCV's Size is (width, height); passing (Height, Width)
		// looks swapped — confirm the intended orientation before relying on it.
		Imgproc.resize(img, img, new Size(Height, Width));
	}
	// Highgui.imencode(".jpg", img, matOfByte);
	// byte[] byteArray = matOfByte.toArray();
	BufferedImage bufImage = null;
	try {
		// InputStream in = new ByteArrayInputStream(byteArray);
		// bufImage = ImageIO.read(in);
		bufImage = toBufferedImage(img);
		image.setImage(bufImage);
		Window.pack();
		label.updateUI();
		Window.setVisible(true);
	} catch (Exception e) {
		e.printStackTrace();
	}
}
 
Example 7
Project: FTC2016   File: Transform.java   View source code 7 votes vote down vote up
/**
 * Rotate an image by an angle (counterclockwise), enlarging the canvas so the
 * rotated content is not clipped. The image is modified in place.
 *
 * @param image Matrix to rotate
 * @param angle Angle to rotate by (counterclockwise) from -360 to 360
 */
public static void rotate(Mat image, double angle) {
    // Bounding box of the rotated image.
    double radians = Math.toRadians(angle);
    double sin = Math.abs(Math.sin(radians));
    double cos = Math.abs(Math.cos(radians));

    int newWidth = (int) (image.width() * cos + image.height() * sin);
    int newHeight = (int) (image.width() * sin + image.height() * cos);

    // Rotate about the ORIGINAL image center, then shift so the result is
    // centered in the enlarged canvas.
    Point center = new Point(image.width() / 2.0, image.height() / 2.0);
    Mat rotMatrix = Imgproc.getRotationMatrix2D(center, angle, 1.0); //1.0 means 100 % scale
    rotMatrix.put(0, 2, rotMatrix.get(0, 2)[0] + (newWidth - image.width()) / 2.0);
    rotMatrix.put(1, 2, rotMatrix.get(1, 2)[0] + (newHeight - image.height()) / 2.0);

    // BUG FIX: the enlarged Size was previously computed but never passed to
    // warpAffine (image.size() was used), which clipped the rotated image.
    Size size = new Size(newWidth, newHeight);
    Imgproc.warpAffine(image, image, rotMatrix, size);
}
 
Example 8
Project: SudoCAM-Ku   File: Thresholding.java   View source code 7 votes vote down vote up
/**
 * Standard pipeline: invert the image, detect the grid, and warp it onto a
 * 512x512 canvas.
 *
 * @param img binary source image
 * @return the warped (and re-inverted) 512x512 grid image
 */
public static Mat normalProcess(Mat img){
	Mat inverted = Thresholding.InvertImageColor(img);
	Thresholding.gridDetection(inverted);

	// Destination corners for the 512x512 warped grid.
	Mat corners = Mat.zeros(4, 2, CvType.CV_32F);
	corners.put(0, 0, 0);   corners.put(0, 1, 512);
	corners.put(1, 0, 0);   corners.put(1, 1, 0);
	corners.put(2, 0, 512); corners.put(2, 1, 0);
	corners.put(3, 0, 512); corners.put(3, 1, 512);

	Mat transform = Imgproc.getPerspectiveTransform(Thresholding.grid, corners);

	Mat warped = new Mat();
	Imgproc.warpPerspective(inverted, warped, transform, new Size(512, 512));
	return Thresholding.InvertImageColor(warped);
}
 
Example 9
Project: AndroidCameraSudokuSolver   File: PortraitCameraView.java   View source code 6 votes vote down vote up
// Converts the stored YUV frame to a color Mat, then transposes and flips it
// to produce a portrait-oriented frame.
public Mat rgba() {
    // NOTE(review): conversion code is COLOR_YUV2BGR_NV12 with 4 output
    // channels, yet the target field is named mRgba — channel order (BGR vs
    // RGBA) may be swapped; confirm against the consumer of this frame.
    Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2BGR_NV12, 4);
    if (mRotated != null) mRotated.release();
    // Transpose + horizontal flip == 90-degree rotation.
    mRotated = mRgba.t();
    Core.flip(mRotated, mRotated, 1);
    return mRotated;
}
 
Example 10
Project: FRC6706_JAVA2017   File: VisionSubsystem2.java   View source code 6 votes vote down vote up
/**
 * Starts a daemon vision thread that streams camera frames (with a fixed
 * rectangle overlay) to the dashboard until the thread is interrupted.
 */
public void initDefaultCommand() {
  	visionThread = new Thread(() -> {
	// Acquire the USB camera from the CameraServer and fix its resolution.
	UsbCamera usbCamera = CameraServer.getInstance().startAutomaticCapture();
	usbCamera.setResolution(640, 480);

	// Frame source (from the camera) and sink (back to the dashboard).
	CvSink frameSource = CameraServer.getInstance().getVideo();
	CvSource dashboardStream = CameraServer.getInstance().putVideo("Rectangle", 640, 480);

	// Reuse one Mat for every frame — Mats are expensive to allocate.
	Mat frame = new Mat();

	// Loop on the interrupt flag (never 'while (true)') so robot-code
	// restarts and redeploys can stop this thread cleanly.
	while (!Thread.interrupted()) {
		// grabFrame returns 0 on error; report it and try the next frame.
		if (frameSource.grabFrame(frame) == 0) {
			dashboardStream.notifyError(frameSource.getError());
			continue;
		}
		// Overlay a fixed white rectangle and publish the frame.
		Imgproc.rectangle(frame, new Point(100, 100), new Point(400, 400),
				new Scalar(255, 255, 255), 5);
		dashboardStream.putFrame(frame);
	}
});
visionThread.setDaemon(true);
visionThread.start();
  }
 
Example 11
Project: OptimizedImageEnhance   File: RemoveBackScatter.java   View source code 6 votes vote down vote up
/**
 * Computes a per-pixel fusion weight map as the sum of luminance, saliency,
 * and exposedness feature weights.
 *
 * NOTE(review): this mutates the caller's img via convertTo — confirm callers
 * do not rely on the original depth afterwards.
 */
private static Mat calWeight(Mat img) {
	Mat L = new Mat();
	img.convertTo(img, CvType.CV_8UC1);
	Imgproc.cvtColor(img, L, Imgproc.COLOR_BGR2GRAY);
	// Normalize luminance to [0, 1] as float.
	L.convertTo(L, CvType.CV_32F);
	Core.divide(L, new Scalar(255.0), L);
	// calculate Luminance weight
	Mat WC = FeatureWeight.LuminanceWeight(img, L);
	WC.convertTo(WC, L.type());
	// calculate the Saliency weight
	Mat WS = FeatureWeight.Saliency(img);
	WS.convertTo(WS, L.type());
	// calculate the Exposedness weight
	Mat WE = FeatureWeight.Exposedness(L);
	WE.convertTo(WE, L.type());
	// Total weight = WC + WS + WE (element-wise).
	Mat weight = WC.clone();
	Core.add(weight, WS, weight);
	Core.add(weight, WE, weight);
	return weight;
}
 
Example 12
Project: OptimizedImageEnhance   File: FeatureWeight.java   View source code 6 votes vote down vote up
/**
 * Computes a saliency map: blur, convert to CIE Lab, and accumulate the
 * squared distance of each channel from its mean —
 * (L-Lm)^2 + (a-am)^2 + (b-bm)^2.
 *
 * @param img BGR input image
 * @return single-channel CV_32F saliency map
 */
public static Mat Saliency(Mat img) {
	// Gaussian pre-blur (3x3 kernel, sigma 3).
	Mat blurred = new Mat();
	Imgproc.GaussianBlur(img, blurred, new Size(3, 3), 3);
	// sRGB -> CIE Lab color space.
	Mat labImg = new Mat();
	Imgproc.cvtColor(blurred, labImg, Imgproc.COLOR_BGR2Lab);
	// Split channels and promote each to float.
	// (The paper takes means from the un-blurred image; results are similar.)
	List<Mat> channels = new ArrayList<>();
	Core.split(labImg, channels);
	Mat chL = channels.get(0);
	Mat chA = channels.get(1);
	Mat chB = channels.get(2);
	chL.convertTo(chL, CvType.CV_32F);
	chA.convertTo(chA, CvType.CV_32F);
	chB.convertTo(chB, CvType.CV_32F);
	double meanL = Core.mean(chL).val[0];
	double meanA = Core.mean(chA).val[0];
	double meanB = Core.mean(chB).val[0];
	// Center each channel on its mean, then accumulate the squares.
	Mat saliency = Mat.zeros(chL.rows(), chL.cols(), chL.type());
	Core.subtract(chL, new Scalar(meanL), chL);
	Core.subtract(chA, new Scalar(meanA), chA);
	Core.subtract(chB, new Scalar(meanB), chB);
	Core.add(saliency, chL.mul(chL), saliency);
	Core.add(saliency, chA.mul(chA), saliency);
	Core.add(saliency, chB.mul(chB), saliency);
	return saliency;
}
 
Example 13
Project: DNNLibrary   File: MainActivity.java   View source code 6 votes vote down vote up
/**
 * Converts a bitmap into a flat float array suitable for LeNet input:
 * 28x28, grayscale, values in [0, 1], inverted so smaller means whiter.
 */
private float[] getInputDataLeNet(Bitmap bitmap) {
    final int INPUT_LENGTH = 28;

    Mat imageMat = new Mat();
    Mat inputMat = new Mat();

    Utils.bitmapToMat(bitmap, imageMat);

    // convert the image to 28 * 28, grayscale, 0~1, and smaller means whiter
    Imgproc.cvtColor(imageMat, imageMat, Imgproc.COLOR_RGBA2GRAY);
    imageMat = centerCropAndScale(imageMat, INPUT_LENGTH);
    // Scale 0..255 bytes down to 0..1 floats.
    imageMat.convertTo(imageMat, CvType.CV_32F, 1. / 255);
    // Invert: input = 1 - pixel (dark strokes become large values).
    Core.subtract(Mat.ones(imageMat.size(), CvType.CV_32F), imageMat, inputMat);

    float[] inputData = new float[inputMat.width() * inputMat.height()];

    // Copy the whole single-channel Mat into the flat array.
    inputMat.get(0, 0, inputData);

    return inputData;
}
 
Example 14
Project: Face-detection-and-recognition-desktop-application   File: FaceDetectCropTest.java   View source code 6 votes vote down vote up
// Converts a BufferedImage into an HSV Mat by copying its raw raster bytes.
private Mat conv_Mat(BufferedImage img) {
    byte[] data = ((DataBufferByte) img.getRaster().getDataBuffer()).getData();
    Mat mat = new Mat(img.getHeight(), img.getWidth(), CvType.CV_8UC3);
    mat.put(0, 0, data);
    Mat mat1 = new Mat(img.getHeight(), img.getWidth(), CvType.CV_8UC3);
    // NOTE(review): TYPE_3BYTE_BGR rasters store bytes in BGR order, but the
    // conversion here is COLOR_RGB2HSV — the hue channel may be wrong for BGR
    // input; confirm the source image type before relying on hue values.
    Imgproc.cvtColor(mat, mat1, Imgproc.COLOR_RGB2HSV);

    return mat1;
}
 
Example 15
Project: Java-Data-Science-Made-Easy   File: DetectFaceDemo.java   View source code 6 votes vote down vote up
/**
 * Demo entry point: loads the OpenCV native library, runs an LBP cascade
 * face detector on a sample image, outlines each face in green, and writes
 * the annotated image to faceDetection.png.
 */
public void run() {
  System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
  // NOTE(review): hard-coded absolute Windows path — must exist on the
  // machine running this demo.
  String base = "C:/Books in Progress/Java for Data Science/Chapter 10/OpenCVExamples/src/resources";
  CascadeClassifier faceDetector = 
          new CascadeClassifier(base + "/lbpcascade_frontalface.xml");
  
  Mat image = Imgcodecs.imread(base + "/images.jpg");

  MatOfRect faceVectors = new MatOfRect();
  faceDetector.detectMultiScale(image, faceVectors);

  out.println(faceVectors.toArray().length + " faces found");

  // Outline each detection in green (BGR 0,255,0).
  for (Rect rect : faceVectors.toArray()) {
      Imgproc.rectangle(image, new Point(rect.x, rect.y), 
              new Point(rect.x + rect.width, rect.y + rect.height), 
              new Scalar(0, 255, 0));
  }
  Imgcodecs.imwrite("faceDetection.png", image);
}
 
Example 16
Project: FlashLib   File: CvProcessing.java   View source code 6 votes vote down vote up
/**
 * Filters contours by shape: approximates each contour's polygon and drops
 * any contour whose vertex count differs from the requested count. The list
 * is modified in place.
 *
 * @param contours list of contours to filter in place
 * @param vertices vertex count of the desired shape
 * @param accuracy approximation accuracy (fraction of the contour perimeter)
 * @see Imgproc#approxPolyDP(MatOfPoint2f, MatOfPoint2f, double, boolean)
 */
public static void detectContoursByShape(List<MatOfPoint> contours, int vertices, double accuracy){
	// Reuse two buffers across all contours to avoid per-contour allocation.
	MatOfPoint2f contour2f = new MatOfPoint2f();
	MatOfPoint2f polygon = new MatOfPoint2f();

	for (Iterator<MatOfPoint> it = contours.iterator(); it.hasNext(); ) {
		MatOfPoint contour = it.next();
		contour2f.fromList(contour.toList());
		// Tolerance scales with the contour's perimeter.
		double tolerance = Imgproc.arcLength(contour2f, true) * accuracy;
		Imgproc.approxPolyDP(contour2f, polygon, tolerance, true);
		if (polygon.total() != vertices) {
			it.remove();
		}
	}
}
 
Example 17
Project: Ftc2018RelicRecovery   File: TrcOpenCvDetector.java   View source code 6 votes vote down vote up
/**
 * Overlays a rectangle on each detected object and pushes the annotated frame
 * to the video output. Synchronizes on the image so rendering and detection
 * do not interleave.
 *
 * @param image specifies the frame to be rendered to the video output.
 * @param detectedObjectRects specifies the detected object rectangles.
 * @param color specifies the color of the rectangle outline.
 * @param thickness specifies the thickness of the rectangle outline.
 */
public void drawRectangles(Mat image, Rect[] detectedObjectRects, Scalar color, int thickness)
{
    synchronized (image)
    {
        if (detectedObjectRects != null)
        {
            // Outline every detected object.
            for (Rect rect : detectedObjectRects)
            {
                Imgproc.rectangle(
                    image,
                    new Point(rect.x, rect.y),
                    new Point(rect.x + rect.width, rect.y + rect.height),
                    color,
                    thickness);
            }
        }

        videoSource.putFrame(image);
    }
}
 
Example 18
Project: FlashLib   File: CvProcessing.java   View source code 6 votes vote down vote up
/**
 * Converts a mat to gray. A 3-channel 8-bit mat is color-converted; a
 * single-channel 8-bit mat is copied as-is. Any other type leaves
 * {@code gray} untouched.
 *
 * @param mat a mat to convert
 * @param gray a mat to fill with gray data
 * @return the gray mat
 * @see Imgproc#cvtColor(Mat, Mat, int)
 */
public static Mat rgbToGray(Mat mat, Mat gray){
	final int type = mat.type();
	if (type == CvType.CV_8UC3) {
		Imgproc.cvtColor(mat, gray, Imgproc.COLOR_RGB2GRAY);
	} else if (type == CvType.CV_8UC1) {
		// Already gray — just copy.
		mat.copyTo(gray);
	}
	return gray;
}
 
Example 19
Project: Paper-Melody   File: ImageUtil.java   View source code 6 votes vote down vote up
// Converts an I420 YUV Mat to a BGR Mat sized from the given Image.
public static Mat yuvToBgr(Image image, Mat yuvMat) {
    // NOTE(review): preallocated as CV_8UC4, but COLOR_YUV2BGR_I420 outputs
    // 3 channels — cvtColor reallocates the destination anyway, so the
    // initial type is effectively ignored.
    Mat bgrMat = new Mat(image.getHeight(), image.getWidth(), CvType.CV_8UC4);
    //Log.d("TESTCALL", bgrMat.rows() + " " + bgrMat.cols());
    Imgproc.cvtColor(yuvMat, bgrMat, Imgproc.COLOR_YUV2BGR_I420);
    //Log.d("TESTCALL", yuvMat.rows() + " " + yuvMat.cols());
    return bgrMat;
}
 
Example 20
Project: RobotIGS   File: Transform.java   View source code 6 votes vote down vote up
/**
 * Rotate an image by an angle (counterclockwise), enlarging the canvas so the
 * rotated content is not clipped. The image is modified in place.
 *
 * @param image Matrix to rotate
 * @param angle Angle to rotate by (counterclockwise) from -360 to 360
 */
public static void rotate(Mat image, double angle) {
    // Bounding box of the rotated image.
    double radians = Math.toRadians(angle);
    double sin = Math.abs(Math.sin(radians));
    double cos = Math.abs(Math.cos(radians));

    int newWidth = (int) (image.width() * cos + image.height() * sin);
    int newHeight = (int) (image.width() * sin + image.height() * cos);

    // Rotate about the ORIGINAL image center, then shift so the result is
    // centered in the enlarged canvas.
    Point center = new Point(image.width() / 2.0, image.height() / 2.0);
    Mat rotMatrix = Imgproc.getRotationMatrix2D(center, angle, 1.0); //1.0 means 100 % scale
    rotMatrix.put(0, 2, rotMatrix.get(0, 2)[0] + (newWidth - image.width()) / 2.0);
    rotMatrix.put(1, 2, rotMatrix.get(1, 2)[0] + (newHeight - image.height()) / 2.0);

    // BUG FIX: the enlarged Size was previously computed but never passed to
    // warpAffine (image.size() was used), which clipped the rotated image.
    Size size = new Size(newWidth, newHeight);
    Imgproc.warpAffine(image, image, rotMatrix, size);
}
 
Example 21
Project: frc-2017   File: Vision.java   View source code 6 votes vote down vote up
/**
 * Finds contours in a binary image and stores them in the supplied list
 * (cleared first). The hierarchy output is discarded.
 *
 * (The previous javadoc described a Distance Transform and did not match
 * this method; replaced.)
 *
 * @param input
 *            binary image to find contours in.
 * @param externalOnly
 *            if true, only outermost contours are returned (RETR_EXTERNAL);
 *            otherwise all contours are returned flat (RETR_LIST).
 * @param contours
 *            output list to fill with the detected contours.
 */
private void findContours(Mat input, boolean externalOnly, List<MatOfPoint> contours) {
	Mat hierarchy = new Mat();
	contours.clear();
	int mode;
	if (externalOnly) {
		mode = Imgproc.RETR_EXTERNAL;
	} else {
		mode = Imgproc.RETR_LIST;
	}
	int method = Imgproc.CHAIN_APPROX_SIMPLE;
	Imgproc.findContours(input, contours, hierarchy, mode, method);
}
 
Example 22
Project: OptimizedImageEnhance   File: ImShow.java   View source code 6 votes vote down vote up
// Displays the given Mat in the Swing window, optionally resizing it first.
// Note: resizing mutates the caller's Mat in place.
public void showImage(Mat img) {
	if (SizeCustom) {
		// NOTE(review): OpenCV's Size is (width, height); passing (Height, Width)
		// looks swapped — confirm the intended orientation before relying on it.
		Imgproc.resize(img, img, new Size(Height, Width));
	}
	// Highgui.imencode(".jpg", img, matOfByte);
	// byte[] byteArray = matOfByte.toArray();
	BufferedImage bufImage = null;
	try {
		// InputStream in = new ByteArrayInputStream(byteArray);
		// bufImage = ImageIO.read(in);
		bufImage = toBufferedImage(img);
		image.setImage(bufImage);
		Window.pack();
		label.updateUI();
		Window.setVisible(true);
	} catch (Exception e) {
		e.printStackTrace();
	}
}
 
Example 23
Project: Java-for-Data-Science   File: DetectFaceDemo.java   View source code 6 votes vote down vote up
/**
 * Demo entry point: loads the OpenCV native library, runs an LBP cascade
 * face detector on a sample image, outlines each face in green, and writes
 * the annotated image to faceDetection.png.
 */
public void run() {
  System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
  // NOTE(review): hard-coded absolute Windows path — must exist on the
  // machine running this demo.
  String base = "C:/Books in Progress/Java for Data Science/Chapter 10/OpenCVExamples/src/resources";
  CascadeClassifier faceDetector = 
          new CascadeClassifier(base + "/lbpcascade_frontalface.xml");
  
  Mat image = Imgcodecs.imread(base + "/images.jpg");

  MatOfRect faceVectors = new MatOfRect();
  faceDetector.detectMultiScale(image, faceVectors);

  out.println(faceVectors.toArray().length + " faces found");

  // Outline each detection in green (BGR 0,255,0).
  for (Rect rect : faceVectors.toArray()) {
      Imgproc.rectangle(image, new Point(rect.x, rect.y), 
              new Point(rect.x + rect.width, rect.y + rect.height), 
              new Scalar(0, 255, 0));
  }
  Imgcodecs.imwrite("faceDetection.png", image);
}
 
Example 24
Project: Java-for-Data-Science   File: OpenCVNonMavenExamples.java   View source code 6 votes vote down vote up
/**
 * Removes noise from an image via morphological opening followed by closing
 * with a 3x3 all-white kernel, and writes the result to disk.
 *
 * Cleaned up: the path variables were previously assigned several times in a
 * row (dead stores); only the final values were ever used.
 */
public static void denoise() {
        String imgInPath = "blurredtext.jpg";
        String imgOutPath = "MyNoiseRemovedCaptcha.PNG";

        Mat image = Imgcodecs.imread(imgInPath);
        Mat out = new Mat();
        Mat tmp = new Mat();
        // 3x3 structuring element of 255s.
        Mat kernel = new Mat(new Size(3, 3), CvType.CV_8UC1, new Scalar(255));
//        Mat kernel = new Mat(image.size(), CvType.CV_8UC1, new Scalar(255));
        // Opening removes small bright specks; closing fills small dark holes.
        Imgproc.morphologyEx(image, tmp, Imgproc.MORPH_OPEN, kernel);
        Imgproc.morphologyEx(tmp, out, Imgproc.MORPH_CLOSE, kernel);
        Imgcodecs.imwrite(imgOutPath, out);
    }
 
Example 25
Project: FTC2016   File: Color.java   View source code 6 votes vote down vote up
/**
 * Convert a matrix in one color space to another
 *
 * @param in       Input matrix
 * @param spaceIn  Input colorspace
 * @param spaceOut Output colorspace
 * @return Matrix in output colorspace
 */
public static Mat convertColorMat(Mat in, ColorSpace spaceIn, ColorSpace spaceOut) {
    if (spaceIn == spaceOut)
        return in;
    if (!spaceIn.canConvertTo(spaceOut))
        throw new IllegalArgumentException("Cannot convert color to the desired color space.");

    Mat output = in.clone();

    try {
        for (int i = 0; i < spaceIn.getConversionsTo(spaceOut).length; i += 3) {
            int conversion = spaceIn.getConversionsTo(spaceOut)[i];
            int inputDim = spaceIn.getConversionsTo(spaceOut)[i + 1];
            int outputDim = spaceIn.getConversionsTo(spaceOut)[i + 2];

            Imgproc.cvtColor(output, output, conversion, outputDim);
        }
    } catch (Exception ignored) {
        throw new IllegalArgumentException("Cannot convert color to the desired color space.");
    }

    return output;
}
 
Example 26
Project: ImageEnhanceViaFusion   File: Pyramid.java   View source code 6 votes vote down vote up
/**
 * Builds a Laplacian (difference-of-Gaussians) pyramid with the given number
 * of levels. Each level except the coarsest holds the difference between that
 * scale and the upsampled next-coarser scale; the last level is a plain
 * downscaled image.
 *
 * @param img   source image
 * @param level number of pyramid levels (>= 1)
 * @return array of pyramid levels, finest first
 */
public static Mat[] LaplacianPyramid(Mat img, int level) {
	Mat[] pyramid = new Mat[level];
	// Half-resolution chain: pyramid[i] is img downscaled i times.
	pyramid[0] = img.clone();
	Mat current = img.clone();
	for (int lvl = 1; lvl < level; lvl++) {
		Imgproc.resize(current, current, new Size(), 0.5, 0.5, Imgproc.INTER_LINEAR);
		pyramid[lvl] = current.clone();
	}
	// DoG bands: level[i] -= upsample(level[i+1]).
	for (int lvl = 0; lvl + 1 < level; lvl++) {
		Mat upsampled = new Mat();
		Imgproc.resize(pyramid[lvl + 1], upsampled, pyramid[lvl].size(), 0, 0, Imgproc.INTER_LINEAR);
		Core.subtract(pyramid[lvl], upsampled, pyramid[lvl]);
	}
	return pyramid;
}
 
Example 27
Project: SpotSpotter   File: Draw.java   View source code 6 votes vote down vote up
/**
 * Draws a magenta circle on the image for each (point, value) entry, with the
 * circle radius proportional to the value and coordinates scaled by the
 * mosaic cell size. Infinite values are clamped to 1.
 *
 * Cleaned up: removed a raw-typed dead store ({@code Map dataMap = new
 * HashMap<>()} immediately overwritten) and the deprecated
 * {@code new Double(...)} boxing.
 */
public static void pointMapList(Mat input, List<Map<Point, Double>> list, int radiusRatio, int thickness,
			int mosaicLength) {
		for (final Map<Point, Double> dataMap : list) {
			for (final Map.Entry<Point, Double> entry : dataMap.entrySet()) {
				final Point p = entry.getKey();
				double v = entry.getValue();
				if (Double.isInfinite(v)) {
					v = 1;
				}
				// Cast truncates toward zero — same as the old Double.intValue().
				final int radius = (int) (radiusRatio * v);
				Imgproc.circle(input, new Point(p.x * mosaicLength, p.y * mosaicLength), radius,
						new Scalar(255, 0, 255), thickness);
			}
		}
//		System.out.println("Done Drawing");
	}
 
Example 28
Project: JuniperBotJ   File: OpenCVService.java   View source code 6 votes vote down vote up
/**
 * Applies a Gaussian blur with the given kernel radius to an image.
 *
 * @param source image to blur
 * @param radius desired kernel size; rounded up to the next odd value because
 *               OpenCV's GaussianBlur requires odd, positive kernel dimensions
 *               (an even value would previously make OpenCV throw)
 * @return the blurred image
 * @throws IOException if OpenCV has not been initialized
 */
public BufferedImage blur(BufferedImage source, int radius) throws IOException {
    if (!initialized) {
        throw new IOException("OpenCV unavailable");
    }
    Mat sourceMat = getMat(source);
    Mat destination = new Mat(sourceMat.rows(), sourceMat.cols(), sourceMat.type());
    // BUG FIX: force an odd, positive kernel size (sigma is derived from it).
    int kernelSize = Math.max(1, radius | 1);
    Imgproc.GaussianBlur(sourceMat, destination, new Size(kernelSize, kernelSize), 0);
    return getImage(destination);
}
 
Example 29
Project: Android-opencv-native-samples   File: FdActivity.java   View source code 5 votes vote down vote up
/**
 * Camera callback: applies the user-selected rotate/flip to the color frame
 * natively, detects faces on the gray frame (Java cascade or native
 * detector), and outlines each detection.
 */
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    mGray = inputFrame.gray();
    // Native rotate/flip on the color frame, driven by UI-selected settings.
    rotateFlipImage(ImageControllers.getInstance().getFlipType(),
                        ImageControllers.getInstance().getMakeTranspose(),
                                                mRgba.getNativeObjAddr());
    // Lazily derive the minimum face size from the frame height.
    if (mAbsoluteFaceSize == 0) {
        int height = mGray.rows();
        if (Math.round(height * mRelativeFaceSize) > 0) {
            mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
        }
        mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
    }

    MatOfRect faces = new MatOfRect();

    if (mDetectorType == JAVA_DETECTOR) {
        if (mJavaDetector != null)
            mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                    new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
    }
    else if (mDetectorType == NATIVE_DETECTOR) {
        if (mNativeDetector != null)
            mNativeDetector.detect(mGray, faces);
        Log.v(LOG_TAG, "native detect");
    }
    else {
        Log.e(TAG, "Detection method is not selected!");
    }

    // Outline every detected face with a 3-pixel rectangle.
    Rect[] facesArray = faces.toArray();
    for (int i = 0; i < facesArray.length; i++)
        Imgproc.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);

    return mRgba;
}
 
Example 30
Project: FRC-6416-frc2017   File: GripPipeline.java   View source code 5 votes vote down vote up
/**
 * Filters out contours that do not meet certain criteria.
 * (Generated by GRIP; javadoc corrected to match the actual parameter order
 * and fixed typos.)
 * @param inputContours is the input list of contours
 * @param minArea is the minimum area of a contour that will be kept
 * @param minPerimeter is the minimum perimeter of a contour that will be kept
 * @param minWidth minimum width of a contour
 * @param maxWidth maximum width
 * @param minHeight minimum height
 * @param maxHeight maximum height
 * @param solidity the minimum and maximum solidity of a contour, as {min, max}
 * @param maxVertexCount maximum vertex count of the contours
 * @param minVertexCount minimum vertex count
 * @param minRatio minimum ratio of width to height
 * @param maxRatio maximum ratio of width to height
 * @param output is the output list of contours
 */
private void filterContours(List<MatOfPoint> inputContours, double minArea,
	double minPerimeter, double minWidth, double maxWidth, double minHeight, double
	maxHeight, double[] solidity, double maxVertexCount, double minVertexCount, double
	minRatio, double maxRatio, List<MatOfPoint> output) {
	final MatOfInt hull = new MatOfInt();
	output.clear();
	//operation
	for (int i = 0; i < inputContours.size(); i++) {
		final MatOfPoint contour = inputContours.get(i);
		final Rect bb = Imgproc.boundingRect(contour);
		if (bb.width < minWidth || bb.width > maxWidth) continue;
		if (bb.height < minHeight || bb.height > maxHeight) continue;
		final double area = Imgproc.contourArea(contour);
		if (area < minArea) continue;
		if (Imgproc.arcLength(new MatOfPoint2f(contour.toArray()), true) < minPerimeter) continue;
		// Build the convex hull as a contour to measure solidity.
		Imgproc.convexHull(contour, hull);
		MatOfPoint mopHull = new MatOfPoint();
		mopHull.create((int) hull.size().height, 1, CvType.CV_32SC2);
		for (int j = 0; j < hull.size().height; j++) {
			int index = (int)hull.get(j, 0)[0];
			double[] point = new double[] { contour.get(index, 0)[0], contour.get(index, 0)[1]};
			mopHull.put(j, 0, point);
		}
		// Solidity = contour area as a percentage of its hull area.
		final double solid = 100 * area / Imgproc.contourArea(mopHull);
		if (solid < solidity[0] || solid > solidity[1]) continue;
		if (contour.rows() < minVertexCount || contour.rows() > maxVertexCount)	continue;
		final double ratio = bb.width / (double)bb.height;
		if (ratio < minRatio || ratio > maxRatio) continue;
		output.add(contour);
	}
}
 
Example 31
Project: DemonVision   File: TargetSource.java   View source code 5 votes vote down vote up
/**
 * Wraps each processed contour in its axis-aligned bounding rectangle.
 *
 * @return the unprocessed targets as bounding rectangles
 * @see TargetSource#getContoursReport()
 * @see Rect
 */
default Rect[] getRawTargets() {
	final List<MatOfPoint> contours = this.getContoursReport();
	final Rect[] targets = new Rect[contours.size()];

	int idx = 0;
	for (MatOfPoint contour : contours) {
		targets[idx++] = Imgproc.boundingRect(contour);
	}

	return targets;
}
 
Example 32
Project: classchecks   File: ImgprocessUtils.java   View source code 5 votes vote down vote up
/**
 * Algorithm outline (translated from the original Chinese javadoc):
 * 1. Compute the mean gray level of the source image I, noting rows and cols;
 * 2. Partition I into N*M fixed-size blocks and compute each block's mean,
 *    giving a block-brightness matrix D;
 * 3. Subtract the global mean from every element of D, giving the brightness
 *    difference matrix E;
 * 4. Bicubic-interpolate E up to the source image size, giving the brightness
 *    distribution matrix R;
 * 5. The corrected image is result = I - R.
 * @Title: unevenLightCompensate
 * @Description: uneven-illumination compensation (in place)
 * @param image image to correct; converted to grayscale if 3-channel
 * @param blockSize side length in pixels of the averaging blocks
 * void
 * @throws
 */
public static void unevenLightCompensate(Mat image, int blockSize) {
	if(image.channels() == 3) {
		// 7 == Imgproc.COLOR_BGR2GRAY (magic constant kept as-is).
		Imgproc.cvtColor(image, image, 7);
	}
	double average = Core.mean(image).val[0];
	Scalar scalar = new Scalar(average);
	// Number of blocks in each dimension (last row/col may be partial).
	int rowsNew = (int) Math.ceil((double)image.rows() / (double)blockSize);
	int colsNew = (int) Math.ceil((double)image.cols() / (double)blockSize);
	Mat blockImage = new Mat();
	blockImage = Mat.zeros(rowsNew, colsNew, CvType.CV_32FC1);
	// Fill blockImage with the mean intensity of each block.
	for(int i = 0; i < rowsNew; i ++) {
		for(int j = 0; j < colsNew; j ++) {
			int rowmin = i * blockSize;
			int rowmax = (i + 1) * blockSize;
			if(rowmax > image.rows()) rowmax = image.rows();
			int colmin = j * blockSize;
			int colmax = (j +1) * blockSize;
			if(colmax > image.cols()) colmax = image.cols();
			Range rangeRow = new Range(rowmin, rowmax);
			Range rangeCol = new Range(colmin, colmax);
			Mat imageROI = new Mat(image, rangeRow, rangeCol);
			double temaver = Core.mean(imageROI).val[0];
			blockImage.put(i, j, temaver);
		}
	}
	
	// Difference matrix E = D - global mean.
	Core.subtract(blockImage, scalar, blockImage);
	Mat blockImage2 = new Mat();
	// 2 == Imgproc.INTER_CUBIC (bicubic interpolation up to full size).
	int INTER_CUBIC = 2;
	Imgproc.resize(blockImage, blockImage2, image.size(), 0, 0, INTER_CUBIC);
	Mat image2 = new Mat();
	image.convertTo(image2, CvType.CV_32FC1);
	Mat dst = new Mat();
	// result = I - R, written back into the caller's image as 8-bit.
	Core.subtract(image2, blockImage2, dst);
	dst.convertTo(image, CvType.CV_8UC1);
}
 
Example 33
Project: Robot-Vision-API   File: ConvexHullContourFilter.java   View source code 5 votes vote down vote up
@Override
public List<MatOfPoint> filterContours(List<MatOfPoint> contours) {
    // Keep only the contours whose geometry falls inside every configured
    // range; the filtering rules mirror the GRIP-generated pipeline.
    final List<MatOfPoint> accepted = new ArrayList<>();
    final MatOfInt hullIndices = new MatOfInt();
    for (final MatOfPoint candidate : contours) {
        final Rect box = Imgproc.boundingRect(candidate);
        final boolean sizeOk = box.width >= width.start && box.width <= width.end
                && box.height >= height.start && box.height <= height.end;
        if (!sizeOk) {
            continue;
        }
        final double area = Imgproc.contourArea(candidate);
        if (area < minArea) {
            continue;
        }
        if (Imgproc.arcLength(new MatOfPoint2f(candidate.toArray()), true) < minPerimeter) {
            continue;
        }
        // Rebuild the convex hull as a point matrix so its area can be measured.
        Imgproc.convexHull(candidate, hullIndices);
        final int hullSize = (int) hullIndices.size().height;
        final MatOfPoint hullPoints = new MatOfPoint();
        hullPoints.create(hullSize, 1, CvType.CV_32SC2);
        for (int j = 0; j < hullSize; j++) {
            final int idx = (int) hullIndices.get(j, 0)[0];
            hullPoints.put(j, 0, new double[]{candidate.get(idx, 0)[0], candidate.get(idx, 0)[1]});
        }
        // Solidity: contour area as a percentage of its convex hull's area.
        final double solidityPct = 100 * area / Imgproc.contourArea(hullPoints);
        if (solidityPct < solidity.start || solidityPct > solidity.end) {
            continue;
        }
        if (candidate.rows() < vertexCount.start || candidate.rows() > vertexCount.end) {
            continue;
        }
        final double aspect = box.width / (double) box.height;
        if (aspect < widthToHeightRatio.start || aspect > widthToHeightRatio.end) {
            continue;
        }
        accepted.add(candidate);
    }
    return accepted;
}
 
Example 34
Project: SpotSpotter   File: Resize.java   View source code 5 votes vote down vote up
/**
 * Shrinks {@code input} by repeated 10% steps until both dimensions fit
 * within the target bounds.
 *
 * @param input   image to shrink; never modified
 * @param targetX maximum allowed width, in pixels
 * @param targetY maximum allowed height, in pixels
 * @return a copy of {@code input} scaled down to the first 0.9^k size that
 *         fits; an unscaled clone when it already fits
 */
public static Mat tillFit(Mat input, double targetX, double targetY) {
	double newWidth = input.width();
	double newHeight = input.height();
	// Find the first 0.9^k scale at which both dimensions fit.
	boolean needsResize = false;
	while (newWidth > targetX || newHeight > targetY) {
		newWidth *= 0.9;
		newHeight *= 0.9;
		needsResize = true;
	}
	final Mat result = input.clone();
	if (needsResize) {
		// A single resize from the original is equivalent to the old loop,
		// which resized the unchanged source into the same destination on
		// every iteration — only its final pass ever survived.
		Imgproc.resize(input, result, new Size(newWidth, newHeight));
	}
	return result;
}
 
Example 35
Project: Android-Code-Demos   File: MainActivity.java   View source code 5 votes vote down vote up
/**
 * Converts the currently selected bitmap to grayscale in place and shows
 * the result in the preview ImageView.
 */
private void convertGray() {
    Mat rgba = new Mat();
    Utils.bitmapToMat(selectbp, rgba);
    // Drop the alpha channel first so the gray conversion sees 3 channels.
    Mat bgr = new Mat();
    Imgproc.cvtColor(rgba, bgr, Imgproc.COLOR_BGRA2BGR);
    Log.i("CV", "image type:" + (bgr.type() == CvType.CV_8UC3));
    Mat gray = new Mat();
    Imgproc.cvtColor(bgr, gray, Imgproc.COLOR_BGR2GRAY);
    Utils.matToBitmap(gray, selectbp);
    myImageView.setImageBitmap(selectbp);
}
 
Example 36
Project: ImageEnhanceViaFusion   File: Pyramid.java   View source code 5 votes vote down vote up
/**
 * Collapses a pyramid back into a single image by repeatedly upsampling
 * each level and adding it into the next finer level.
 * <p>
 * NOTE: the levels of {@code pyramid} are modified in place; index 0 ends
 * up holding the reconstructed image and is returned.
 *
 * @param pyramid pyramid levels, index 0 = finest (largest) level
 * @return the reconstructed image (same object as {@code pyramid[0]})
 */
public static Mat PyramidReconstruct(Mat[] pyramid) {
	for (int lvl = pyramid.length - 1; lvl >= 1; lvl--) {
		// Bring the coarser level up to the size of the level above it.
		Mat upsampled = new Mat();
		Imgproc.resize(pyramid[lvl], upsampled, pyramid[lvl - 1].size(), 0, 0, Imgproc.INTER_LINEAR);
		Core.add(pyramid[lvl - 1], upsampled, pyramid[lvl - 1]);
	}
	return pyramid[0];
}
 
Example 37
Project: FlashLib   File: CvProcessing.java   View source code 5 votes vote down vote up
/**
 * Removes every contour for which no detected circle center satisfies the
 * point-in-polygon condition below.
 *
 * @param threshold thresholded image passed to {@code DetectCircle}
 * @param contours  contour list, filtered in place
 */
public static void FilterByCircle(Mat threshold, List<MatOfPoint> contours) {
	Mat circles = DetectCircle(threshold, 10);
	// Iterate backwards: the original forward loop called contours.remove(i)
	// and then advanced i, silently skipping the element that slid into
	// position i after each removal.
	for (int i = contours.size() - 1; i >= 0; i--) {
		// Convert via toArray(): passing a CV_32SC2 MatOfPoint directly to
		// the MatOfPoint2f(Mat) constructor fails its CV_32FC2 type check.
		// (Matches the conversion style used elsewhere in this file.)
		MatOfPoint2f contour2f = new MatOfPoint2f(contours.get(i).toArray());
		boolean foundMatch = false;
		for (int j = 0; j < circles.rows(); j++) {
			double[] data = circles.get(j, 0);
			Point center = new Point(data[0], data[1]);
			double distance = Imgproc.pointPolygonTest(contour2f, center, true);
			// NOTE(review): pointPolygonTest returns a NEGATIVE distance when
			// the point is OUTSIDE the contour, so this matches contours whose
			// circle centers lie outside them. Preserved as-is, but the
			// original "//inside" comment suggests `distance > 0` may have
			// been intended — verify against callers before changing.
			if (distance < 0) {
				foundMatch = true; // found a matching circle for this contour
				break;
			}
		}
		if (!foundMatch)
			contours.remove(i);
	}
}
 
Example 38
Project: pdi   File: TransformacaoPerspectiva.java   View source code 5 votes vote down vote up
/**
 * Once all four corner points have been picked, computes the forward and
 * inverse perspective transforms and renders the warped projection; a click
 * inside the projection window records a new perspective point.
 *
 * @param projWnd window displaying the projected image
 */
private static void doProjection(ImgWindow projWnd) {
    // Guard clause: nothing to do until all four corners are selected.
    if (corners.size() != 4) {
        return;
    }
    final Mat cornersMat = Converters.vector_Point2f_to_Mat(corners);
    final Mat targetMat = Converters.vector_Point2f_to_Mat(target);
    trans = Imgproc.getPerspectiveTransform(cornersMat, targetMat);
    invTrans = Imgproc.getPerspectiveTransform(targetMat, cornersMat);
    proj = new Mat();
    Imgproc.warpPerspective(img, proj, trans, new Size(img.cols(), img.rows()));
    if (projWnd.isClicked()) {
        perspPoints.add(new Point(projWnd.mouseX, projWnd.mouseY));
    }
}
 
Example 39
Project: DogeCV   File: GlyphDetector.java   View source code 5 votes vote down vote up
/**
 * Finds the smallest contour area among the given contours.
 *
 * @param allContours contours to inspect
 * @return the minimum area, or {@code Double.MAX_VALUE} when the list is empty
 */
private double GetMinArea(List<MatOfPoint> allContours) {
    // The original named this "currentMax" even though it tracks the minimum.
    double minArea = Double.MAX_VALUE;

    for (MatOfPoint contour : allContours) {
        double area = Imgproc.contourArea(contour);
        if (area < minArea) {
            minArea = area;
        }
    }

    return minArea;
}
 
Example 40
Project: OptimizedImageEnhance   File: FeatureWeight.java   View source code 5 votes vote down vote up
/**
 * Computes a Laplacian-contrast weight map: the Laplacian response of the
 * image, converted to absolute 8-bit values.
 *
 * @param img source image
 * @return the absolute Laplacian of {@code img}
 */
public static Mat LaplacianContrast(Mat img) {
	final Mat response = new Mat();
	// Second-derivative filter, computed at the image's own depth.
	Imgproc.Laplacian(img, response, img.depth());
	Core.convertScaleAbs(response, response);
	return response;
}