Java Code Examples for org.opencv.core.Core#sumElems()

The following examples show how to use org.opencv.core.Core#sumElems(). You can vote up the examples you find useful or vote down those you don't, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example 1
Source File: DetectionHelper.java    From ml-authentication with Apache License 2.0 6 votes vote down vote up
/**
 * Estimates the average brightness of {@code img}.
 * <p>
 * Builds a weighted-sum luminance plane from the image's first three channels,
 * sums it, and normalizes by (max channel value * pixel count), scaled by 2.
 *
 * @param img source image; expected to have at least 3 channels
 * @return normalized brightness estimate, or 1 if the image has no channels
 */
private static double getImageBrightness(Mat img){
    List<Mat> color = new ArrayList<Mat>(3);
    Mat lum = new Mat();

    // The original allocated a throwaway Mat for "temp" and then immediately
    // replaced it with img; split img directly instead.
    Core.split(img, color);

    if(color.size() > 0){
        // Rec. 601 luma weights. NOTE(review): channel 0 is weighted with the
        // red coefficient (0.299); for BGR-ordered Mats the 0.299 and 0.114
        // weights would need swapping — confirm the caller's channel order.
        Core.multiply(color.get(0), new Scalar(0.299), color.get(0));
        Core.multiply(color.get(1), new Scalar(0.587), color.get(1));
        Core.multiply(color.get(2), new Scalar(0.114), color.get(2));

        Core.add(color.get(0), color.get(1), lum);
        Core.add(lum, color.get(2), lum);

        Scalar sum = Core.sumElems(lum);

        // Bug fix: "1<<8 - 1" parses as 1 << (8 - 1) == 128 because '-' binds
        // tighter than '<<'; the intended max 8-bit channel value is
        // (1 << 8) - 1 == 255. Use a double denominator so rows*cols cannot
        // overflow int for very large images.
        double brightness =
                sum.val[0] / (((1 << 8) - 1) * (double) img.rows() * img.cols()) * 2;

        // Release native buffers; OpenCV Java Mats are not freed by the GC promptly.
        for (Mat c : color) c.release();
        lum.release();
        return brightness;
    } else {
        lum.release();
        return 1;
    }
}
 
Example 2
Source File: DetectionHelper.java    From ml-authentication with Apache License 2.0 6 votes vote down vote up
/**
 * Estimates the average brightness of {@code img}.
 * <p>
 * Builds a weighted-sum luminance plane from the image's first three channels,
 * sums it, and normalizes by (max channel value * pixel count), scaled by 2.
 *
 * @param img source image; expected to have at least 3 channels
 * @return normalized brightness estimate, or 1 if the image has no channels
 */
private static double getImageBrightness(Mat img){
    List<Mat> color = new ArrayList<Mat>(3);
    Mat lum = new Mat();

    // The original allocated a throwaway Mat for "temp" and then immediately
    // replaced it with img; split img directly instead.
    Core.split(img, color);

    if(color.size() > 0){
        // Rec. 601 luma weights. NOTE(review): channel 0 is weighted with the
        // red coefficient (0.299); for BGR-ordered Mats the 0.299 and 0.114
        // weights would need swapping — confirm the caller's channel order.
        Core.multiply(color.get(0), new Scalar(0.299), color.get(0));
        Core.multiply(color.get(1), new Scalar(0.587), color.get(1));
        Core.multiply(color.get(2), new Scalar(0.114), color.get(2));

        Core.add(color.get(0), color.get(1), lum);
        Core.add(lum, color.get(2), lum);

        Scalar sum = Core.sumElems(lum);

        // Bug fix: "1<<8 - 1" parses as 1 << (8 - 1) == 128 because '-' binds
        // tighter than '<<'; the intended max 8-bit channel value is
        // (1 << 8) - 1 == 255. Use a double denominator so rows*cols cannot
        // overflow int for very large images.
        double brightness =
                sum.val[0] / (((1 << 8) - 1) * (double) img.rows() * img.cols()) * 2;

        // Release native buffers; OpenCV Java Mats are not freed by the GC promptly.
        for (Mat c : color) c.release();
        lum.release();
        return brightness;
    } else {
        lum.release();
        return 1;
    }
}
 
Example 3
Source File: BlkTransEstimate.java    From OptimizedImageEnhance with MIT License 5 votes vote down vote up
/**
 * Estimates the best transmission value for a single-channel image block by
 * scanning candidate transmissions and minimizing an information-loss /
 * contrast cost function.
 *
 * @param blkIm    single-channel block image
 * @param airlight estimated atmospheric light for this channel
 * @param lambda   weight of the information-loss term in the cost
 * @param fTrans   initial (minimum) transmission candidate, in (0, 1]
 * @return the transmission value with minimum cost
 */
public static double blkEstimateEachChannel(Mat blkIm, double airlight, double lambda, double fTrans) {
	double Trans = 0.0;
	double nTrans = Math.floor(1.0 / fTrans * 128);
	double fMinCost = Double.MAX_VALUE;
	int numberOfPixels = blkIm.rows() * blkIm.cols() * blkIm.channels();
	int nCounter = 0;
	// Bug fix: the original condition "(int) (1 - fTrans) * 10" casts only
	// (1 - fTrans), which truncates to 0 for any fTrans in (0, 1], so the loop
	// never executed and the method always returned 0.0. Use the same uncast
	// condition as the sibling blkEstimate().
	while (nCounter < (1.0 - fTrans) * 10) {
		// initial dehazing process to calculate the loss information
		Mat channel = blkIm.clone();
		channel = preDehaze(channel, airlight, nTrans);
		// find the pixels with over-255 value and below-0 value, and
		// calculate the sum of information loss
		double nSumOfLoss = 0.0;
		for (int i = 0; i < channel.rows(); i++) {
			for (int j = 0; j < channel.cols(); j++) {
				// Hoist the pixel read: Mat.get() is a JNI call that allocates
				// a double[]; the original performed it up to twice per pixel.
				double v = channel.get(i, j)[0];
				if (v > 255.0) nSumOfLoss += (v - 255.0) * (v - 255.0);
				else if (v < 0.0) nSumOfLoss += v * v;
			}
		}
		// calculate the value of sum of square out
		double nSumOfSquareOuts = Core.sumElems(channel.mul(channel)).val[0];
		// calculate the value of sum of out
		double nSumOfOuts = Core.sumElems(channel).val[0];
		// calculate the mean value of the block image
		double fMean = nSumOfOuts / numberOfPixels;
		// cost = lambda * normalized loss - variance (contrast) of the output
		double fCost = lambda * nSumOfLoss / numberOfPixels - (nSumOfSquareOuts / numberOfPixels - fMean * fMean);
		// keep the transmission with the minimum cost seen so far
		if (nCounter == 0 || fMinCost > fCost) {
			fMinCost = fCost;
			Trans = fTrans;
		}
		// Free the per-iteration native buffer. NOTE(review): if preDehaze
		// returns a Mat distinct from its argument, the clone above still
		// leaks — confirm preDehaze's contract.
		channel.release();
		fTrans = fTrans + 0.1;
		nTrans = 1.0 / fTrans * 128;
		nCounter = nCounter + 1;
	}
	return Trans;
}
 
Example 4
Source File: BlkTransEstimate.java    From OptimizedImageEnhance with MIT License 4 votes vote down vote up
/**
 * Estimates the best transmission value for a three-channel (BGR) image block
 * by scanning candidate transmissions and minimizing an information-loss /
 * contrast cost function aggregated over all three channels.
 *
 * @param blkIm    three-channel block image
 * @param airlight per-channel atmospheric light, indexed [B, G, R]
 * @param lambda   weight of the information-loss term in the cost
 * @param fTrans   initial (minimum) transmission candidate, in (0, 1]
 * @return the transmission value with minimum cost
 */
public static double blkEstimate(Mat blkIm, double[] airlight, double lambda, double fTrans) {
	double Trans = 0.0;
	double nTrans = Math.floor(1.0 / fTrans * 128);
	double fMinCost = Double.MAX_VALUE;
	int numberOfPixels = blkIm.rows() * blkIm.cols() * blkIm.channels();
	double nCounter = 0.0;
	List<Mat> bgr = new ArrayList<>();
	Core.split(blkIm, bgr);
	while (nCounter < (1.0 - fTrans) * 10) {
		// initial dehazing process to calculate the loss information
		Mat bChannel = bgr.get(0).clone();
		bChannel = preDehaze(bChannel, airlight[0], nTrans);
		Mat gChannel = bgr.get(1).clone();
		gChannel = preDehaze(gChannel, airlight[1], nTrans);
		Mat rChannel = bgr.get(2).clone();
		rChannel = preDehaze(rChannel, airlight[2], nTrans);
		// find the pixels with over-255 value and below-0 value, and
		// calculate the sum of information loss
		double nSumOfLoss = 0.0;
		for (int i = 0; i < bChannel.rows(); i++) {
			for (int j = 0; j < bChannel.cols(); j++) {
				// Hoist each pixel read: Mat.get() is a JNI call that allocates
				// a double[]; the original re-read each channel value up to
				// three times per pixel.
				double b = bChannel.get(i, j)[0];
				double g = gChannel.get(i, j)[0];
				double r = rChannel.get(i, j)[0];
				if (b > 255.0) nSumOfLoss += (b - 255.0) * (b - 255.0);
				else if (b < 0.0) nSumOfLoss += b * b;
				if (g > 255.0) nSumOfLoss += (g - 255.0) * (g - 255.0);
				else if (g < 0.0) nSumOfLoss += g * g;
				if (r > 255.0) nSumOfLoss += (r - 255.0) * (r - 255.0);
				else if (r < 0.0) nSumOfLoss += r * r;
			}
		}
		// calculate the value of sum of square out
		double nSumOfSquareOuts = Core.sumElems(bChannel.mul(bChannel)).val[0] + Core.sumElems(gChannel.mul(gChannel)).val[0] + Core.sumElems(rChannel.mul(rChannel)).val[0];
		// calculate the value of sum of out
		double nSumOfOuts = Core.sumElems(bChannel).val[0] + Core.sumElems(gChannel).val[0] + Core.sumElems(rChannel).val[0];
		// calculate the mean value of the block image
		double fMean = nSumOfOuts / numberOfPixels;
		// cost = lambda * normalized loss - variance (contrast) of the output
		double fCost = lambda * nSumOfLoss / numberOfPixels - (nSumOfSquareOuts / numberOfPixels - fMean * fMean);
		// keep the transmission with the minimum cost seen so far
		if (nCounter == 0 || fMinCost > fCost) {
			fMinCost = fCost;
			Trans = fTrans;
		}
		// Free per-iteration native buffers. NOTE(review): if preDehaze returns
		// a Mat distinct from its argument, the clones above still leak —
		// confirm preDehaze's contract.
		bChannel.release();
		gChannel.release();
		rChannel.release();
		fTrans = fTrans + 0.1;
		nTrans = 1.0 / fTrans * 128.0;
		nCounter = nCounter + 1;
	}
	// Release the split planes as well.
	for (Mat m : bgr) m.release();
	return Trans;
}
 
Example 5
Source File: ColorBlobDetectionActivity.java    From OpenCV-AndroidSamples with MIT License 2 votes vote down vote up
/**
 * Touch handler: samples the average HSV color of a small region around the
 * touched point and configures the blob detector with it.
 *
 * @param v     the touched view (unused)
 * @param event the touch event, in view coordinates
 * @return always {@code false} — subsequent touch events are not needed
 */
public boolean onTouch(View v, MotionEvent event) {
    int cols = mRgba.cols();
    int rows = mRgba.rows();

    // The camera frame is centered inside the view; translate view coordinates
    // into frame coordinates.
    int x = (int) event.getX() - (mOpenCvCameraView.getWidth() - cols) / 2;
    int y = (int) event.getY() - (mOpenCvCameraView.getHeight() - rows) / 2;

    Log.i(TAG, "Touch image coordinates: (" + x + ", " + y + ")");

    if (x < 0 || y < 0 || x > cols || y > rows) {
        return false;
    }

    // Clamp an up-to-9x9 sampling window to the frame bounds.
    Rect touchedRect = new Rect();
    touchedRect.x = Math.max(x - 4, 0);
    touchedRect.y = Math.max(y - 4, 0);
    touchedRect.width = Math.min(x + 4, cols) - touchedRect.x;
    touchedRect.height = Math.min(y + 4, rows) - touchedRect.y;

    Mat touchedRegionRgba = mRgba.submat(touchedRect);

    Mat touchedRegionHsv = new Mat();
    Imgproc.cvtColor(touchedRegionRgba, touchedRegionHsv, Imgproc.COLOR_RGB2HSV_FULL);

    // Average the HSV values over the sampled window.
    mBlobColorHsv = Core.sumElems(touchedRegionHsv);
    int pointCount = touchedRect.width * touchedRect.height;
    for (int i = 0; i < mBlobColorHsv.val.length; i++) {
        mBlobColorHsv.val[i] /= pointCount;
    }

    mBlobColorRgba = converScalarHsv2Rgba(mBlobColorHsv);

    Log.i(TAG, "Touched rgba color: (" + mBlobColorRgba.val[0] + ", " + mBlobColorRgba.val[1] +
            ", " + mBlobColorRgba.val[2] + ", " + mBlobColorRgba.val[3] + ")");

    mDetector.setHsvColor(mBlobColorHsv);

    Imgproc.resize(mDetector.getSpectrum(), mSpectrum, SPECTRUM_SIZE);

    mIsColorSelected = true;

    // Free the native buffers backing the temporary region Mats.
    touchedRegionRgba.release();
    touchedRegionHsv.release();

    return false; // don't need subsequent touch events
}
 
Example 6
Source File: MainActivity.java    From hand_finger_recognition_android with MIT License 2 votes vote down vote up
/**
 * Touch handler: samples the average HSV color of a small region around the
 * touched point and configures the blob detector with it.
 *
 * @param v     the touched view (unused)
 * @param event the touch event, in view coordinates
 * @return always {@code false} — subsequent touch events are not needed
 */
public boolean onTouch(View v, MotionEvent event) {
    int cols = mRgba.cols();
    int rows = mRgba.rows();

    // The camera frame is centered inside the view; translate view coordinates
    // into frame coordinates.
    int x = (int) event.getX() - (mOpenCvCameraView.getWidth() - cols) / 2;
    int y = (int) event.getY() - (mOpenCvCameraView.getHeight() - rows) / 2;

    Log.i(TAG, "Touch image coordinates: (" + x + ", " + y + ")");

    if (x < 0 || y < 0 || x > cols || y > rows) {
        return false;
    }

    // Clamp an up-to-11x11 sampling window to the frame bounds.
    Rect touchedRect = new Rect();
    touchedRect.x = Math.max(x - 5, 0);
    touchedRect.y = Math.max(y - 5, 0);
    touchedRect.width = Math.min(x + 5, cols) - touchedRect.x;
    touchedRect.height = Math.min(y + 5, rows) - touchedRect.y;

    Mat touchedRegionRgba = mRgba.submat(touchedRect);

    Mat touchedRegionHsv = new Mat();
    Imgproc.cvtColor(touchedRegionRgba, touchedRegionHsv, Imgproc.COLOR_RGB2HSV_FULL);

    // Average the HSV values over the sampled window.
    mBlobColorHsv = Core.sumElems(touchedRegionHsv);
    int pointCount = touchedRect.width * touchedRect.height;
    for (int i = 0; i < mBlobColorHsv.val.length; i++) {
        mBlobColorHsv.val[i] /= pointCount;
    }

    mBlobColorRgba = converScalarHsv2Rgba(mBlobColorHsv);

    Log.i(TAG, "Touched rgba color: (" + mBlobColorRgba.val[0] + ", " + mBlobColorRgba.val[1] +
            ", " + mBlobColorRgba.val[2] + ", " + mBlobColorRgba.val[3] + ")");

    mDetector.setHsvColor(mBlobColorHsv);

    Imgproc.resize(mDetector.getSpectrum(), mSpectrum, SPECTRUM_SIZE);

    mIsColorSelected = true;

    // Free the native buffers backing the temporary region Mats.
    touchedRegionRgba.release();
    touchedRegionHsv.release();

    return false; // don't need subsequent touch events
}