org.opencv.highgui.Highgui Java Examples

The following examples show how to use org.opencv.highgui.Highgui from the OpenCV 2.4 Java bindings. Each example notes the source file and the open-source project it was taken from.
Example #1
Source File: Utils.java    From effective_android_sample with Apache License 2.0
public static Mat loadResource(Context context, int resourceId, int flags) throws IOException
{
    InputStream is = context.getResources().openRawResource(resourceId);
    ByteArrayOutputStream os = new ByteArrayOutputStream(is.available());

    byte[] buffer = new byte[4096];
    int bytesRead;
    while ((bytesRead = is.read(buffer)) != -1) {
        os.write(buffer, 0, bytesRead);
    }
    is.close();

    Mat encoded = new Mat(1, os.size(), CvType.CV_8U);
    encoded.put(0, 0, os.toByteArray());
    os.close();

    Mat decoded = Highgui.imdecode(encoded, flags);
    encoded.release();

    return decoded;
}
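For reference, a call site for this helper might look like the sketch below. It is not taken from the project; R.raw.sample is a hypothetical raw resource id, and Highgui.CV_LOAD_IMAGE_COLOR is one of the decode flags Highgui.imdecode accepts.

// Hypothetical usage from inside an Android Activity; R.raw.sample is a placeholder.
try {
    Mat img = Utils.loadResource(this, R.raw.sample, Highgui.CV_LOAD_IMAGE_COLOR);
    Log.d("Utils", "Decoded image: " + img.cols() + "x" + img.rows());
    img.release();
} catch (IOException e) {
    Log.e("Utils", "Could not decode raw resource", e);
}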
 
Example #2
Source File: Utils.java    From marvel with MIT License
public static Mat loadResource(Context context, int resourceId, int flags) throws IOException
{
    InputStream is = context.getResources().openRawResource(resourceId);
    ByteArrayOutputStream os = new ByteArrayOutputStream(is.available());

    byte[] buffer = new byte[4096];
    int bytesRead;
    while ((bytesRead = is.read(buffer)) != -1) {
        os.write(buffer, 0, bytesRead);
    }
    is.close();

    Mat encoded = new Mat(1, os.size(), CvType.CV_8U);
    encoded.put(0, 0, os.toByteArray());
    os.close();

    Mat decoded = Highgui.imdecode(encoded, flags);
    encoded.release();

    return decoded;
}
 
Example #3
Source File: Utils.java    From ResistorScanner with MIT License
public static Mat loadResource(Context context, int resourceId, int flags) throws IOException
{
    InputStream is = context.getResources().openRawResource(resourceId);
    ByteArrayOutputStream os = new ByteArrayOutputStream(is.available());

    byte[] buffer = new byte[4096];
    int bytesRead;
    while ((bytesRead = is.read(buffer)) != -1) {
        os.write(buffer, 0, bytesRead);
    }
    is.close();

    Mat encoded = new Mat(1, os.size(), CvType.CV_8U);
    encoded.put(0, 0, os.toByteArray());
    os.close();

    Mat decoded = Highgui.imdecode(encoded, flags);
    encoded.release();

    return decoded;
}
 
Example #4
Source File: AutoCalibrationManager.java    From ShootOFF with GNU General Public License v3.0
public Mat preProcessFrame(final Mat mat) {
	if (mat.channels() == 1) return mat.clone();

	final Mat newMat = new Mat(mat.rows(), mat.cols(), CvType.CV_8UC1);

	Imgproc.cvtColor(mat, newMat, Imgproc.COLOR_BGR2GRAY);

	if (logger.isTraceEnabled()) {
		String filename = String.format("grayscale.png");
		final File file = new File(filename);
		filename = file.toString();
		Highgui.imwrite(filename, newMat);
	}

	return newMat;
}
 
Example #5
Source File: Utils.java    From android-object-distance with Apache License 2.0
public static Mat loadResource(Context context, int resourceId, int flags) throws IOException
{
    InputStream is = context.getResources().openRawResource(resourceId);
    ByteArrayOutputStream os = new ByteArrayOutputStream(is.available());

    byte[] buffer = new byte[4096];
    int bytesRead;
    while ((bytesRead = is.read(buffer)) != -1) {
        os.write(buffer, 0, bytesRead);
    }
    is.close();

    Mat encoded = new Mat(1, os.size(), CvType.CV_8U);
    encoded.put(0, 0, os.toByteArray());
    os.close();

    Mat decoded = Highgui.imdecode(encoded, flags);
    encoded.release();

    return decoded;
}
 
Example #6
Source File: AutoCalibrationManager.java    From ShootOFF with GNU General Public License v3.0
private void blankRotatedRect(Mat mat, final RotatedRect rect) {
	final Mat tempMat = Mat.zeros(mat.size(), CvType.CV_8UC1);

	final Point points[] = new Point[4];
	rect.points(points);
	for (int i = 0; i < 4; ++i) {
		Core.line(tempMat, points[i], points[(i + 1) % 4], new Scalar(255, 255, 255));
	}

	final Mat tempMask = Mat.zeros((mat.rows() + 2), (mat.cols() + 2), CvType.CV_8UC1);
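	// Note: floodFill requires a mask that is two pixels wider and taller than the image, hence the +2 above.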
	Imgproc.floodFill(tempMat, tempMask, rect.center, new Scalar(255, 255, 255), null, new Scalar(0, 0, 0),
			new Scalar(254, 254, 254), 4);

	if (logger.isTraceEnabled()) {
		String filename = String.format("poly.png");
		final File file = new File(filename);
		filename = file.toString();
		Highgui.imwrite(filename, tempMat);
	}

	mat.setTo(new Scalar(0, 0, 0), tempMat);
}
 
Example #7
Source File: FaceDetector.java    From GenderRecognizer with MIT License
public static void main(String[] args) {
	System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
	
	String imagePath = "C:\\Users\\admin\\Desktop\\a.png";
	
	FaceDetector faceDetector = new FaceDetector();
	Mat[] mats = faceDetector.snipFace(imagePath, new Size(90, 90));
	
	int i=0;
	for(Mat mat: mats){
		Highgui.imwrite(imagePath.substring(0, imagePath.length()-4)+"Snipped"+i+imagePath.substring(imagePath.length()-4),
				mat);
		
		i++;
	}

	System.out.println("Done!!!");
}
 
Example #8
Source File: HistogramEqualization.java    From opencv-fun with GNU Affero General Public License v3.0
public static void main (String[] args) {
	CVLoader.load();
	
	// load the image
	Mat img = Highgui.imread("data/topdown-9.png");
	Mat equ = new Mat();
	img.copyTo(equ);
	Imgproc.blur(equ, equ, new Size(3, 3));
	
	Imgproc.cvtColor(equ, equ, Imgproc.COLOR_BGR2YCrCb);
	List<Mat> channels = new ArrayList<Mat>();
	Core.split(equ, channels);
	Imgproc.equalizeHist(channels.get(0), channels.get(0));
	Core.merge(channels, equ);
	Imgproc.cvtColor(equ, equ, Imgproc.COLOR_YCrCb2BGR);
	
	Mat gray = new Mat();
	Imgproc.cvtColor(equ, gray, Imgproc.COLOR_BGR2GRAY);
	Mat grayOrig = new Mat();
	Imgproc.cvtColor(img, grayOrig, Imgproc.COLOR_BGR2GRAY);
	
	ImgWindow.newWindow(img);
	ImgWindow.newWindow(equ);
	ImgWindow.newWindow(gray);
	ImgWindow.newWindow(grayOrig);
}
 
Example #9
Source File: HighGuiUtil.java    From javautils with Apache License 2.0
/**
 * Detects faces in an image, draws boxes around them, and writes the results
 * @param fileName
 * @param destName
 */
public static void drawRect(String fileName, String destName){
    Mat image = Highgui.imread(fileName);
    // Create a face detector from the cascade file in the resources
    // directory.
    CascadeClassifier faceDetector = new CascadeClassifier("libs/lbpcascade_frontalface.xml");
    // Detect faces in the image.
    // MatOfRect is a special container class for Rect.
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(image, faceDetections);
    // Draw a bounding box around each face.
    for (Rect rect : faceDetections.toArray()) {
        Core.rectangle(image, new Point(rect.x, rect.y),
                new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(0, 255, 0));
    }

    Highgui.imwrite(destName, image);

}
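A possible driver for this method (a sketch, not from the original project; the native library load and the file names are assumptions):

// Hypothetical driver; the paths are placeholders.
public static void main(String[] args) {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    drawRect("group_photo.jpg", "group_photo_faces.jpg");
}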
 
Example #10
Source File: Utils.java    From Android-Car-duino with GNU General Public License v2.0
public static Mat loadResource(Context context, int resourceId, int flags) throws IOException
{
    InputStream is = context.getResources().openRawResource(resourceId);
    ByteArrayOutputStream os = new ByteArrayOutputStream(is.available());

    byte[] buffer = new byte[4096];
    int bytesRead;
    while ((bytesRead = is.read(buffer)) != -1) {
        os.write(buffer, 0, bytesRead);
    }
    is.close();

    Mat encoded = new Mat(1, os.size(), CvType.CV_8U);
    encoded.put(0, 0, os.toByteArray());
    os.close();

    Mat decoded = Highgui.imdecode(encoded, flags);
    encoded.release();

    return decoded;
}
 
Example #11
Source File: OpenCVUtils.java    From mvisc with GNU General Public License v3.0
/**
 * Display image in a frame
 *
 * @param title
 * @param img
 */
public static void imshow(String title, Mat img) {

    // Convert image Mat to a jpeg
    MatOfByte imageBytes = new MatOfByte();
    Highgui.imencode(".jpg", img, imageBytes);
    
    try {
        // Put the jpeg bytes into a JFrame window and show.
        JFrame frame = new JFrame(title);
        frame.getContentPane().add(new JLabel(new ImageIcon(ImageIO.read(new ByteArrayInputStream(imageBytes.toArray())))));
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        frame.pack();
        frame.setVisible(true);
        frame.setLocation(30 + (windowNo*20), 30 + (windowNo*20));
        windowNo++;
    } catch (Exception e) {
        e.printStackTrace();
    }
}
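A minimal way to exercise this helper, as a sketch (the image path is a placeholder and the OpenCV native library must already be loaded):

// Hypothetical usage; "board.png" is a placeholder path.
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
Mat img = Highgui.imread("board.png");
OpenCVUtils.imshow("Input image", img);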
 
Example #12
Source File: MyCameraBridgeViewBase.java    From pulse with ISC License
public Mat onCameraFrame(MyCameraBridgeViewBase.CvCameraViewFrame inputFrame) {
     Mat result = null;
     switch (mPreviewFormat) {
        case Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGB:
            result = mOldStyleListener.onCameraFrame(inputFrame.rgb());
            break;
        case Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA:
            result = mOldStyleListener.onCameraFrame(inputFrame.rgba());
            break;
        case Highgui.CV_CAP_ANDROID_GREY_FRAME:
            result = mOldStyleListener.onCameraFrame(inputFrame.gray());
            break;
        default:
            Log.e(TAG, "Invalid frame format! Only RGBA and Gray Scale are supported!");
    };

    return result;
}
 
Example #13
Source File: App.java    From pulse with ISC License
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    setContentView(R.layout.app);

    camera = (MyCameraBridgeViewBase) findViewById(R.id.camera);
    camera.setCvCameraViewListener(this);
    camera.SetCaptureFormat(Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGB);
    camera.setMaxFrameSize(600, 600);

    bpmView = (BpmView) findViewById(R.id.bpm);
    bpmView.setBackgroundColor(Color.DKGRAY);
    bpmView.setTextColor(Color.LTGRAY);

    pulseView = (PulseView) findViewById(R.id.pulse);

    faceBoxPaint = initFaceBoxPaint();
    faceBoxTextPaint = initFaceBoxTextPaint();
}
 
Example #14
Source File: Utils.java    From SoftwarePilot with MIT License
public static Mat loadResource(Context context, int resourceId, int flags) throws IOException
{
    InputStream is = context.getResources().openRawResource(resourceId);
    ByteArrayOutputStream os = new ByteArrayOutputStream(is.available());

    byte[] buffer = new byte[4096];
    int bytesRead;
    while ((bytesRead = is.read(buffer)) != -1) {
        os.write(buffer, 0, bytesRead);
    }
    is.close();

    Mat encoded = new Mat(1, os.size(), CvType.CV_8U);
    encoded.put(0, 0, os.toByteArray());
    os.close();

    Mat decoded = Highgui.imdecode(encoded, flags);
    encoded.release();

    return decoded;
}
 
Example #15
Source File: FaceDetectionActivity.java    From AndroidFaceRecognizer with MIT License
private void setImagesForDatabaseEdit() {
	for(int i = 0; i < faceImages.size(); i++) {
		Mat m = Highgui.imread(thisPerson.getFacesFolderPath()+"/"+i+".jpg");
		if(m != null) {
			onFaceCaptured(m);
		}
	}
}
 
Example #16
Source File: PartialMatcher.java    From StormCV with Apache License 2.0
/**
 * Calculates descriptors as defined by detectorType and 
 * descriptorType provided at construction for the provided image
 * @param input
 * @return
 * @throws IOException
 */
private Mat calculateDescriptors(byte[] buffer) throws IOException{
	MatOfByte mob = new MatOfByte(buffer);
	Mat image = Highgui.imdecode(mob, Highgui.CV_LOAD_IMAGE_ANYCOLOR);
	
	FeatureDetector siftDetector = FeatureDetector.create(detectorType);
	MatOfKeyPoint mokp = new MatOfKeyPoint();
	siftDetector.detect(image, mokp);
	
	Mat descriptors = new Mat();
	DescriptorExtractor extractor = DescriptorExtractor.create(descriptorType);
	extractor.compute(image, mokp, descriptors);
	return descriptors;
}
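The detectorType and descriptorType values passed to create() are the integer constants defined on FeatureDetector and DescriptorExtractor. A standalone sketch of the same pattern, using the SIFT constants and a placeholder file path (java.nio.file imports assumed), might be:

// Hypothetical standalone version of the same descriptor pipeline.
byte[] buffer = Files.readAllBytes(Paths.get("query.jpg")); // placeholder image
Mat image = Highgui.imdecode(new MatOfByte(buffer), Highgui.CV_LOAD_IMAGE_ANYCOLOR);
FeatureDetector detector = FeatureDetector.create(FeatureDetector.SIFT);
MatOfKeyPoint keypoints = new MatOfKeyPoint();
detector.detect(image, keypoints);
Mat descriptors = new Mat();
DescriptorExtractor.create(DescriptorExtractor.SIFT).compute(image, keypoints, descriptors);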
 
Example #17
Source File: FaceRecognitionActivity.java    From AndroidFaceRecognizer with MIT License
private void recognize(){
  	new Thread(new Runnable() {
	
	@Override
	public void run() {
		int result = faceRecognition(capturedMat.getNativeObjAddr(), persons.size());
		for(int i = 0; i < persons.size(); i++) {
			int id = (int)persons.get(i).getId();
			if(result == id) {
				final int index = i;
				FaceRecognitionActivity.this.runOnUiThread(new Runnable() {
					
					@Override
					public void run() {
						ImageView image = (ImageView)findViewById(R.id.frresultimage);
						Mat m = Highgui.imread(persons.get(index).getFacesFolderPath()+"/1.jpg");
						final Bitmap bmp = Bitmap.createBitmap(m.cols(), m.rows(), Bitmap.Config.RGB_565);
						Utils.matToBitmap(m, bmp);
						image.setImageBitmap(bmp);
						TextView resultText = (TextView)findViewById(R.id.frresulttextview);
						resultText.setText(persons.get(index).getName());
						if(!showingResults) {
							showResults();
						}
					}
				});
			}
		}
	}
}).start();
  }
 
Example #18
Source File: FeatureMatcherOp.java    From StormCV with Apache License 2.0
/**
 * Calculates descriptors as defined by detectorType and 
 * descriptorType provided at construction for the provided image
 * @param input
 * @return
 * @throws IOException
 */
private Mat calculateDescriptors(byte[] buffer) throws IOException{
	MatOfByte mob = new MatOfByte(buffer);
	Mat image = Highgui.imdecode(mob, Highgui.CV_LOAD_IMAGE_ANYCOLOR);
	
	FeatureDetector siftDetector = FeatureDetector.create(detectorType);
	MatOfKeyPoint mokp = new MatOfKeyPoint();
	siftDetector.detect(image, mokp);
	
	Mat descriptors = new Mat();
	DescriptorExtractor extractor = DescriptorExtractor.create(descriptorType);
	extractor.compute(image, mokp, descriptors);
	return descriptors;
}
 
Example #19
Source File: CameraBridgeViewBase.java    From Android-Car-duino with GNU General Public License v2.0
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
     Mat result = null;
     switch (mPreviewFormat) {
        case Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA:
            result = mOldStyleListener.onCameraFrame(inputFrame.rgba());
            break;
        case Highgui.CV_CAP_ANDROID_GREY_FRAME:
            result = mOldStyleListener.onCameraFrame(inputFrame.gray());
            break;
        default:
            Log.e(TAG, "Invalid frame format! Only RGBA and Gray Scale are supported!");
    };

    return result;
}
 
Example #20
Source File: HaarCascadeOp.java    From StormCV with Apache License 2.0
@Override
public List<CVParticle> execute(CVParticle input) throws Exception {
	ArrayList<CVParticle> result = new ArrayList<CVParticle>();
	Frame frame = (Frame)input;
	if(frame.getImageType().equals(Frame.NO_IMAGE)) return result;

	MatOfByte mob = new MatOfByte(frame.getImageBytes());
	Mat image = Highgui.imdecode(mob, Highgui.CV_LOAD_IMAGE_COLOR);
	
	/*
	mob = new MatOfByte();
	Highgui.imencode(".png", image, mob);
	BufferedImage bi = ImageUtils.bytesToImage(mob.toArray());
	ImageIO.write(bi, "png", new File("testOutput/"+sf.getStreamId()+"_"+sf.getSequenceNr()+".png"));
	*/
	
	MatOfRect haarDetections = new MatOfRect();
	haarDetector.detectMultiScale(image, haarDetections, scaleFactor, minNeighbors, flags, new Size(minSize[0], minSize[1]), new Size(maxSize[0], maxSize[1]));
	ArrayList<Descriptor> descriptors = new ArrayList<Descriptor>();
	for(Rect rect : haarDetections.toArray()){
		Rectangle box = new Rectangle(rect.x, rect.y, rect.width, rect.height);
		descriptors.add(new Descriptor(input.getStreamId(), input.getSequenceNr(), box, 0, new float[0]));
	}
	
	Feature feature = new Feature(input.getStreamId(), input.getSequenceNr(), name, 0, descriptors, null);
	if(outputFrame){
		frame.getFeatures().add(feature);
		result.add(frame);
	}else{
		result.add(feature);
	}
	return result;
}
 
Example #21
Source File: HoughLines.java    From opencv-fun with GNU Affero General Public License v3.0
public static void main (String[] args) {
	CVLoader.load();
	
	// load the image
	Mat img = Highgui.imread("data/topdown-6.jpg");
	
	// generate gray scale and blur
	Mat gray = new Mat();
	Imgproc.cvtColor(img, gray, Imgproc.COLOR_BGR2GRAY);
	Imgproc.blur(gray, gray, new Size(3, 3));
	
	// detect the edges
	Mat edges = new Mat();
	int lowThreshold = 50;
	int ratio = 3;
	Imgproc.Canny(gray, edges, lowThreshold, lowThreshold * ratio);
	
	Mat lines = new Mat();
	Imgproc.HoughLinesP(edges, lines, 1, Math.PI / 180, 50, 50, 10);
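	// In the 2.4 Java API, lines is a 1xN, 4-channel Mat: lines.get(0, i) yields {x1, y1, x2, y2}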
	
	for(int i = 0; i < lines.cols(); i++) {
		double[] val = lines.get(0, i);
		Core.line(img, new Point(val[0], val[1]), new Point(val[2], val[3]), new Scalar(0, 0, 255), 2);
	}
	
	ImgWindow.newWindow(edges);
	ImgWindow.newWindow(gray);
	ImgWindow.newWindow(img);
}
 
Example #22
Source File: HoughCircles.java    From opencv-fun with GNU Affero General Public License v3.0
public static void main (String[] args) {
		CVLoader.load();
		Mat orig = Highgui.imread("data/topdown-6.jpg");
		Mat gray = new Mat();
		orig.copyTo(gray);
		
		// blur
//		Imgproc.medianBlur(gray, gray, 5);
//		Imgproc.GaussianBlur(gray, gray, new Size(3, 3), 100);
		
		// convert to grayscale
		Imgproc.cvtColor(gray, gray, Imgproc.COLOR_BGR2GRAY);
		
		// do hough circles
		Mat circles = new Mat();
		int minRadius = 10;
		int maxRadius = 18;
		Imgproc.HoughCircles(gray, circles, Imgproc.CV_HOUGH_GRADIENT, 1, minRadius, 120, 10, minRadius, maxRadius);
		System.out.println(circles);
		
		ImgWindow.newWindow(gray);
		ImgWindow wnd = ImgWindow.newWindow(orig);			
		
		while(!wnd.closed) {
			wnd.setImage(orig);
			Graphics2D g = wnd.begin();
			g.setColor(Color.MAGENTA);
			g.setStroke(new BasicStroke(3));
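			// circles is a 1xN, 3-channel Mat: circles.get(0, i) yields {center x, center y, radius}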
			for(int i = 0; i < circles.cols(); i++) {
				double[] circle = circles.get(0, i);
				g.drawOval((int)circle[0] - (int)circle[2], (int)circle[1] - (int)circle[2], (int)circle[2] * 2, (int)circle[2] * 2);
			}		
			wnd.end();
		}
	}
 
Example #23
Source File: BallDetection.java    From opencv-fun with GNU Affero General Public License v3.0
public static void main (String[] args) {
	CVLoader.load();
	
	ImgWindow wnd = ImgWindow.newWindow();
	Calibration calib = new Calibration(1280, 800);
	calib.setBackgroundImage(Highgui.imread("screenshots/positions/background.png"));
	BallDetector detector = new BallDetector(calib);
	Mat camera = Highgui.imread("screenshots/positions/camera.png");
	
	 while(true) {
		 detect(wnd, detector, camera);
	 }
}
 
Example #24
Source File: CameraBridgeViewBase.java    From effective_android_sample with Apache License 2.0
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
     Mat result = null;
     switch (mPreviewFormat) {
        case Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA:
            result = mOldStyleListener.onCameraFrame(inputFrame.rgba());
            break;
        case Highgui.CV_CAP_ANDROID_GREY_FRAME:
            result = mOldStyleListener.onCameraFrame(inputFrame.gray());
            break;
        default:
            Log.e(TAG, "Invalid frame format! Only RGBA and Gray Scale are supported!");
    };

    return result;
}
 
Example #25
Source File: IpCamera.java    From opencv-fun with GNU Affero General Public License v3.0
public IpCamera(final String url) {
	this.frame = Mat.zeros(new Size(200, 200), CvType.CV_8UC3);
	this.url = url;
	thread = new Thread(new Runnable() {
		@Override
		public void run () {
			while(true) {
				try {
					ByteArrayOutputStream bytes = new ByteArrayOutputStream();
					byte[] buffer = new byte[10 * 1024];
					URLConnection con = new URL(url + "/shot.jpg").openConnection();
					InputStream in = con.getInputStream();
					int read = -1;
					while((read = in.read(buffer)) != -1) {
						bytes.write(buffer, 0, read);
					}
					DataOutputStream writer = new DataOutputStream(new FileOutputStream(new File("img.jpg")));
					writer.write(bytes.toByteArray());
					writer.close();
					Mat mat =  Highgui.imread("img.jpg");
					synchronized(this) {
						frame = mat;
					}
				} catch(Throwable t) {
					t.printStackTrace();
				}
			}
		}
	});
	thread.setDaemon(true);
	thread.start();
}
 
Example #26
Source File: CameraBridgeViewBase.java    From ResistorScanner with MIT License
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
     Mat result = null;
     switch (mPreviewFormat) {
        case Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA:
            result = mOldStyleListener.onCameraFrame(inputFrame.rgba());
            break;
        case Highgui.CV_CAP_ANDROID_GREY_FRAME:
            result = mOldStyleListener.onCameraFrame(inputFrame.gray());
            break;
        default:
            Log.e(TAG, "Invalid frame format! Only RGBA and Gray Scale are supported!");
    };

    return result;
}
 
Example #27
Source File: WeightedStandardImage.java    From GenderRecognizer with MIT License
public WeightedStandardImage() {
	this.types = 0;
	this.size = new Size(0, 0);
	this.ids = new int[types];
	this.weights = new int[types];
	this.standardImages = new Mat[types];
	for(int i=0; i<types; i++){
		standardImages[i] = new Mat(size, Highgui.CV_LOAD_IMAGE_GRAYSCALE);
	}
}
 
Example #28
Source File: WeightedStandardImage.java    From GenderRecognizer with MIT License
public WeightedStandardImage(int types, Size size) {
	this.types = types;
	this.size = size;
	this.ids = new int[types];
	this.weights = new int[types];
	this.standardImages = new Mat[types];
	for(int i=0; i<types; i++){
		standardImages[i] = new Mat(size, Highgui.CV_LOAD_IMAGE_GRAYSCALE);
	}
}
 
Example #29
Source File: Predict.java    From GenderRecognizer with MIT License
public static void main(String[] args) {
	System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
	
	WeightedStandardPixelTrainer weightedStandardPixelTrainer = new WeightedStandardPixelTrainer();

	//sample file
	String imageFilePath = "src/res/sample/sample.jpg";
	Mat[] faces = new FaceDetector().snipFace(imageFilePath, new Size(90, 90));
	
	
	//experience file
	weightedStandardPixelTrainer.load("src/res/knowledge/Knowledge.log");
	
	int faceNo=1;
	for(Mat face: faces){
		
		int prediction = weightedStandardPixelTrainer.predict(face);
		
		if(prediction==-1){
			System.out.println("I think " + faceNo + " is not a face.");
			Highgui.imwrite("src/res/sample/" + faceNo + "_noface.jpg", face);
		}else if(prediction==0){
			System.out.println("I think " + faceNo + " is a female.");
			Highgui.imwrite("src/res/sample/" + faceNo + "_female.jpg", face);
		}else{
			System.out.println("I think " + faceNo + " is a male.");
			Highgui.imwrite("src/res/sample/" + faceNo + "_male.jpg", face);
		}
		
		faceNo++;
	}
	
	System.out.println("Operation Successful!!!");
}
 
Example #30
Source File: VideoReaderTest.java    From HadoopCV with Apache License 2.0
public static void main(String[] args) {
	
	System.out.println(System.getProperty("java.class.path"));
 System.out.println(System.getProperty("java.library.path"));
 
 
	System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
	VideoCapture camera = new VideoCapture("data/bike.avi");
	MatOfByte frame = new MatOfByte();
	int i = 0;
	
 
	while(true){
        if (camera.read(frame)){
            System.out.println("Frame Obtained");
            System.out.println("Captured Frame Width " +
            frame.width() + " Height " + frame.height());
            System.out.println(frame.dump());
            Highgui.imwrite("tmp\\image\\camera"+(i++)+".jpg", frame);
            //Highgui.imencode(ext, img, buf)
        }else{
        	break;
        }
    }
	camera.release();
	
}