Java Code Examples for org.opencv.features2d.FeatureDetector

The following examples show how to use org.opencv.features2d.FeatureDetector. These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may want to check out the right sidebar which shows the related API usage.
Example 1
Source Project: OpenCV-Android-Object-Detection   Source File: MainActivity.java    License: MIT License 6 votes vote down vote up
/**
 * Starts the camera preview and prepares the ORB feature pipeline, then
 * precomputes keypoints/descriptors for the reference image "a.jpeg".
 *
 * @throws IOException if the reference asset cannot be opened
 */
private void initializeOpenCVDependencies() throws IOException {
    mOpenCvCameraView.enableView();
    detector = FeatureDetector.create(FeatureDetector.ORB);
    descriptor = DescriptorExtractor.create(DescriptorExtractor.ORB);
    matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
    img1 = new Mat();
    AssetManager assetManager = getAssets();
    Bitmap bitmap;
    // try-with-resources closes the asset stream; the original leaked it.
    try (InputStream istr = assetManager.open("a.jpeg")) {
        bitmap = BitmapFactory.decodeStream(istr);
    }
    Utils.bitmapToMat(bitmap, img1);
    Imgproc.cvtColor(img1, img1, Imgproc.COLOR_RGB2GRAY);
    img1.convertTo(img1, 0); //converting the image to match with the type of the cameras image
    descriptors1 = new Mat();
    keypoints1 = new MatOfKeyPoint();
    detector.detect(img1, keypoints1);
    descriptor.compute(img1, keypoints1, descriptors1);
}
 
Example 2
Source Project: VIA-AI   Source File: FeatureDetector.java    License: MIT License 5 votes vote down vote up
@Deprecated
public static FeatureDetector create(int detectorType)
{
    // Wrap the raw native handle produced by create_0 in a Java-side FeatureDetector.
    return FeatureDetector.__fromPtr__(create_0(detectorType));
}
 
Example 3
Source Project: LPR   Source File: FeatureDetector.java    License: Apache License 2.0 5 votes vote down vote up
@Deprecated
public static FeatureDetector create(int detectorType)
{
    // Wrap the raw native handle produced by create_0 in a Java-side FeatureDetector.
    return FeatureDetector.__fromPtr__(create_0(detectorType));
}
 
Example 4
Source Project: LicensePlateDiscern   Source File: FeatureDetector.java    License: MIT License 5 votes vote down vote up
@Deprecated
public static FeatureDetector create(int detectorType)
{
    // Wrap the raw native handle produced by create_0 in a Java-side FeatureDetector.
    return FeatureDetector.__fromPtr__(create_0(detectorType));
}
 
Example 5
@Deprecated
public static FeatureDetector create(int detectorType)
{
    // Wrap the raw native handle produced by create_0 in a Java-side FeatureDetector.
    return FeatureDetector.__fromPtr__(create_0(detectorType));
}
 
Example 6
Source Project: OpenCvFaceDetect   Source File: FeatureDetector.java    License: Apache License 2.0 5 votes vote down vote up
@Deprecated
public static FeatureDetector create(int detectorType)
{
    // Wrap the raw native handle produced by create_0 in a Java-side FeatureDetector.
    return FeatureDetector.__fromPtr__(create_0(detectorType));
}
 
Example 7
Source Project: mvisc   Source File: KMeansMatcher.java    License: GNU General Public License v3.0 5 votes vote down vote up
public KMeansMatcher()
{
	// Pyramid-adapted ORB keypoints, BRIEF descriptors, brute-force SL2 matching.
	featureDetector = FeatureDetector.create(FeatureDetector.PYRAMID_ORB);
	descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.BRIEF);
	matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_SL2);
	model = null; // no trained model until one is supplied
}
 
Example 8
Source Project: onetwo   Source File: ImageRecognition.java    License: Apache License 2.0 5 votes vote down vote up
public void init() {
	// Build the detector/extractor pair from the configured algorithm types.
	featureDetector = FeatureDetector.create(detectorType);
	descriptorExtractor = DescriptorExtractor.create(extractorType);

	if (!this.writeDebugImage) {
		return;
	}
	// Debug mode: enable every intermediate image dump.
	this.writeDestImageKeyPoints = true;
	this.writeCutMatchedImageFromSrc = true;
	this.writeMatchingImage = true;
	this.writeDrawMatchedLineImage = true;
}
 
Example 9
Source Project: onetwo   Source File: ImageTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Detects SIFT keypoints in both images (drawing them in place), matches the
 * descriptors with a FLANN-based matcher, filters matches whose query/train
 * indices differ by 10 or more, and returns an image visualizing the matches.
 */
public static Mat FeatureSiftLannbased(Mat src, Mat dst){
	FeatureDetector detector = FeatureDetector.create(FeatureDetector.SIFT);
	DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.SIFT);
	DescriptorMatcher flann = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);

	// Keypoints + descriptors for the source image; keypoints drawn onto src.
	MatOfKeyPoint srcKeypoints = new MatOfKeyPoint();
	detector.detect(src, srcKeypoints);
	Mat srcDescriptors = new Mat();
	extractor.compute(src, srcKeypoints, srcDescriptors);
	Features2d.drawKeypoints(src, srcKeypoints, src);

	// Same for the destination image.
	MatOfKeyPoint dstKeypoints = new MatOfKeyPoint();
	detector.detect(dst, dstKeypoints);
	Mat dstDescriptors = new Mat();
	extractor.compute(dst, dstKeypoints, dstDescriptors);
	Features2d.drawKeypoints(dst, dstKeypoints, dst);

	// Match descriptors, then keep only matches with nearby indices.
	MatOfDMatch matches = new MatOfDMatch();
	flann.match(srcDescriptors, dstDescriptors, matches);

	List<DMatch> goodMatches = new ArrayList<DMatch>();
	for (DMatch match : matches.toList()) {
		if (Math.abs(match.queryIdx - match.trainIdx) < 10f) {
			goodMatches.add(match);
		}
	}
	matches.fromList(goodMatches);

	// Render the retained matches side by side into a fresh output image.
	Mat result = new Mat();
	Features2d.drawMatches(src, srcKeypoints, dst, dstKeypoints, matches, result);
	return result;
}
 
Example 10
Source Project: StormCV   Source File: PartialMatcher.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Calculates descriptors as defined by detectorType and 
 * descriptorType provided at construction for the provided image
 * @param buffer encoded image bytes to decode and process
 * @return descriptor matrix for the detected keypoints
 * @throws IOException
 */
private Mat calculateDescriptors(byte[] buffer) throws IOException{
	// Decode the raw bytes into an OpenCV image matrix.
	Mat image = Highgui.imdecode(new MatOfByte(buffer), Highgui.CV_LOAD_IMAGE_ANYCOLOR);

	// Detect keypoints with the configured detector.
	MatOfKeyPoint keypoints = new MatOfKeyPoint();
	FeatureDetector detector = FeatureDetector.create(detectorType);
	detector.detect(image, keypoints);

	// Compute descriptors for those keypoints with the configured extractor.
	DescriptorExtractor extractor = DescriptorExtractor.create(descriptorType);
	Mat descriptors = new Mat();
	extractor.compute(image, keypoints, descriptors);
	return descriptors;
}
 
Example 11
Source Project: StormCV   Source File: FeatureMatcherOp.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Calculates descriptors as defined by detectorType and 
 * descriptorType provided at construction for the provided image
 * @param buffer encoded image bytes to decode and process
 * @return descriptor matrix for the detected keypoints
 * @throws IOException
 */
private Mat calculateDescriptors(byte[] buffer) throws IOException{
	// Decode the raw bytes into an OpenCV image matrix.
	Mat image = Highgui.imdecode(new MatOfByte(buffer), Highgui.CV_LOAD_IMAGE_ANYCOLOR);

	// Detect keypoints with the configured detector.
	MatOfKeyPoint keypoints = new MatOfKeyPoint();
	FeatureDetector detector = FeatureDetector.create(detectorType);
	detector.detect(image, keypoints);

	// Compute descriptors for those keypoints with the configured extractor.
	DescriptorExtractor extractor = DescriptorExtractor.create(descriptorType);
	Mat descriptors = new Mat();
	extractor.compute(image, keypoints, descriptors);
	return descriptors;
}
 
Example 12
Source Project: FtcSamples   Source File: GripPipelineRedJewel.java    License: MIT License 4 votes vote down vote up
/**
 * Detects groups of pixels in an image.
 * @param input The image on which to perform the find blobs.
 * @param minArea The minimum size of a blob that will be found
 * @param circularity The minimum and maximum circularity of blobs that will be found
 * @param darkBlobs The boolean that determines if light or dark blobs are found.
 * @param blobList The output where the MatOfKeyPoint is stored.
 */
private void findBlobs(Mat input, double minArea, double[] circularity,
	Boolean darkBlobs, MatOfKeyPoint blobList) {
	FeatureDetector blobDet = FeatureDetector.create(FeatureDetector.SIMPLEBLOB);
	try {
		// FeatureDetector.read() only accepts a file path, so the blob-detector
		// parameters are written out as a temporary OpenCV XML storage file.
		File tempFile = File.createTempFile("config", ".xml");
		tempFile.deleteOnExit(); // safety net if an exception prevents deletion below

		StringBuilder config = new StringBuilder();

		config.append("<?xml version=\"1.0\"?>\n");
		config.append("<opencv_storage>\n");
		config.append("<thresholdStep>10.</thresholdStep>\n");
		config.append("<minThreshold>50.</minThreshold>\n");
		config.append("<maxThreshold>220.</maxThreshold>\n");
		config.append("<minRepeatability>2</minRepeatability>\n");
		config.append("<minDistBetweenBlobs>10.</minDistBetweenBlobs>\n");
		config.append("<filterByColor>1</filterByColor>\n");
		config.append("<blobColor>");
		config.append((darkBlobs ? 0 : 255));
		config.append("</blobColor>\n");
		config.append("<filterByArea>1</filterByArea>\n");
		config.append("<minArea>");
		config.append(minArea);
		config.append("</minArea>\n");
		config.append("<maxArea>");
		config.append(Integer.MAX_VALUE);
		config.append("</maxArea>\n");
		config.append("<filterByCircularity>1</filterByCircularity>\n");
		config.append("<minCircularity>");
		config.append(circularity[0]);
		config.append("</minCircularity>\n");
		config.append("<maxCircularity>");
		config.append(circularity[1]);
		config.append("</maxCircularity>\n");
		config.append("<filterByInertia>1</filterByInertia>\n");
		config.append("<minInertiaRatio>0.1</minInertiaRatio>\n");
		config.append("<maxInertiaRatio>" + Integer.MAX_VALUE + "</maxInertiaRatio>\n");
		config.append("<filterByConvexity>1</filterByConvexity>\n");
		config.append("<minConvexity>0.95</minConvexity>\n");
		config.append("<maxConvexity>" + Integer.MAX_VALUE + "</maxConvexity>\n");
		config.append("</opencv_storage>\n");
		// try-with-resources closes the writer even if write() throws
		// (the original leaked the FileWriter on that exception path).
		try (FileWriter writer = new FileWriter(tempFile, false)) {
			writer.write(config.toString());
		}
		blobDet.read(tempFile.getPath());
		tempFile.delete(); // config is no longer needed once the detector has read it
	} catch (IOException e) {
		e.printStackTrace();
	}

	blobDet.detect(input, blobList);
}
 
Example 13
Source Project: FtcSamples   Source File: GripPipelineBlueJewel.java    License: MIT License 4 votes vote down vote up
/**
 * Detects groups of pixels in an image.
 * @param input The image on which to perform the find blobs.
 * @param minArea The minimum size of a blob that will be found
 * @param circularity The minimum and maximum circularity of blobs that will be found
 * @param darkBlobs The boolean that determines if light or dark blobs are found.
 * @param blobList The output where the MatOfKeyPoint is stored.
 */
private void findBlobs(Mat input, double minArea, double[] circularity,
	Boolean darkBlobs, MatOfKeyPoint blobList) {
	FeatureDetector blobDet = FeatureDetector.create(FeatureDetector.SIMPLEBLOB);
	try {
		// FeatureDetector.read() only accepts a file path, so the blob-detector
		// parameters are written out as a temporary OpenCV XML storage file.
		File tempFile = File.createTempFile("config", ".xml");
		tempFile.deleteOnExit(); // safety net if an exception prevents deletion below

		StringBuilder config = new StringBuilder();

		config.append("<?xml version=\"1.0\"?>\n");
		config.append("<opencv_storage>\n");
		config.append("<thresholdStep>10.</thresholdStep>\n");
		config.append("<minThreshold>50.</minThreshold>\n");
		config.append("<maxThreshold>220.</maxThreshold>\n");
		config.append("<minRepeatability>2</minRepeatability>\n");
		config.append("<minDistBetweenBlobs>10.</minDistBetweenBlobs>\n");
		config.append("<filterByColor>1</filterByColor>\n");
		config.append("<blobColor>");
		config.append((darkBlobs ? 0 : 255));
		config.append("</blobColor>\n");
		config.append("<filterByArea>1</filterByArea>\n");
		config.append("<minArea>");
		config.append(minArea);
		config.append("</minArea>\n");
		config.append("<maxArea>");
		config.append(Integer.MAX_VALUE);
		config.append("</maxArea>\n");
		config.append("<filterByCircularity>1</filterByCircularity>\n");
		config.append("<minCircularity>");
		config.append(circularity[0]);
		config.append("</minCircularity>\n");
		config.append("<maxCircularity>");
		config.append(circularity[1]);
		config.append("</maxCircularity>\n");
		config.append("<filterByInertia>1</filterByInertia>\n");
		config.append("<minInertiaRatio>0.1</minInertiaRatio>\n");
		config.append("<maxInertiaRatio>" + Integer.MAX_VALUE + "</maxInertiaRatio>\n");
		config.append("<filterByConvexity>1</filterByConvexity>\n");
		config.append("<minConvexity>0.95</minConvexity>\n");
		config.append("<maxConvexity>" + Integer.MAX_VALUE + "</maxConvexity>\n");
		config.append("</opencv_storage>\n");
		// try-with-resources closes the writer even if write() throws
		// (the original leaked the FileWriter on that exception path).
		try (FileWriter writer = new FileWriter(tempFile, false)) {
			writer.write(config.toString());
		}
		blobDet.read(tempFile.getPath());
		tempFile.delete(); // config is no longer needed once the detector has read it
	} catch (IOException e) {
		e.printStackTrace();
	}

	blobDet.detect(input, blobList);
}
 
Example 14
Source Project: FtcSamples   Source File: GripPipeline.java    License: MIT License 4 votes vote down vote up
/**
 * Detects groups of pixels in an image.
 * @param input The image on which to perform the find blobs.
 * @param minArea The minimum size of a blob that will be found
 * @param circularity The minimum and maximum circularity of blobs that will be found
 * @param darkBlobs The boolean that determines if light or dark blobs are found.
 * @param blobList The output where the MatOfKeyPoint is stored.
 */
private void findBlobs(Mat input, double minArea, double[] circularity,
	Boolean darkBlobs, MatOfKeyPoint blobList) {
	FeatureDetector blobDet = FeatureDetector.create(FeatureDetector.SIMPLEBLOB);
	try {
		// FeatureDetector.read() only accepts a file path, so the blob-detector
		// parameters are written out as a temporary OpenCV XML storage file.
		File tempFile = File.createTempFile("config", ".xml");
		tempFile.deleteOnExit(); // safety net if an exception prevents deletion below

		StringBuilder config = new StringBuilder();

		config.append("<?xml version=\"1.0\"?>\n");
		config.append("<opencv_storage>\n");
		config.append("<thresholdStep>10.</thresholdStep>\n");
		config.append("<minThreshold>50.</minThreshold>\n");
		config.append("<maxThreshold>220.</maxThreshold>\n");
		config.append("<minRepeatability>2</minRepeatability>\n");
		config.append("<minDistBetweenBlobs>10.</minDistBetweenBlobs>\n");
		config.append("<filterByColor>1</filterByColor>\n");
		config.append("<blobColor>");
		config.append((darkBlobs ? 0 : 255));
		config.append("</blobColor>\n");
		config.append("<filterByArea>1</filterByArea>\n");
		config.append("<minArea>");
		config.append(minArea);
		config.append("</minArea>\n");
		config.append("<maxArea>");
		config.append(Integer.MAX_VALUE);
		config.append("</maxArea>\n");
		config.append("<filterByCircularity>1</filterByCircularity>\n");
		config.append("<minCircularity>");
		config.append(circularity[0]);
		config.append("</minCircularity>\n");
		config.append("<maxCircularity>");
		config.append(circularity[1]);
		config.append("</maxCircularity>\n");
		config.append("<filterByInertia>1</filterByInertia>\n");
		config.append("<minInertiaRatio>0.1</minInertiaRatio>\n");
		config.append("<maxInertiaRatio>" + Integer.MAX_VALUE + "</maxInertiaRatio>\n");
		config.append("<filterByConvexity>1</filterByConvexity>\n");
		config.append("<minConvexity>0.95</minConvexity>\n");
		config.append("<maxConvexity>" + Integer.MAX_VALUE + "</maxConvexity>\n");
		config.append("</opencv_storage>\n");
		// try-with-resources closes the writer even if write() throws
		// (the original leaked the FileWriter on that exception path).
		try (FileWriter writer = new FileWriter(tempFile, false)) {
			writer.write(config.toString());
		}
		blobDet.read(tempFile.getPath());
		tempFile.delete(); // config is no longer needed once the detector has read it
	} catch (IOException e) {
		e.printStackTrace();
	}

	blobDet.detect(input, blobList);
}
 
Example 15
Source Project: FTCVision   Source File: ObjectDetection.java    License: MIT License 4 votes vote down vote up
/**
 * Instantiate an object detector based on the FAST, BRIEF, and BRUTEFORCE_HAMMING algorithms
 */
public ObjectDetection() {
    // FAST keypoints with BRIEF binary descriptors, matched by Hamming distance.
    matcher = DescriptorMatcher.create(DescriptorMatcherType.BRUTEFORCE_HAMMING.val());
    extractor = DescriptorExtractor.create(DescriptorExtractorType.BRIEF.val());
    detector = FeatureDetector.create(FeatureDetectorType.FAST.val());
}
 
Example 16
Source Project: onetwo   Source File: ImageTest.java    License: Apache License 2.0 4 votes vote down vote up
@Test
	// Manual similarity test: detect ORB features in two images and compute a
	// mean-squared match distance (lower = more similar). Uses hard-coded local
	// file paths, so this only runs on the author's machine.
	public void imgMatching2() throws Exception {
		System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
//		Mat src_base = Imgcodecs.imread("D:\\test\\test5.jpg");
//		Mat src_test = Imgcodecs.imread("D:\\test\\test3.jpg");

		Mat src_base = Imgcodecs.imread("g:/test/find-src.jpg");
		Mat src_test = Imgcodecs.imread("g:/test/find-dest2.jpg");
		
		Mat gray_base = new Mat();
		Mat gray_test = new Mat();
		// Convert both images to grayscale
		Imgproc.cvtColor(src_base, gray_base, Imgproc.COLOR_RGB2GRAY);
		Imgproc.cvtColor(src_test, gray_test, Imgproc.COLOR_RGB2GRAY);
		// Initialize the ORB detector and descriptor extractor
		FeatureDetector featureDetector = FeatureDetector.create(FeatureDetector.ORB);// NOTE: SIFT/SURF creation is not supported here — appears to be a bug in the OpenCV (Windows) Java bindings; this cost the author a lot of time.
		DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.ORB);
		// Declare keypoint and descriptor matrices for both images
		MatOfKeyPoint keyPoint1 = new MatOfKeyPoint(), keyPoint2 = new MatOfKeyPoint();
		Mat descriptorMat1 = new Mat(), descriptorMat2 = new Mat();
		// Detect ORB keypoints
		featureDetector.detect(gray_base, keyPoint1);
		featureDetector.detect(gray_test, keyPoint2);
		

        Mat output=new Mat();
        Features2d.drawKeypoints(gray_base, keyPoint1, output );
        Imgcodecs.imwrite("g:/test/out.jpg", output);
        
		// Compute ORB descriptors for the detected keypoints
		descriptorExtractor.compute(gray_base, keyPoint1, descriptorMat1);
		descriptorExtractor.compute(gray_test, keyPoint2, descriptorMat2);
		float result = 0;
		// Feature matching
		System.out.println("test5:" + keyPoint1.size());
		System.out.println("test3:" + keyPoint2.size());
		if (!keyPoint1.size().empty() && !keyPoint2.size().empty()) {
			// FlannBasedMatcher matcher = new FlannBasedMatcher();
			DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_L1);
			MatOfDMatch matches = new MatOfDMatch();
			matcher.match(descriptorMat1, descriptorMat2, matches);
			// Best-match filtering: accumulate squared distances of matches below the threshold
			// NOTE(review): minDist is never updated from the actual matches, so the
			// threshold is a fixed 200 — confirm whether that was intended.
			double minDist = 100;
			DMatch[] dMatchs = matches.toArray();
			int num = 0;
			for (int i = 0; i < dMatchs.length; i++) {
				if (dMatchs[i].distance <= 2 * minDist) {
					result += dMatchs[i].distance * dMatchs[i].distance;
					num++;
				}
			}
			// Match-score calculation: mean squared distance over accepted matches
			result /= num;
		}
		System.out.println(result);
	}
 
Example 17
Source Project: StormCV   Source File: E3_MultipleFeaturesTopology.java    License: Apache License 2.0 4 votes vote down vote up
// Builds and locally runs a StormCV topology:
// spout -> scale -> {face detection, sift} -> combiner -> drawer -> streamer.
public static void main(String[] args){
	// first some global (topology configuration)
	StormCVConfig conf = new StormCVConfig();

	/**
	 * Sets the OpenCV library to be used which depends on the system the topology is being executed on
	 */
	conf.put(StormCVConfig.STORMCV_OPENCV_LIB, "mac64_opencv_java248.dylib");
	
	conf.setNumWorkers(8); // number of workers in the topology
	conf.setMaxSpoutPending(32); // maximum un-acked/un-failed frames per spout (spout blocks if this number is reached)
	conf.put(StormCVConfig.STORMCV_FRAME_ENCODING, Frame.JPG_IMAGE); // indicates frames will be encoded as JPG throughout the topology (JPG is the default when not explicitly set)
	conf.put(Config.TOPOLOGY_ENABLE_MESSAGE_TIMEOUTS, true); // True if Storm should timeout messages or not.
	conf.put(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS , 10); // The maximum amount of time given to the topology to fully process a message emitted by a spout (default = 30)
	conf.put(StormCVConfig.STORMCV_SPOUT_FAULTTOLERANT, false); // indicates if the spout must be fault tolerant; i.e. spouts do NOT! replay tuples on fail
	conf.put(StormCVConfig.STORMCV_CACHES_TIMEOUT_SEC, 30); // TTL (seconds) for all elements in all caches throughout the topology (avoids memory overload)
	
	String userDir = System.getProperty("user.dir").replaceAll("\\\\", "/");
	// create a list with files to be processed, in this case just one. Multiple files will be spread over the available spouts
	List<String> files = new ArrayList<String>();
	files.add( "file://"+ userDir + "/resources/data/" );

	int frameSkip = 13; 
	
	// now create the topology itself (spout -> scale -> {face detection, sift} -> drawer -> streamer)
	TopologyBuilder builder = new TopologyBuilder();
	 // just one spout reading video files, extracting 1 frame out of 25 (i.e. 1 per second)
	builder.setSpout("spout", new CVParticleSpout( new FileFrameFetcher(files).frameSkip(frameSkip) ), 1 );
	
	// add bolt that scales frames down to 25% of the original size 
	builder.setBolt("scale", new SingleInputBolt( new ScaleImageOp(0.25f)), 1)
		.shuffleGrouping("spout");
	
	// one bolt with a HaarCascade classifier detecting faces. This operation outputs a Frame including the Features with detected faces.
	// the xml file must be present on the classpath!
	builder.setBolt("face", new SingleInputBolt( new HaarCascadeOp("face", "lbpcascade_frontalface.xml").outputFrame(true)), 1)
		.shuffleGrouping("scale");
	
	// add a bolt that performs SIFT keypoint extraction
	builder.setBolt("sift", new SingleInputBolt( new FeatureExtractionOp("sift", FeatureDetector.SIFT, DescriptorExtractor.SIFT).outputFrame(false)), 2)
		.shuffleGrouping("scale");
	
	// Batch bolt that waits for input from both the face and sift detection bolts and combines them in a single frame object
	builder.setBolt("combiner", new BatchInputBolt(new SequenceNrBatcher(2), new FeatureCombinerOp()), 1)
		.fieldsGrouping("sift", new Fields(FrameSerializer.STREAMID))
		.fieldsGrouping("face", new Fields(FrameSerializer.STREAMID));
	
	// simple bolt that draws Features (i.e. locations of features) into the frame
	builder.setBolt("drawer", new SingleInputBolt(new DrawFeaturesOp()), 1)
		.shuffleGrouping("combiner");
	
	// add bolt that creates a webservice on port 8558 enabling users to view the result
	builder.setBolt("streamer", new BatchInputBolt(
			new SlidingWindowBatcher(2, frameSkip).maxSize(6), // note the required batcher used as a buffer and maintains the order of the frames
			new MjpegStreamingOp().port(8558).framerate(5)).groupBy(new Fields(FrameSerializer.STREAMID))
		, 1)
		.shuffleGrouping("drawer");

	// NOTE: if the topology is started (locally) go to http://localhost:8558/streaming/tiles and click the image to see the stream!
	
	try {
		
		// run in local mode
		LocalCluster cluster = new LocalCluster();
		cluster.submitTopology( "multifeature", conf, builder.createTopology() );
		Utils.sleep(120*1000); // run two minutes and then kill the topology
		cluster.shutdown();
		System.exit(1);
		
		// run on a storm cluster
		// StormSubmitter.submitTopology("some_topology_name", conf, builder.createTopology());
	} catch (Exception e){
		e.printStackTrace();
	}
}
 
Example 18
Source Project: StormCV   Source File: E8_DRPCTopology.java    License: Apache License 2.0 4 votes vote down vote up
// Builds a linear DRPC topology that extracts SIFT features from a requested
// image and matches them against prototype images, then queries it locally.
public static void main(String[] args){
	// first some global (topology configuration)
	StormCVConfig conf = new StormCVConfig();

	// sets the OpenCV native library; depends on the system the topology runs on
	conf.put(StormCVConfig.STORMCV_OPENCV_LIB, "mac64_opencv_java248.dylib");
			
	conf.setNumWorkers(5); // number of workers in the topology
	conf.put(StormCVConfig.STORMCV_FRAME_ENCODING, Frame.JPG_IMAGE); // indicates frames will be encoded as JPG throughout the topology (JPG is the default when not explicitly set)
	conf.put(Config.TOPOLOGY_ENABLE_MESSAGE_TIMEOUTS, true); // True if Storm should timeout messages or not.
	conf.put(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS , 10); // The maximum amount of time given to the topology to fully process a message emitted by a spout (default = 30)
	conf.put(StormCVConfig.STORMCV_SPOUT_FAULTTOLERANT, false); // indicates if the spout must be fault tolerant; i.e. spouts do NOT! replay tuples on fail
	conf.put(StormCVConfig.STORMCV_CACHES_TIMEOUT_SEC, 30); // TTL (seconds) for all elements in all caches throughout the topology (avoids memory overload)
	conf.put(Config.NIMBUS_TASK_LAUNCH_SECS, 30);
	
	String userDir = System.getProperty("user.dir").replaceAll("\\\\", "/");
			
	List<String> prototypes = new ArrayList<String>();
	prototypes.add( "file://"+ userDir +"/resources/data" );
	
	// create a linear DRPC builder called 'match'
	LinearDRPCTopologyBuilder builder = new LinearDRPCTopologyBuilder("match");

	//add a FeatureMatchRequestOp that receives drpc requests
	builder.addBolt(new RequestBolt(new FeatureMatchRequestOp()), 1); 
	
	 // add two bolts that perform sift extraction (as used in other examples!)
	builder.addBolt(new SingleInputBolt(
		new FeatureExtractionOp("sift", FeatureDetector.SIFT, DescriptorExtractor.SIFT).outputFrame(false)
		), 1).shuffleGrouping();

	// add bolt that matches queries it gets with the prototypes it has loaded upon the prepare.
	// The prototypes are divided over the available tasks which means that each query has to be send to all tasks (use allGrouping)
	// the matcher only reports a match if at least 1 strong match has been found (can be set to 0)
	builder.addBolt(new SingleInputBolt(new PartialMatcher(prototypes, 0, 0.5f)), 2).allGrouping(); 
	
	// add a bolt that aggregates all the results it gets from the two matchers 
	builder.addBolt(new BatchBolt(new FeatureMatchResultOp(true)), 1).fieldsGrouping(new Fields(CVParticleSerializer.REQUESTID));
	
	// create local drpc server and cluster. Deploy the drpc topology on the cluster
	LocalDRPC drpc = new LocalDRPC();
	LocalCluster cluster = new LocalCluster();
	cluster.submitTopology("drpc-demo", conf, builder.createLocalTopology(drpc));
	
	// use all face images as queries (same images as loaded by the matcher!)
	File queryDir = new File(userDir +"/resources/data/");
	for(String img : queryDir.list()){
		if(!img.endsWith(".jpg")) continue; // to avoid reading non-image files
		// execute the drpc with the image as argument. Note that the execute blocks
		String matchesJson = drpc.execute("match", "file://"+userDir +"/resources/data/"+img);
		System.out.println(img+" : " + matchesJson);
	}
		
	cluster.shutdown();
	drpc.shutdown();
}
 
Example 19
Source Project: StormCV   Source File: E4_SequentialFeaturesTopology.java    License: Apache License 2.0 4 votes vote down vote up
// Builds and locally runs a topology where face detection and SIFT extraction
// run sequentially inside one 'fat' bolt: spout -> scale -> fat -> drawer -> streamer.
public static void main(String[] args){
	
	// first some global (topology configuration)
	StormCVConfig conf = new StormCVConfig();
	
	/**
	 * Sets the OpenCV library to be used which depends on the system the topology is being executed on
	 */
	conf.put(StormCVConfig.STORMCV_OPENCV_LIB, "mac64_opencv_java248.dylib");
	
	conf.setNumWorkers(8); // number of workers in the topology
	conf.setMaxSpoutPending(6); // maximum un-acked/un-failed frames per spout (spout blocks if this number is reached)
	conf.put(StormCVConfig.STORMCV_FRAME_ENCODING, Frame.JPG_IMAGE); // indicates frames will be encoded as JPG throughout the topology (JPG is the default when not explicitly set)
	conf.put(Config.TOPOLOGY_ENABLE_MESSAGE_TIMEOUTS, true); // True if Storm should timeout messages or not.
	conf.put(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS , 10); // The maximum amount of time given to the topology to fully process a message emitted by a spout (default = 30)
	conf.put(StormCVConfig.STORMCV_SPOUT_FAULTTOLERANT, false); // indicates if the spout must be fault tolerant; i.e. spouts do NOT! replay tuples on fail
	conf.put(StormCVConfig.STORMCV_CACHES_TIMEOUT_SEC, 30); // TTL (seconds) for all elements in all caches throughout the topology (avoids memory overload)
	
	String userDir = System.getProperty("user.dir").replaceAll("\\\\", "/");
	// create a list with files to be processed, in this case just one. Multiple files will be spread over the available spouts
	List<String> files = new ArrayList<String>();
	files.add( "file://"+ userDir+"/resources/data/"); // will process all video files in this directory (i.e. two files)

	// specify the list with SingleInputOperations to be executed sequentially by the 'fat' bolt
	@SuppressWarnings("rawtypes")
	List<ISingleInputOperation> operations = new ArrayList<ISingleInputOperation>();
	operations.add(new HaarCascadeOp("face", "lbpcascade_frontalface.xml") );
	operations.add(new FeatureExtractionOp("sift", FeatureDetector.SIFT, DescriptorExtractor.SIFT));
	
	int frameSkip = 13; 
	
	// now create the topology itself (spout -> scale -> fat[face detection & sift] -> drawer -> streamer)
	TopologyBuilder builder = new TopologyBuilder();
	 // just one spout reading video files, extracting 1 frame out of 25 (i.e. 1 per second)
	builder.setSpout("spout", new CVParticleSpout( new FileFrameFetcher(files).frameSkip(frameSkip) ), 1 );
	
	// add bolt that scales frames down to 25% of the original size 
	builder.setBolt("scale", new SingleInputBolt( new ScaleImageOp(0.25f)), 1)
		.shuffleGrouping("spout");
	
	// three 'fat' bolts containing a SequentialFrameOperation will emit a Frame object containing the detected features
	builder.setBolt("fat_features", new SingleInputBolt( new SequentialFrameOp(operations).outputFrame(true).retainImage(true)), 3)
		.shuffleGrouping("scale");
	
	// simple bolt that draws Features (i.e. locations of features) into the frame
	builder.setBolt("drawer", new SingleInputBolt(new DrawFeaturesOp()), 1)
		.shuffleGrouping("fat_features");
	
	// add bolt that creates a webservice on port 8558 enabling users to view the result
	builder.setBolt("streamer", new BatchInputBolt(
			new SlidingWindowBatcher(2, frameSkip).maxSize(6), // note the required batcher used as a buffer and maintains the order of the frames
			new MjpegStreamingOp().port(8558).framerate(5)).groupBy(new Fields(FrameSerializer.STREAMID))
		, 1)
		.shuffleGrouping("drawer");
	
	try {
		
		// run in local mode
		LocalCluster cluster = new LocalCluster();
		cluster.submitTopology( "fatfeature", conf, builder.createTopology() );
		Utils.sleep(120*1000); // run for two minutes and then kill the topology
		cluster.shutdown();
		System.exit(1);
		
		// run on a storm cluster
		// StormSubmitter.submitTopology("some_topology_name", conf, builder.createTopology());
	} catch (Exception e){
		e.printStackTrace();
	}
}
 
Example 20
Source Project: StormCV   Source File: E7_FetchOperateCombiTopology.java    License: Apache License 2.0 4 votes vote down vote up
// Builds and locally runs a topology where fetching and processing are combined
// inside the spout itself (FetchAndOperateFetcher): spout -> streamer.
public static void main(String[] args){
	// first some global (topology configuration)
	StormCVConfig conf = new StormCVConfig();
	
	/**
	 * Sets the OpenCV library to be used which depends on the system the topology is being executed on
	 */
	conf.put(StormCVConfig.STORMCV_OPENCV_LIB, "mac64_opencv_java248.dylib");
	
	conf.setNumWorkers(3); // number of workers in the topology
	conf.setMaxSpoutPending(32); // maximum un-acked/un-failed frames per spout (spout blocks if this number is reached)
	conf.put(StormCVConfig.STORMCV_FRAME_ENCODING, Frame.JPG_IMAGE); // indicates frames will be encoded as JPG throughout the topology (JPG is the default when not explicitly set)
	conf.put(Config.TOPOLOGY_ENABLE_MESSAGE_TIMEOUTS, true); // True if Storm should timeout messages or not.
	conf.put(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS , 10); // The maximum amount of time given to the topology to fully process a message emitted by a spout (default = 30)
	conf.put(StormCVConfig.STORMCV_SPOUT_FAULTTOLERANT, false); // indicates if the spout must be fault tolerant; i.e. spouts do NOT! replay tuples on fail
	conf.put(StormCVConfig.STORMCV_CACHES_TIMEOUT_SEC, 30); // TTL (seconds) for all elements in all caches throughout the topology (avoids memory overload)
	conf.put(StormCVConfig.STORMCV_OPENCV_LIB, "mac64_opencv_java248.dylib"); // sets the opencv lib to be used by all OpenCVOperation implementing operations (NOTE(review): duplicates the put above — confirm one can be removed)
	
	List<String> urls = new ArrayList<String>();
	urls.add( "rtsp://streaming3.webcam.nl:1935/n224/n224.stream" );
	urls.add("rtsp://streaming3.webcam.nl:1935/n233/n233.stream");

	// operations executed sequentially on every fetched frame inside the spout
	@SuppressWarnings("rawtypes")
	List<ISingleInputOperation> operations = new ArrayList<ISingleInputOperation>();
	operations.add(new FeatureExtractionOp("sift", FeatureDetector.SIFT, DescriptorExtractor.SIFT).outputFrame(true));
	operations.add(new DrawFeaturesOp() );
	 
	int frameSkip = 13; 
	
	TopologyBuilder builder = new TopologyBuilder();
	builder.setSpout("spout", new CVParticleSpout( 
			new FetchAndOperateFetcher( // use a meta fetcher to combine a Fetcher and Operation
					new StreamFrameFetcher(urls).frameSkip(frameSkip), // use a normal fetcher to get video frames from streams
					new SequentialFrameOp(operations).outputFrame(true).retainImage(true) // use a sequential operation to execute a number of operations on frames
				)
			) , 2 );
	
	// add bolt that creates a webservice on port 8558 enabling users to view the result
	builder.setBolt("streamer", new BatchInputBolt(
			new SlidingWindowBatcher(2, frameSkip).maxSize(6), // note the required batcher used as a buffer and maintains the order of the frames
			new MjpegStreamingOp().port(8558).framerate(5)).groupBy(new Fields(FrameSerializer.STREAMID))
		, 1)
		.shuffleGrouping("spout");
	
	try {
		
		// run in local mode
		LocalCluster cluster = new LocalCluster();
		cluster.submitTopology( "sequential_spout", conf, builder.createTopology() );
		Utils.sleep(120*1000); // run two minutes and then kill the topology
		cluster.shutdown();
		System.exit(1);
		
		// run on a storm cluster
		// StormSubmitter.submitTopology("some_topology_name", conf, builder.createTopology());
	} catch (Exception e){
		e.printStackTrace();
	}
}
 
Example 21
Source Project: StormCV   Source File: DeploymentTopology.java    License: Apache License 2.0 4 votes vote down vote up
public static void main(String[] args){
	
	// Global (topology-wide) configuration shared by all spouts and bolts.
	StormCVConfig config = new StormCVConfig();
	
	/**
	 * The OpenCV native library depends on the platform the workers run on;
	 * uncomment and adjust when deploying to a specific system.
	 */
	//config.put(StormCVConfig.STORMCV_OPENCV_LIB, "mac64_opencv_java248.dylib");
	
	config.setNumWorkers(4); // number of workers in the topology
	config.setMaxSpoutPending(20); // cap on un-acked/un-failed frames per spout (the spout blocks once reached)
	config.put(StormCVConfig.STORMCV_FRAME_ENCODING, Frame.JPG_IMAGE); // encode frames as JPG throughout the topology (JPG is also the default)
	config.put(Config.TOPOLOGY_ENABLE_MESSAGE_TIMEOUTS, true); // let Storm time out messages
	config.put(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS , 10); // maximum time the topology gets to fully process a spout-emitted message (default = 30)
	config.put(StormCVConfig.STORMCV_SPOUT_FAULTTOLERANT, false); // spouts are NOT fault tolerant; tuples are not replayed on fail
	config.put(StormCVConfig.STORMCV_CACHES_TIMEOUT_SEC, 30); // TTL (seconds) for all cache entries throughout the topology (avoids memory overload)
	
	// RTSP streams to read; the spout needs one task per stream (see setNumTasks below).
	List<String> streamUrls = new ArrayList<String>();
	streamUrls.add("rtsp://streaming3.webcam.nl:1935/n224/n224.stream");
	streamUrls.add("rtsp://streaming3.webcam.nl:1935/n233/n233.stream");
	streamUrls.add("rtsp://streaming3.webcam.nl:1935/n302/n302.stream");
	streamUrls.add("rtsp://streaming3.webcam.nl:1935/n346/n346.stream");
	streamUrls.add("rtsp://streaming3.webcam.nl:1935/n319/n319.stream");
	streamUrls.add("rtsp://streaming3.webcam.nl:1935/n794b/n794b.stream");

	int skip = 13;
	
	// SingleInputOperations to be executed sequentially inside the 'fat' bolt.
	@SuppressWarnings("rawtypes")
	List<ISingleInputOperation> pipeline = new ArrayList<ISingleInputOperation>();
	pipeline.add(new ScaleImageOp(0.5f));
	pipeline.add(new FeatureExtractionOp("sift", FeatureDetector.SIFT, DescriptorExtractor.SIFT));
	pipeline.add(new FeatureExtractionOp("surf", FeatureDetector.SURF, DescriptorExtractor.SURF));
	pipeline.add(new DrawFeaturesOp());
	
	// Wire the topology itself: spout -> feature extraction -> streamer.
	TopologyBuilder topology = new TopologyBuilder();
	
	// the number of tasks must match the number of stream urls!
	topology.setSpout("spout", new CVParticleSpout(new StreamFrameFetcher(streamUrls).frameSkip(skip)), 1).setNumTasks(6);
	
	// 'fat' bolts running the sequential pipeline; each emits a Frame object containing the detected features
	topology.setBolt("features", new SingleInputBolt(new SequentialFrameOp(pipeline).outputFrame(true).retainImage(true)), 2)
		.shuffleGrouping("spout");
	
	// bolt exposing a webservice on port 8558 that lets users view the result
	topology.setBolt("streamer", new BatchInputBolt(
			new SlidingWindowBatcher(2, skip).maxSize(6), // the required batcher acts as a buffer and keeps the frames in order
			new MjpegStreamingOp().port(8558).framerate(5)).groupBy(new Fields(FrameSerializer.STREAMID))
		, 1)
		.shuffleGrouping("features");
	
	try {
		
		// local-mode alternative (disabled):
		/*
		LocalCluster cluster = new LocalCluster();
		cluster.submitTopology( "deployment_Test", config, topology.createTopology() );
		Utils.sleep(120*1000); // run for two minutes and then kill the topology
		cluster.shutdown();
		System.exit(1);
		*/
		// submit to a storm cluster
		StormSubmitter.submitTopology("Your_topology_name", config, topology.createTopology());
	} catch (Exception e){
		e.printStackTrace();
	}
}
 
Example 22
Source Project: StormCV   Source File: FeatureExtractionOp.java    License: Apache License 2.0 4 votes vote down vote up
/**
 * Extracts keypoints and descriptors from the image carried by the supplied
 * particle. Non-Frame particles and frames without image data yield an empty
 * result. Depending on the outputFrame flag either the Frame (with the new
 * Feature attached) or only the extracted Feature is emitted.
 *
 * @param particle the CVParticle to process; only Frame instances are handled
 * @return list containing the enriched Frame or the Feature; empty when the
 *         particle was skipped or extraction failed
 * @throws Exception declared by the interface; extraction errors are caught,
 *         logged and swallowed so Storm does not receive a fail for the tuple
 */
@Override
public List<CVParticle> execute(CVParticle particle) throws Exception {
	List<CVParticle> result = new ArrayList<CVParticle>();
	if(!(particle instanceof Frame)) return result;
	
	Frame frame = (Frame)particle;
	if(frame.getImageType().equals(Frame.NO_IMAGE)) return result;
	try{
		MatOfByte mob = new MatOfByte(frame.getImageBytes());
		Mat image = Highgui.imdecode(mob, Highgui.CV_LOAD_IMAGE_ANYCOLOR);
		
		FeatureDetector detector = FeatureDetector.create(detectorType);
		MatOfKeyPoint mokp = new MatOfKeyPoint();
		detector.detect(image, mokp);
		List<KeyPoint> keypoints = mokp.toList();
		
		Mat descriptors = new Mat();
		DescriptorExtractor extractor = DescriptorExtractor.create(descriptorType);
		extractor.compute(image, mokp, descriptors);
		
		int cols = descriptors.cols();
		List<Descriptor> descrList = new ArrayList<Descriptor>(descriptors.rows());
		for(int r=0; r<descriptors.rows(); r++){
			float[] values = new float[cols];
			for(int c=0; c<cols; c++){
				// Mat.get(r, c) returns double[] for any descriptor depth; the
				// float[] overload throws for byte descriptors (e.g. ORB/BRIEF)
				values[c] = (float)descriptors.get(r, c)[0];
			}
			KeyPoint kp = keypoints.get(r);
			descrList.add(new Descriptor(frame.getStreamId(), frame.getSequenceNr(), new Rectangle((int)kp.pt.x, (int)kp.pt.y, 0, 0), 0, values));
		}
		
		// Mats hold native (off-heap) memory the GC does not reclaim; release
		// explicitly to avoid leaking memory in a long-running bolt
		mob.release();
		image.release();
		mokp.release();
		descriptors.release();
		
		Feature feature = new Feature(frame.getStreamId(), frame.getSequenceNr(), featureName, 0, descrList, null);
		if(outputFrame){
			frame.getFeatures().add(feature);
			result.add(frame);
		}else{
			result.add(feature);
		}		
	}catch(Exception e){
		// catching the exception at this point prevents the sending of a fail!
		logger.warn("Unable to extract features for frame!", e);
	}
	return result;
}
 
Example 23
Source Project: FTCVision   Source File: ObjectDetection.java    License: MIT License 2 votes vote down vote up
/**
 * Instantiate an object detector based on custom algorithms
 *
 * @param detector  Keypoint detection algorithm
 * @param extractor Keypoint descriptor extractor
 * @param matcher   Descriptor matcher
 */
public ObjectDetection(FeatureDetectorType detector, DescriptorExtractorType extractor, DescriptorMatcherType matcher) {
    this.detector = FeatureDetector.create(detector.val());
    this.extractor = DescriptorExtractor.create(extractor.val());
    this.matcher = DescriptorMatcher.create(matcher.val());
}
 
Example 24
Source Project: VIA-AI   Source File: FeatureDetector.java    License: MIT License votes vote down vote up
// Wraps an existing native detector instance: addr is the pointer to the
// native object, retained in nativeObj for subsequent JNI calls.
protected FeatureDetector(long addr) {
    this.nativeObj = addr;
}
Example 25
Source Project: VIA-AI   Source File: FeatureDetector.java    License: MIT License votes vote down vote up
// Factory used by generated binding code to wrap a native pointer
// in a Java FeatureDetector instance.
public static FeatureDetector __fromPtr__(long addr) {
    return new FeatureDetector(addr);
}
Example 26
Source Project: LPR   Source File: FeatureDetector.java    License: Apache License 2.0 votes vote down vote up
// Adopts the native object at addr; the pointer is stored in nativeObj
// so later JNI calls can reach the underlying C++ detector.
protected FeatureDetector(long addr) {
    this.nativeObj = addr;
}
Example 27
Source Project: LPR   Source File: FeatureDetector.java    License: Apache License 2.0 votes vote down vote up
// Materialises a Java wrapper around the native detector located at addr.
public static FeatureDetector __fromPtr__(long addr) {
    return new FeatureDetector(addr);
}
Example 28
Source Project: LicensePlateDiscern   Source File: FeatureDetector.java    License: MIT License votes vote down vote up
// Construct a wrapper around the pre-existing native object at addr;
// nativeObj keeps the pointer for all subsequent native calls.
protected FeatureDetector(long addr) {
    this.nativeObj = addr;
}
Example 29
Source Project: LicensePlateDiscern   Source File: FeatureDetector.java    License: MIT License votes vote down vote up
// Wraps the given native pointer in a new FeatureDetector instance.
public static FeatureDetector __fromPtr__(long addr) {
    return new FeatureDetector(addr);
}
Example 30
// Stores the native pointer (addr) in nativeObj; the wrapped C++ object
// is assumed to already exist.
protected FeatureDetector(long addr) {
    this.nativeObj = addr;
}