Java Code Examples for org.opencv.features2d.FeatureDetector

The following are top-voted examples showing how to use org.opencv.features2d.FeatureDetector. These examples are extracted from open-source projects. You can vote up the examples you like; your votes will be used by our system to surface more high-quality examples.
Example 1
Project: zooracle   File: KMeansMatcher.java   View source code 5 votes vote down vote up
public KMeansMatcher()
{
	// Feature pipeline: pyramid-ORB keypoint detection, BRIEF description,
	// and brute-force matching on squared L2 distance.
	// NOTE(review): BRIEF is a binary descriptor, for which Hamming distance is
	// the usual choice; confirm BRUTEFORCE_SL2 is intended here.
	featureDetector = FeatureDetector.create(FeatureDetector.PYRAMID_ORB);
	descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.BRIEF);
	matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_SL2);

	// No trained model yet; it must be supplied before matching can happen.
	model = null;
}
 
Example 2
Project: MotionTracker_SmartRoom   File: MotionTracker.java   View source code 5 votes vote down vote up
/**
 * Creates a new motion tracker that updates the x and y coordinates in the
 * given Coordinate object.
 * @param coords shared coordinate object this tracker writes its results into
 */
public MotionTracker(Coordinate coords) {
    this.coords = coords;

    // Simple-blob detector configured from the PARAMS file; the parameters are
    // chiefly used to ignore small blobs and blobs that lie very close together.
    blobDetector = FeatureDetector.create(FeatureDetector.SIMPLEBLOB);
    blobDetector.read(PARAMS);
}
 
Example 3
Project: second_eyes   File: ExtractPath.java   View source code 5 votes vote down vote up
public ExtractPath() {
    super();

    mKeyPointsPrev = new MatOfKeyPoint();

    // Set up feature detection. FeatureDetector.create can throw an
    // UnsatisfiedLinkError when the native OpenCV library is not loaded; in
    // that case we log and continue (mFeatureDectector remains null).
    try {
        mFeatureDectector = FeatureDetector.create(FeatureDetector.FAST);
    } catch (UnsatisfiedLinkError err) {
        // Fix: the original logged a truncated message ("failed with") and only
        // printed the stack trace; pass the throwable to the logger instead.
        Log.e(TAG, "Feature detector creation failed", err);
    }
    // set up description extraction and matching (BRISK is binary -> Hamming)
    mDescExtractor = DescriptorExtractor.create(DescriptorExtractor.BRISK);
    mDescMatcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);

    mPrevFrame = new Mat();
    prevKeyPoints = new MatOfKeyPoint();
    RGBFrame = new Mat();
    mForeGroundMask = new Mat();
    mContours = new ArrayList<MatOfPoint>();
    // creates a new BackgroundSubtractorMOG2 with args (50, 0, true)
    // — presumably history, variance threshold, shadow detection; confirm
    // against the OpenCV Video.createBackgroundSubtractorMOG2 signature.
    mBackgroundSub = Video.createBackgroundSubtractorMOG2(50, 0, true);
}
 
Example 4
Project: StormCV   File: PartialMatcher.java   View source code 5 votes vote down vote up
/**
 * Calculates descriptors as defined by detectorType and descriptorType
 * (both provided at construction) for the provided encoded image.
 * @param buffer encoded image bytes (e.g. JPG/PNG) to be decoded by OpenCV
 * @return matrix with one descriptor row per detected keypoint
 * @throws IOException declared for callers; decoding itself does not throw it
 */
private Mat calculateDescriptors(byte[] buffer) throws IOException{
	// Decode the raw bytes into an OpenCV image matrix.
	Mat image = Highgui.imdecode(new MatOfByte(buffer), Highgui.CV_LOAD_IMAGE_ANYCOLOR);
	
	// Detect keypoints with the configured detector type.
	FeatureDetector detector = FeatureDetector.create(detectorType);
	MatOfKeyPoint keypoints = new MatOfKeyPoint();
	detector.detect(image, keypoints);
	
	// Compute a descriptor for each detected keypoint.
	DescriptorExtractor extractor = DescriptorExtractor.create(descriptorType);
	Mat descriptors = new Mat();
	extractor.compute(image, keypoints, descriptors);
	return descriptors;
}
 
Example 5
Project: StormCV   File: FeatureMatcherOp.java   View source code 5 votes vote down vote up
/**
 * Calculates descriptors for the provided encoded image using the
 * detectorType and descriptorType supplied at construction.
 * @param buffer encoded image bytes to decode and process
 * @return descriptor matrix (one row per keypoint)
 * @throws IOException declared for callers of this helper
 */
private Mat calculateDescriptors(byte[] buffer) throws IOException{
	MatOfByte encoded = new MatOfByte(buffer);
	Mat image = Highgui.imdecode(encoded, Highgui.CV_LOAD_IMAGE_ANYCOLOR);
	
	// Keypoint detection, then per-keypoint description.
	MatOfKeyPoint keypoints = new MatOfKeyPoint();
	FeatureDetector.create(detectorType).detect(image, keypoints);
	
	Mat descriptors = new Mat();
	DescriptorExtractor.create(descriptorType).compute(image, keypoints, descriptors);
	return descriptors;
}
 
Example 6
Project: OpenCvSample   File: MyActivity.java   View source code 5 votes vote down vote up
@Override
public void onManagerConnected(int status) {
    // Only handle the "OpenCV loaded successfully" callback ourselves;
    // delegate every other status to the superclass.
    if (status != LoaderCallbackInterface.SUCCESS) {
        super.onManagerConnected(status);
        return;
    }
    try {
        // Load the reference image and build an ORB detector/extractor pair.
        target = Utils.loadResource(MyActivity.this, R.raw.f1, Highgui.CV_LOAD_IMAGE_COLOR);
        detector = FeatureDetector.create(FeatureDetector.ORB);
        extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

        MatOfKeyPoint keypoints = new MatOfKeyPoint();
        Mat descriptors = new Mat();

        // Time keypoint detection + description and log the result.
        long start = System.currentTimeMillis();
        detector.detect(target, keypoints);
        extractor.compute(target, keypoints, descriptors);
        Log.d("opencv", "计算关键点耗时(毫秒): " + (System.currentTimeMillis() - start) +
                ", 关键点总数: " + keypoints.toArray().length);

        // Mark every detected keypoint with a small circle.
        for (KeyPoint kp : keypoints.toArray()) {
            Core.circle(target, kp.pt, 5, new Scalar(255, 0, 0));
        }

        // Convert the annotated Mat to a Bitmap and show it.
        Mat converted = new Mat(target.cols(), target.rows(), CvType.CV_8U, new Scalar(4));
        Bitmap bitmap = Bitmap.createBitmap(target.cols(), target.rows(), Bitmap.Config.ARGB_8888);
        Imgproc.cvtColor(target, converted, Imgproc.COLOR_RGB2BGRA, 4);
        Utils.matToBitmap(converted, bitmap);
        myImageView.setImageBitmap(bitmap);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
 
Example 7
Project: fingerblox   File: ImageProcessing.java   View source code 4 votes vote down vote up
@NonNull
private Mat detectFeatures(Mat skeleton, Mat edges) {
    // ORB for both detection and description (the original locals were
    // misleadingly named "star" and "brief").
    FeatureDetector detector = FeatureDetector.create(FeatureDetector.ORB);
    DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

    MatOfKeyPoint keypoints = new MatOfKeyPoint();
    detector.detect(skeleton, keypoints);
    keypointsField = keypoints;

    // Keep only keypoints whose pixel in the edge image is <= 0, shrinking
    // each kept keypoint's size by a factor of 8; count the rest as filtered.
    KeyPoint[] detected = keypoints.toArray();
    ArrayList<KeyPoint> kept = new ArrayList<>(detected.length);
    int dropped = 0;
    for (KeyPoint kp : detected) {
        if (edges.get((int) kp.pt.y, (int) kp.pt.x)[0] <= 0.0) {
            kp.size /= 8;
            kept.add(kp);
        } else {
            dropped++;
        }
    }
    Log.d(TAG, String.format("Filtered %s Keypoints", dropped));

    keypoints.fromList(kept);

    // Describe the surviving keypoints and stash the descriptors.
    Mat descriptors = new Mat();
    extractor.compute(skeleton, keypoints, descriptors);
    descriptorsField = descriptors;

    // Draw rich keypoints (with size/orientation) onto the result image.
    Mat result = new Mat();
    Scalar color = new Scalar(255, 0, 0); // RGB
    Features2d.drawKeypoints(skeleton, keypoints, result, color, Features2d.DRAW_RICH_KEYPOINTS);
    return result;
}
 
Example 8
Project: RobotIGS   File: ObjectDetection.java   View source code 4 votes vote down vote up
/**
 * Instantiate an object detector based on the FAST, BRIEF, and BRUTEFORCE_HAMMING algorithms
 */
public ObjectDetection() {
    // FAST keypoints, BRIEF binary descriptors, brute-force Hamming matching.
    matcher = DescriptorMatcher.create(DescriptorMatcherType.BRUTEFORCE_HAMMING.val());
    extractor = DescriptorExtractor.create(DescriptorExtractorType.BRIEF.val());
    detector = FeatureDetector.create(FeatureDetectorType.FAST.val());
}
 
Example 9
Project: FtcSamples   File: GripPipelineRedJewel.java   View source code 4 votes vote down vote up
/**
 * Detects groups of pixels in an image using OpenCV's SimpleBlobDetector.
 * @param input The image on which to perform the find blobs.
 * @param minArea The minimum size of a blob that will be found
 * @param circularity The minimum and maximum circularity of blobs that will be found
 * @param darkBlobs The boolean that determines if light or dark blobs are found.
 * @param blobList The output where the MatOfKeyPoint is stored.
 */
private void findBlobs(Mat input, double minArea, double[] circularity,
	Boolean darkBlobs, MatOfKeyPoint blobList) {
	FeatureDetector blobDet = FeatureDetector.create(FeatureDetector.SIMPLEBLOB);
	try {
		// The Java bindings expose no setters for SimpleBlobDetector, so the
		// parameters are written to an OpenCV XML storage file and loaded via read().
		File tempFile = File.createTempFile("config", ".xml");
		tempFile.deleteOnExit(); // fix: don't accumulate temp files across runs

		StringBuilder config = new StringBuilder();

		config.append("<?xml version=\"1.0\"?>\n");
		config.append("<opencv_storage>\n");
		config.append("<thresholdStep>10.</thresholdStep>\n");
		config.append("<minThreshold>50.</minThreshold>\n");
		config.append("<maxThreshold>220.</maxThreshold>\n");
		config.append("<minRepeatability>2</minRepeatability>\n");
		config.append("<minDistBetweenBlobs>10.</minDistBetweenBlobs>\n");
		config.append("<filterByColor>1</filterByColor>\n");
		config.append("<blobColor>");
		config.append((darkBlobs ? 0 : 255));
		config.append("</blobColor>\n");
		config.append("<filterByArea>1</filterByArea>\n");
		config.append("<minArea>");
		config.append(minArea);
		config.append("</minArea>\n");
		config.append("<maxArea>");
		config.append(Integer.MAX_VALUE);
		config.append("</maxArea>\n");
		config.append("<filterByCircularity>1</filterByCircularity>\n");
		config.append("<minCircularity>");
		config.append(circularity[0]);
		config.append("</minCircularity>\n");
		config.append("<maxCircularity>");
		config.append(circularity[1]);
		config.append("</maxCircularity>\n");
		config.append("<filterByInertia>1</filterByInertia>\n");
		config.append("<minInertiaRatio>0.1</minInertiaRatio>\n");
		config.append("<maxInertiaRatio>" + Integer.MAX_VALUE + "</maxInertiaRatio>\n");
		config.append("<filterByConvexity>1</filterByConvexity>\n");
		config.append("<minConvexity>0.95</minConvexity>\n");
		config.append("<maxConvexity>" + Integer.MAX_VALUE + "</maxConvexity>\n");
		config.append("</opencv_storage>\n");

		// Fix: try-with-resources closes (and flushes) the writer even if
		// write() throws; the original leaked the FileWriter on failure.
		try (FileWriter writer = new FileWriter(tempFile, false)) {
			writer.write(config.toString());
		}
		blobDet.read(tempFile.getPath());
	} catch (IOException e) {
		// Best effort: fall back to the detector's default parameters.
		e.printStackTrace();
	}

	blobDet.detect(input, blobList);
}
 
Example 10
Project: FtcSamples   File: GripPipelineBlueJewel.java   View source code 4 votes vote down vote up
/**
 * Detects groups of pixels in an image using OpenCV's SimpleBlobDetector.
 * @param input The image on which to perform the find blobs.
 * @param minArea The minimum size of a blob that will be found
 * @param circularity The minimum and maximum circularity of blobs that will be found
 * @param darkBlobs The boolean that determines if light or dark blobs are found.
 * @param blobList The output where the MatOfKeyPoint is stored.
 */
private void findBlobs(Mat input, double minArea, double[] circularity,
	Boolean darkBlobs, MatOfKeyPoint blobList) {
	FeatureDetector blobDet = FeatureDetector.create(FeatureDetector.SIMPLEBLOB);
	try {
		// The Java bindings expose no setters for SimpleBlobDetector, so the
		// parameters are written to an OpenCV XML storage file and loaded via read().
		File tempFile = File.createTempFile("config", ".xml");
		tempFile.deleteOnExit(); // fix: don't accumulate temp files across runs

		StringBuilder config = new StringBuilder();

		config.append("<?xml version=\"1.0\"?>\n");
		config.append("<opencv_storage>\n");
		config.append("<thresholdStep>10.</thresholdStep>\n");
		config.append("<minThreshold>50.</minThreshold>\n");
		config.append("<maxThreshold>220.</maxThreshold>\n");
		config.append("<minRepeatability>2</minRepeatability>\n");
		config.append("<minDistBetweenBlobs>10.</minDistBetweenBlobs>\n");
		config.append("<filterByColor>1</filterByColor>\n");
		config.append("<blobColor>");
		config.append((darkBlobs ? 0 : 255));
		config.append("</blobColor>\n");
		config.append("<filterByArea>1</filterByArea>\n");
		config.append("<minArea>");
		config.append(minArea);
		config.append("</minArea>\n");
		config.append("<maxArea>");
		config.append(Integer.MAX_VALUE);
		config.append("</maxArea>\n");
		config.append("<filterByCircularity>1</filterByCircularity>\n");
		config.append("<minCircularity>");
		config.append(circularity[0]);
		config.append("</minCircularity>\n");
		config.append("<maxCircularity>");
		config.append(circularity[1]);
		config.append("</maxCircularity>\n");
		config.append("<filterByInertia>1</filterByInertia>\n");
		config.append("<minInertiaRatio>0.1</minInertiaRatio>\n");
		config.append("<maxInertiaRatio>" + Integer.MAX_VALUE + "</maxInertiaRatio>\n");
		config.append("<filterByConvexity>1</filterByConvexity>\n");
		config.append("<minConvexity>0.95</minConvexity>\n");
		config.append("<maxConvexity>" + Integer.MAX_VALUE + "</maxConvexity>\n");
		config.append("</opencv_storage>\n");

		// Fix: try-with-resources closes (and flushes) the writer even if
		// write() throws; the original leaked the FileWriter on failure.
		try (FileWriter writer = new FileWriter(tempFile, false)) {
			writer.write(config.toString());
		}
		blobDet.read(tempFile.getPath());
	} catch (IOException e) {
		// Best effort: fall back to the detector's default parameters.
		e.printStackTrace();
	}

	blobDet.detect(input, blobList);
}
 
Example 11
Project: FtcSamples   File: GripPipeline.java   View source code 4 votes vote down vote up
/**
 * Detects groups of pixels in an image using OpenCV's SimpleBlobDetector.
 * @param input The image on which to perform the find blobs.
 * @param minArea The minimum size of a blob that will be found
 * @param circularity The minimum and maximum circularity of blobs that will be found
 * @param darkBlobs The boolean that determines if light or dark blobs are found.
 * @param blobList The output where the MatOfKeyPoint is stored.
 */
private void findBlobs(Mat input, double minArea, double[] circularity,
	Boolean darkBlobs, MatOfKeyPoint blobList) {
	FeatureDetector blobDet = FeatureDetector.create(FeatureDetector.SIMPLEBLOB);
	try {
		// The Java bindings expose no setters for SimpleBlobDetector, so the
		// parameters are written to an OpenCV XML storage file and loaded via read().
		File tempFile = File.createTempFile("config", ".xml");
		tempFile.deleteOnExit(); // fix: don't accumulate temp files across runs

		StringBuilder config = new StringBuilder();

		config.append("<?xml version=\"1.0\"?>\n");
		config.append("<opencv_storage>\n");
		config.append("<thresholdStep>10.</thresholdStep>\n");
		config.append("<minThreshold>50.</minThreshold>\n");
		config.append("<maxThreshold>220.</maxThreshold>\n");
		config.append("<minRepeatability>2</minRepeatability>\n");
		config.append("<minDistBetweenBlobs>10.</minDistBetweenBlobs>\n");
		config.append("<filterByColor>1</filterByColor>\n");
		config.append("<blobColor>");
		config.append((darkBlobs ? 0 : 255));
		config.append("</blobColor>\n");
		config.append("<filterByArea>1</filterByArea>\n");
		config.append("<minArea>");
		config.append(minArea);
		config.append("</minArea>\n");
		config.append("<maxArea>");
		config.append(Integer.MAX_VALUE);
		config.append("</maxArea>\n");
		config.append("<filterByCircularity>1</filterByCircularity>\n");
		config.append("<minCircularity>");
		config.append(circularity[0]);
		config.append("</minCircularity>\n");
		config.append("<maxCircularity>");
		config.append(circularity[1]);
		config.append("</maxCircularity>\n");
		config.append("<filterByInertia>1</filterByInertia>\n");
		config.append("<minInertiaRatio>0.1</minInertiaRatio>\n");
		config.append("<maxInertiaRatio>" + Integer.MAX_VALUE + "</maxInertiaRatio>\n");
		config.append("<filterByConvexity>1</filterByConvexity>\n");
		config.append("<minConvexity>0.95</minConvexity>\n");
		config.append("<maxConvexity>" + Integer.MAX_VALUE + "</maxConvexity>\n");
		config.append("</opencv_storage>\n");

		// Fix: try-with-resources closes (and flushes) the writer even if
		// write() throws; the original leaked the FileWriter on failure.
		try (FileWriter writer = new FileWriter(tempFile, false)) {
			writer.write(config.toString());
		}
		blobDet.read(tempFile.getPath());
	} catch (IOException e) {
		// Best effort: fall back to the detector's default parameters.
		e.printStackTrace();
	}

	blobDet.detect(input, blobList);
}
 
Example 12
Project: OpencvAndroid   File: MainActivity.java   View source code 4 votes vote down vote up
/**
 * Prepares the ORB feature pipeline and precomputes keypoints/descriptors for
 * the reference image loaded from assets.
 * @throws IOException if the asset "a.jpeg" cannot be opened
 */
private void initializeOpenCVDependencies() throws IOException {
    mOpenCvCameraView.enableView();
    // ORB keypoints + ORB descriptors, matched by brute-force Hamming distance.
    detector = FeatureDetector.create(FeatureDetector.ORB);
    descriptor = DescriptorExtractor.create(DescriptorExtractor.ORB);
    matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
    img1 = new Mat();
    AssetManager assetManager = getAssets();
    // Fix: close the asset stream (the original leaked it).
    Bitmap bitmap;
    try (InputStream istr = assetManager.open("a.jpeg")) {
        bitmap = BitmapFactory.decodeStream(istr);
    }
    Utils.bitmapToMat(bitmap, img1);
    Imgproc.cvtColor(img1, img1, Imgproc.COLOR_RGB2GRAY);
    img1.convertTo(img1, 0); //converting the image to match with the type of the cameras image
    // Precompute keypoints and descriptors for the reference image.
    descriptors1 = new Mat();
    keypoints1 = new MatOfKeyPoint();
    detector.detect(img1, keypoints1);
    descriptor.compute(img1, keypoints1, descriptors1);
}
 
Example 13
Project: liastem   File: Grip1 copy.java   View source code 4 votes vote down vote up
/**
 * Detects groups of pixels in an image using OpenCV's SimpleBlobDetector.
 * @param input The image on which to perform the find blobs.
 * @param minArea The minimum size of a blob that will be found
 * @param circularity The minimum and maximum circularity of blobs that will be found
 * @param darkBlobs The boolean that determines if light or dark blobs are found.
 * @param blobList The output where the MatOfKeyPoint is stored.
 */
private void findBlobs(Mat input, double minArea, double[] circularity,
	Boolean darkBlobs, MatOfKeyPoint blobList) {
	FeatureDetector blobDet = FeatureDetector.create(FeatureDetector.SIMPLEBLOB);
	try {
		// The Java bindings expose no setters for SimpleBlobDetector, so the
		// parameters are written to an OpenCV XML storage file and loaded via read().
		File tempFile = File.createTempFile("config", ".xml");
		tempFile.deleteOnExit(); // fix: don't accumulate temp files across runs

		StringBuilder config = new StringBuilder();

		config.append("<?xml version=\"1.0\"?>\n");
		config.append("<opencv_storage>\n");
		config.append("<thresholdStep>10.</thresholdStep>\n");
		config.append("<minThreshold>50.</minThreshold>\n");
		config.append("<maxThreshold>220.</maxThreshold>\n");
		config.append("<minRepeatability>2</minRepeatability>\n");
		config.append("<minDistBetweenBlobs>10.</minDistBetweenBlobs>\n");
		config.append("<filterByColor>1</filterByColor>\n");
		config.append("<blobColor>");
		config.append((darkBlobs ? 0 : 255));
		config.append("</blobColor>\n");
		config.append("<filterByArea>1</filterByArea>\n");
		config.append("<minArea>");
		config.append(minArea);
		config.append("</minArea>\n");
		config.append("<maxArea>");
		config.append(Integer.MAX_VALUE);
		config.append("</maxArea>\n");
		config.append("<filterByCircularity>1</filterByCircularity>\n");
		config.append("<minCircularity>");
		config.append(circularity[0]);
		config.append("</minCircularity>\n");
		config.append("<maxCircularity>");
		config.append(circularity[1]);
		config.append("</maxCircularity>\n");
		config.append("<filterByInertia>1</filterByInertia>\n");
		config.append("<minInertiaRatio>0.1</minInertiaRatio>\n");
		config.append("<maxInertiaRatio>" + Integer.MAX_VALUE + "</maxInertiaRatio>\n");
		config.append("<filterByConvexity>1</filterByConvexity>\n");
		config.append("<minConvexity>0.95</minConvexity>\n");
		config.append("<maxConvexity>" + Integer.MAX_VALUE + "</maxConvexity>\n");
		config.append("</opencv_storage>\n");

		// Fix: try-with-resources closes (and flushes) the writer even if
		// write() throws; the original leaked the FileWriter on failure.
		try (FileWriter writer = new FileWriter(tempFile, false)) {
			writer.write(config.toString());
		}
		blobDet.read(tempFile.getPath());
	} catch (IOException e) {
		// Best effort: fall back to the detector's default parameters.
		e.printStackTrace();
	}

	blobDet.detect(input, blobList);
}
 
Example 14
Project: 2016-FTC   File: ObjectDetection.java   View source code 4 votes vote down vote up
/**
 * Instantiate an object detector based on the FAST, BRIEF, and BRUTEFORCE_HAMMING algorithms
 */
public ObjectDetection() {
    // Detection, description, and matching components, in pipeline order:
    detector = FeatureDetector.create(FeatureDetectorType.FAST.val());   // FAST corners
    extractor = DescriptorExtractor.create(DescriptorExtractorType.BRIEF.val()); // BRIEF binary descriptors
    matcher = DescriptorMatcher.create(DescriptorMatcherType.BRUTEFORCE_HAMMING.val()); // Hamming matching
}
 
Example 15
Project: ImageDetectionCordovaPlugin   File: ImageDetectionPlugin.java   View source code 4 votes vote down vote up
@Override
public void surfaceCreated(SurfaceHolder holder) {
    // Allocate the reusable OpenCV objects once the preview surface exists:
    // an ORB detector/extractor pair plus containers for the second image's
    // keypoints, descriptors, and match results.
    orbDetector = FeatureDetector.create(FeatureDetector.ORB);
    orbDescriptor = DescriptorExtractor.create(DescriptorExtractor.ORB);
    kp2 = new MatOfKeyPoint();
    desc2 = new Mat();
    matches = new MatOfDMatch();
}
 
Example 16
Project: OpenCVTour   File: ImageDetector.java   View source code 4 votes vote down vote up
// Default configuration: ORB detection/description with brute-force
// Hamming(LUT) matching.
public ImageDetector() {
	this(FeatureDetector.ORB, DescriptorExtractor.ORB, DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
}
 
Example 17
Project: OpenCVTour   File: ImageDetector.java   View source code 4 votes vote down vote up
/**
 * Builds the detection pipeline from the supplied OpenCV type constants.
 * @param detector_type  a FeatureDetector.* constant
 * @param extractor_type a DescriptorExtractor.* constant
 * @param matcher_type   a DescriptorMatcher.* constant
 */
public ImageDetector(int detector_type, int extractor_type, int matcher_type)
{
	fDetector = FeatureDetector.create(detector_type);
	dExtractor = DescriptorExtractor.create(extractor_type);
	dMatcher = DescriptorMatcher.create(matcher_type);

	training_library = new ArrayList<TrainingImage>();

	// Specific values selected after experimenting with different data sets
	max_side = 300;
	filter_ratio = 5;
	distance_bound = 50;
}
 
Example 18
Project: FTCVision   File: ObjectDetection.java   View source code 4 votes vote down vote up
/**
 * Instantiate an object detector based on the FAST, BRIEF, and BRUTEFORCE_HAMMING algorithms
 */
public ObjectDetection() {
    detector = FeatureDetector.create(FeatureDetectorType.FAST.val());
    extractor = DescriptorExtractor.create(DescriptorExtractorType.BRIEF.val());
    matcher = DescriptorMatcher.create(DescriptorMatcherType.BRUTEFORCE_HAMMING.val());
}
 
Example 19
Project: near-image-replica-detection   File: KeypointExtractor.java   View source code 4 votes vote down vote up
/** Runs {@code detector} on {@code img}, writing the detected keypoints into {@code points}. */
private static void detectKeypoints(Mat img, FeatureDetector detector, MatOfKeyPoint points) {
	detector.detect(img, points);
}
 
Example 20
Project: OpenCV-BackProjection   File: BackProjectionActivity.java   View source code 4 votes vote down vote up
private void testMatSerialization(){
	// Target path for the (currently disabled) matStore/matRetrieve round trip.
	File storage = Environment.getExternalStorageDirectory();
	String path = storage.getAbsolutePath()+"/opencv/file.bin";

	// GRID_ORB detector with plain ORB descriptors.
	FeatureDetector featureDetector = FeatureDetector.create(FeatureDetector.GRID_ORB);
	DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

	// NOTE(review): keypoints are detected on mRgba but described on mGray —
	// confirm this mismatch is intentional.
	MatOfKeyPoint keypoints = new MatOfKeyPoint();
	featureDetector.detect(mRgba, keypoints);

	Mat descriptors = new Mat();
	descriptorExtractor.compute(mGray, keypoints, descriptors);

	Log.d(TAG, "test - descriptors "+descriptors);

	// Serialize the descriptor matrix to JSON (result currently unused).
	UtilsOpenCV.matToJson(descriptors);

	//UtilsOpenCV.matStore(path, descriptors);
	// UtilsOpenCV.matRetrieve(path, rows, cols, type);
}
 
Example 21
Project: StormCV   File: E3_MultipleFeaturesTopology.java   View source code 4 votes vote down vote up
/**
 * Builds and locally runs a StormCV topology that performs HaarCascade face
 * detection and SIFT feature extraction in parallel on video frames, combines
 * the results, draws them, and serves the annotated frames as an MJPEG stream
 * on port 8558.
 */
public static void main(String[] args){
	// first some global (topology configuration)
	StormCVConfig conf = new StormCVConfig();

	/**
	 * Sets the OpenCV library to be used which depends on the system the topology is being executed on
	 */
	conf.put(StormCVConfig.STORMCV_OPENCV_LIB, "mac64_opencv_java248.dylib");
	
	conf.setNumWorkers(8); // number of workers in the topology
	conf.setMaxSpoutPending(32); // maximum un-acked/un-failed frames per spout (spout blocks if this number is reached)
	conf.put(StormCVConfig.STORMCV_FRAME_ENCODING, Frame.JPG_IMAGE); // indicates frames will be encoded as JPG throughout the topology (JPG is the default when not explicitly set)
	conf.put(Config.TOPOLOGY_ENABLE_MESSAGE_TIMEOUTS, true); // True if Storm should timeout messages or not.
	conf.put(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS , 10); // The maximum amount of time given to the topology to fully process a message emitted by a spout (default = 30)
	conf.put(StormCVConfig.STORMCV_SPOUT_FAULTTOLERANT, false); // indicates if the spout must be fault tolerant; i.e. spouts do NOT! replay tuples on fail
	conf.put(StormCVConfig.STORMCV_CACHES_TIMEOUT_SEC, 30); // TTL (seconds) for all elements in all caches throughout the topology (avoids memory overload)
	
	String userDir = System.getProperty("user.dir").replaceAll("\\\\", "/");
	// create a list with files to be processed, in this case just one. Multiple files will be spread over the available spouts
	List<String> files = new ArrayList<String>();
	files.add( "file://"+ userDir + "/resources/data/" );

	int frameSkip = 13; 
	
	// now create the topology itself (spout -> scale -> {face detection, sift} -> drawer -> streamer)
	TopologyBuilder builder = new TopologyBuilder();
	 // just one spout reading video files, extracting 1 frame out of 25 (i.e. 1 per second)
	builder.setSpout("spout", new CVParticleSpout( new FileFrameFetcher(files).frameSkip(frameSkip) ), 1 );
	
	// add bolt that scales frames down to 25% of the original size 
	builder.setBolt("scale", new SingleInputBolt( new ScaleImageOp(0.25f)), 1)
		.shuffleGrouping("spout");
	
	// one bolt with a HaarCascade classifier detecting faces. This operation outputs a Frame including the Features with detected faces.
	// the xml file must be present on the classpath!
	builder.setBolt("face", new SingleInputBolt( new HaarCascadeOp("face", "lbpcascade_frontalface.xml").outputFrame(true)), 1)
		.shuffleGrouping("scale");
	
	// add a bolt that performs SIFT keypoint extraction
	builder.setBolt("sift", new SingleInputBolt( new FeatureExtractionOp("sift", FeatureDetector.SIFT, DescriptorExtractor.SIFT).outputFrame(false)), 2)
		.shuffleGrouping("scale");
	
	// Batch bolt that waits for input from both the face and sift detection bolts and combines them in a single frame object
	builder.setBolt("combiner", new BatchInputBolt(new SequenceNrBatcher(2), new FeatureCombinerOp()), 1)
		.fieldsGrouping("sift", new Fields(FrameSerializer.STREAMID))
		.fieldsGrouping("face", new Fields(FrameSerializer.STREAMID));
	
	// simple bolt that draws Features (i.e. locations of features) into the frame
	builder.setBolt("drawer", new SingleInputBolt(new DrawFeaturesOp()), 1)
		.shuffleGrouping("combiner");
	
	// add bolt that creates a webservice on port 8558 enabling users to view the result
	builder.setBolt("streamer", new BatchInputBolt(
			new SlidingWindowBatcher(2, frameSkip).maxSize(6), // note the required batcher used as a buffer and maintains the order of the frames
			new MjpegStreamingOp().port(8558).framerate(5)).groupBy(new Fields(FrameSerializer.STREAMID))
		, 1)
		.shuffleGrouping("drawer");

	// NOTE: if the topology is started (locally) go to http://localhost:8558/streaming/tiles and click the image to see the stream!
	
	try {
		
		// run in local mode
		LocalCluster cluster = new LocalCluster();
		cluster.submitTopology( "multifeature", conf, builder.createTopology() );
		Utils.sleep(120*1000); // run two minutes and then kill the topology
		cluster.shutdown();
		// NOTE(review): exit status 1 conventionally signals failure — confirm 0 was not intended.
		System.exit(1);
		
		// run on a storm cluster
		// StormSubmitter.submitTopology("some_topology_name", conf, builder.createTopology());
	} catch (Exception e){
		e.printStackTrace();
	}
}
 
Example 22
Project: StormCV   File: E8_DRPCTopology.java   View source code 4 votes vote down vote up
/**
 * Builds and locally runs a DRPC topology ("match") that extracts SIFT
 * features from a query image and matches them against prototype images
 * loaded by the PartialMatcher bolts, then issues one DRPC request per
 * .jpg file in the data directory and prints the JSON results.
 */
public static void main(String[] args){
	// first some global (topology configuration)
	StormCVConfig conf = new StormCVConfig();

	conf.put(StormCVConfig.STORMCV_OPENCV_LIB, "mac64_opencv_java248.dylib");
			
	conf.setNumWorkers(5); // number of workers in the topology
	conf.put(StormCVConfig.STORMCV_FRAME_ENCODING, Frame.JPG_IMAGE); // indicates frames will be encoded as JPG throughout the topology (JPG is the default when not explicitly set)
	conf.put(Config.TOPOLOGY_ENABLE_MESSAGE_TIMEOUTS, true); // True if Storm should timeout messages or not.
	conf.put(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS , 10); // The maximum amount of time given to the topology to fully process a message emitted by a spout (default = 30)
	conf.put(StormCVConfig.STORMCV_SPOUT_FAULTTOLERANT, false); // indicates if the spout must be fault tolerant; i.e. spouts do NOT! replay tuples on fail
	conf.put(StormCVConfig.STORMCV_CACHES_TIMEOUT_SEC, 30); // TTL (seconds) for all elements in all caches throughout the topology (avoids memory overload)
	conf.put(Config.NIMBUS_TASK_LAUNCH_SECS, 30);
	
	String userDir = System.getProperty("user.dir").replaceAll("\\\\", "/");
			
	List<String> prototypes = new ArrayList<String>();
	prototypes.add( "file://"+ userDir +"/resources/data" );
	
	// create a linear DRPC builder called 'match'
	LinearDRPCTopologyBuilder builder = new LinearDRPCTopologyBuilder("match");

	//add a FeatureMatchRequestOp that receives drpc requests
	builder.addBolt(new RequestBolt(new FeatureMatchRequestOp()), 1); 
	
	 // add two bolts that perform sift extraction (as used in other examples!)
	builder.addBolt(new SingleInputBolt(
		new FeatureExtractionOp("sift", FeatureDetector.SIFT, DescriptorExtractor.SIFT).outputFrame(false)
		), 1).shuffleGrouping();

	// add bolt that matches queries it gets with the prototypes it has loaded upon the prepare.
	// The prototypes are divided over the available tasks which means that each query has to be send to all tasks (use allGrouping)
	// the matcher only reports a match if at least 1 strong match has been found (can be set to 0)
	builder.addBolt(new SingleInputBolt(new PartialMatcher(prototypes, 0, 0.5f)), 2).allGrouping(); 
	
	// add a bolt that aggregates all the results it gets from the two matchers 
	builder.addBolt(new BatchBolt(new FeatureMatchResultOp(true)), 1).fieldsGrouping(new Fields(CVParticleSerializer.REQUESTID));
	
	// create local drpc server and cluster. Deploy the drpc topology on the cluster
	LocalDRPC drpc = new LocalDRPC();
	LocalCluster cluster = new LocalCluster();
	cluster.submitTopology("drpc-demo", conf, builder.createLocalTopology(drpc));
	
	// use all face images as queries (same images as loaded by the matcher!)
	File queryDir = new File(userDir +"/resources/data/");
	for(String img : queryDir.list()){
		if(!img.endsWith(".jpg")) continue; // to avoid reading non-image files
		// execute the drpc with the image as argument. Note that the execute blocks
		String matchesJson = drpc.execute("match", "file://"+userDir +"/resources/data/"+img);
		System.out.println(img+" : " + matchesJson);
	}
		
	cluster.shutdown();
	drpc.shutdown();
}
 
Example 23
Project: StormCV   File: E4_SequentialFeaturesTopology.java   View source code 4 votes vote down vote up
/**
 * Builds and locally runs a StormCV topology where HaarCascade face detection
 * and SIFT extraction are executed sequentially inside a single 'fat' bolt,
 * after which the detected features are drawn and streamed as MJPEG on
 * port 8558.
 */
public static void main(String[] args){
	
	// first some global (topology configuration)
	StormCVConfig conf = new StormCVConfig();
	
	/**
	 * Sets the OpenCV library to be used which depends on the system the topology is being executed on
	 */
	conf.put(StormCVConfig.STORMCV_OPENCV_LIB, "mac64_opencv_java248.dylib");
	
	conf.setNumWorkers(8); // number of workers in the topology
	conf.setMaxSpoutPending(6); // maximum un-acked/un-failed frames per spout (spout blocks if this number is reached)
	conf.put(StormCVConfig.STORMCV_FRAME_ENCODING, Frame.JPG_IMAGE); // indicates frames will be encoded as JPG throughout the topology (JPG is the default when not explicitly set)
	conf.put(Config.TOPOLOGY_ENABLE_MESSAGE_TIMEOUTS, true); // True if Storm should timeout messages or not.
	conf.put(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS , 10); // The maximum amount of time given to the topology to fully process a message emitted by a spout (default = 30)
	conf.put(StormCVConfig.STORMCV_SPOUT_FAULTTOLERANT, false); // indicates if the spout must be fault tolerant; i.e. spouts do NOT! replay tuples on fail
	conf.put(StormCVConfig.STORMCV_CACHES_TIMEOUT_SEC, 30); // TTL (seconds) for all elements in all caches throughout the topology (avoids memory overload)
	
	String userDir = System.getProperty("user.dir").replaceAll("\\\\", "/");
	// create a list with files to be processed, in this case just one. Multiple files will be spread over the available spouts
	List<String> files = new ArrayList<String>();
	files.add( "file://"+ userDir+"/resources/data/"); // will process all video files in this directory (i.e. two files)

	// specify the list with SingleInputOperations to be executed sequentially by the 'fat' bolt
	@SuppressWarnings("rawtypes")
	List<ISingleInputOperation> operations = new ArrayList<ISingleInputOperation>();
	operations.add(new HaarCascadeOp("face", "lbpcascade_frontalface.xml") );
	operations.add(new FeatureExtractionOp("sift", FeatureDetector.SIFT, DescriptorExtractor.SIFT));
	
	int frameSkip = 13; 
	
	// now create the topology itself (spout -> scale -> fat[face detection & sift] -> drawer -> streamer)
	TopologyBuilder builder = new TopologyBuilder();
	 // just one spout reading video files, extracting 1 frame out of 25 (i.e. 1 per second)
	builder.setSpout("spout", new CVParticleSpout( new FileFrameFetcher(files).frameSkip(frameSkip) ), 1 );
	
	// add bolt that scales frames down to 25% of the original size 
	builder.setBolt("scale", new SingleInputBolt( new ScaleImageOp(0.25f)), 1)
		.shuffleGrouping("spout");
	
	// three 'fat' bolts containing a SequentialFrameOperation will emit a Frame object containing the detected features
	builder.setBolt("fat_features", new SingleInputBolt( new SequentialFrameOp(operations).outputFrame(true).retainImage(true)), 3)
		.shuffleGrouping("scale");
	
	// simple bolt that draws Features (i.e. locations of features) into the frame
	builder.setBolt("drawer", new SingleInputBolt(new DrawFeaturesOp()), 1)
		.shuffleGrouping("fat_features");
	
	// add bolt that creates a webservice on port 8558 enabling users to view the result
	builder.setBolt("streamer", new BatchInputBolt(
			new SlidingWindowBatcher(2, frameSkip).maxSize(6), // note the required batcher used as a buffer and maintains the order of the frames
			new MjpegStreamingOp().port(8558).framerate(5)).groupBy(new Fields(FrameSerializer.STREAMID))
		, 1)
		.shuffleGrouping("drawer");
	
	try {
		
		// run in local mode
		LocalCluster cluster = new LocalCluster();
		cluster.submitTopology( "fatfeature", conf, builder.createTopology() );
		Utils.sleep(120*1000); // run for two minutes and then kill the topology
		cluster.shutdown();
		// NOTE(review): exit status 1 conventionally signals failure — confirm 0 was not intended.
		System.exit(1);
		
		// run on a storm cluster
		// StormSubmitter.submitTopology("some_topology_name", conf, builder.createTopology());
	} catch (Exception e){
		e.printStackTrace();
	}
}
 
Example 24
Project: StormCV   File: E7_FetchOperateCombiTopology.java   View source code 4 votes vote down vote up
public static void main(String[] args){
	// first some global (topology) configuration
	StormCVConfig conf = new StormCVConfig();

	// Sets the OpenCV library to be used, which depends on the system the topology is executed on;
	// it is read by all OpenCVOperation implementing operations.
	// (The original set this key twice with the same value; setting it once is sufficient.)
	conf.put(StormCVConfig.STORMCV_OPENCV_LIB, "mac64_opencv_java248.dylib");

	conf.setNumWorkers(3); // number of workers in the topology
	conf.setMaxSpoutPending(32); // maximum un-acked/un-failed frames per spout (spout blocks if this number is reached)
	conf.put(StormCVConfig.STORMCV_FRAME_ENCODING, Frame.JPG_IMAGE); // indicates frames will be encoded as JPG throughout the topology (JPG is the default when not explicitly set)
	conf.put(Config.TOPOLOGY_ENABLE_MESSAGE_TIMEOUTS, true); // True if Storm should timeout messages or not.
	conf.put(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS , 10); // The maximum amount of time given to the topology to fully process a message emitted by a spout (default = 30)
	conf.put(StormCVConfig.STORMCV_SPOUT_FAULTTOLERANT, false); // indicates if the spout must be fault tolerant; i.e. spouts do NOT! replay tuples on fail
	conf.put(StormCVConfig.STORMCV_CACHES_TIMEOUT_SEC, 30); // TTL (seconds) for all elements in all caches throughout the topology (avoids memory overload)

	List<String> urls = new ArrayList<String>();
	urls.add( "rtsp://streaming3.webcam.nl:1935/n224/n224.stream" );
	urls.add("rtsp://streaming3.webcam.nl:1935/n233/n233.stream");

	// the operations executed sequentially on every fetched frame: SIFT extraction followed by drawing the features
	@SuppressWarnings("rawtypes")
	List<ISingleInputOperation> operations = new ArrayList<ISingleInputOperation>();
	operations.add(new FeatureExtractionOp("sift", FeatureDetector.SIFT, DescriptorExtractor.SIFT).outputFrame(true));
	operations.add(new DrawFeaturesOp() );
	 
	int frameSkip = 13; 
	
	TopologyBuilder builder = new TopologyBuilder();
	builder.setSpout("spout", new CVParticleSpout( 
			new FetchAndOperateFetcher( // use a meta fetcher to combine a Fetcher and Operation
					new StreamFrameFetcher(urls).frameSkip(frameSkip), // use a normal fetcher to get video frames from streams
					new SequentialFrameOp(operations).outputFrame(true).retainImage(true) // use a sequential operation to execute a number of operations on frames
				)
			) , 2 );
	
	// add bolt that creates a webservice on port 8558 enabling users to view the result
	builder.setBolt("streamer", new BatchInputBolt(
			new SlidingWindowBatcher(2, frameSkip).maxSize(6), // note the required batcher used as a buffer and maintains the order of the frames
			new MjpegStreamingOp().port(8558).framerate(5)).groupBy(new Fields(FrameSerializer.STREAMID))
		, 1)
		.shuffleGrouping("spout");
	
	try {
		
		// run in local mode
		LocalCluster cluster = new LocalCluster();
		cluster.submitTopology( "sequential_spout", conf, builder.createTopology() );
		Utils.sleep(120*1000); // run two minutes and then kill the topology
		cluster.shutdown();
		System.exit(0); // exit status 0: the run finished normally (the original exited with 1, which signals failure to the OS)
		
		// run on a storm cluster
		// StormSubmitter.submitTopology("some_topology_name", conf, builder.createTopology());
	} catch (Exception e){
		e.printStackTrace();
	}
}
 
Example 25
Project: StormCV   File: DeploymentTopology.java   View source code 4 votes vote down vote up
public static void main(String[] args){

	// global (topology) configuration
	StormCVConfig config = new StormCVConfig();

	/**
	 * Sets the OpenCV library to be used which depends on the system the topology is being executed on
	 */
	//config.put(StormCVConfig.STORMCV_OPENCV_LIB, "mac64_opencv_java248.dylib");

	config.setNumWorkers(4); // four workers execute this topology
	config.setMaxSpoutPending(20); // a spout blocks once 20 emitted frames are neither acked nor failed
	config.put(StormCVConfig.STORMCV_FRAME_ENCODING, Frame.JPG_IMAGE); // encode frames as JPG throughout the topology (JPG is also the default)
	config.put(Config.TOPOLOGY_ENABLE_MESSAGE_TIMEOUTS, true); // let Storm time out in-flight messages
	config.put(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS , 10); // a spout-emitted message must be fully processed within 10s (Storm default = 30)
	config.put(StormCVConfig.STORMCV_SPOUT_FAULTTOLERANT, false); // spouts do NOT replay tuples on fail
	config.put(StormCVConfig.STORMCV_CACHES_TIMEOUT_SEC, 30); // TTL (seconds) for entries in all caches throughout the topology (avoids memory overload)

	List<String> streamUrls = new ArrayList<String>();
	streamUrls.add("rtsp://streaming3.webcam.nl:1935/n224/n224.stream");
	streamUrls.add("rtsp://streaming3.webcam.nl:1935/n233/n233.stream");
	streamUrls.add("rtsp://streaming3.webcam.nl:1935/n302/n302.stream");
	streamUrls.add("rtsp://streaming3.webcam.nl:1935/n346/n346.stream");
	streamUrls.add("rtsp://streaming3.webcam.nl:1935/n319/n319.stream");
	streamUrls.add("rtsp://streaming3.webcam.nl:1935/n794b/n794b.stream");

	int skip = 13; // process one out of every 13 frames

	// the SingleInputOperations the 'fat' bolt executes sequentially on every frame
	@SuppressWarnings("rawtypes")
	List<ISingleInputOperation> pipeline = new ArrayList<ISingleInputOperation>();
	pipeline.add(new ScaleImageOp(0.5f));
	pipeline.add(new FeatureExtractionOp("sift", FeatureDetector.SIFT, DescriptorExtractor.SIFT));
	pipeline.add(new FeatureExtractionOp("surf", FeatureDetector.SURF, DescriptorExtractor.SURF));
	pipeline.add(new DrawFeaturesOp());

	// topology layout: spout -> fat bolt (scale + sift + surf + draw) -> streamer
	TopologyBuilder topologyBuilder = new TopologyBuilder();

	// the number of tasks must match the number of urls!
	topologyBuilder.setSpout("spout", new CVParticleSpout(new StreamFrameFetcher(streamUrls).frameSkip(skip)), 1).setNumTasks(6);

	// 'fat' bolts (parallelism 2) running the SequentialFrameOp; each emits a Frame object carrying the detected features
	topologyBuilder.setBolt("features", new SingleInputBolt(new SequentialFrameOp(pipeline).outputFrame(true).retainImage(true)), 2)
		.shuffleGrouping("spout");

	// bolt hosting a webservice on port 8558 so users can view the result
	topologyBuilder.setBolt("streamer", new BatchInputBolt(
			new SlidingWindowBatcher(2, skip).maxSize(6), // required batcher: buffers frames and keeps them in order
			new MjpegStreamingOp().port(8558).framerate(5)).groupBy(new Fields(FrameSerializer.STREAMID))
		, 1)
		.shuffleGrouping("features");

	try {

		// run in local mode
		/*
		LocalCluster cluster = new LocalCluster();
		cluster.submitTopology("deployment_Test", config, topologyBuilder.createTopology());
		Utils.sleep(120*1000); // run for two minutes and then kill the topology
		cluster.shutdown();
		System.exit(1);
		*/
		// run on a storm cluster
		StormSubmitter.submitTopology("Your_topology_name", config, topologyBuilder.createTopology());
	} catch (Exception e){
		e.printStackTrace();
	}
}
 
Example 26
Project: StormCV   File: FeatureExtractionOp.java   View source code 4 votes vote down vote up
@Override
public List<CVParticle> execute(CVParticle particle) throws Exception {
	List<CVParticle> result = new ArrayList<CVParticle>();
	// this operation only applies to frames that actually carry an image
	if(!(particle instanceof Frame)) return result;
	
	Frame frame = (Frame)particle;
	if(frame.getImageType().equals(Frame.NO_IMAGE)) return result;
	try{
		// decode the frame's encoded image bytes back into an OpenCV Mat
		MatOfByte mob = new MatOfByte(frame.getImageBytes());
		Mat image = Highgui.imdecode(mob, Highgui.CV_LOAD_IMAGE_ANYCOLOR);
		
		// detect keypoints with the configured detector (not necessarily SIFT, despite the original local name)
		FeatureDetector detector = FeatureDetector.create(detectorType);
		MatOfKeyPoint mokp = new MatOfKeyPoint();
		detector.detect(image, mokp);
		List<KeyPoint> keypoints = mokp.toList();
		
		// compute a descriptor for each detected keypoint
		Mat descriptors = new Mat();
		DescriptorExtractor extractor = DescriptorExtractor.create(descriptorType);
		extractor.compute(image, mokp, descriptors);

		// copy each descriptor row into a Descriptor object; reading a whole row at once
		// avoids one JNI Mat.get call per element (same CV_32F requirement as per-element get)
		List<Descriptor> descrList = new ArrayList<Descriptor>(descriptors.rows());
		for(int r=0; r<descriptors.rows(); r++){
			float[] values = new float[descriptors.cols()];
			descriptors.get(r, 0, values); // fills 'values' with the full row starting at column 0
			descrList.add(new Descriptor(frame.getStreamId(), frame.getSequenceNr(),
					new Rectangle((int)keypoints.get(r).pt.x, (int)keypoints.get(r).pt.y, 0, 0), 0, values));
		}
		
		Feature feature = new Feature(frame.getStreamId(), frame.getSequenceNr(), featureName, 0, descrList, null);
		if(outputFrame){
			// attach the feature to the frame and forward the frame itself
			frame.getFeatures().add(feature);
			result.add(frame);
		}else{
			result.add(feature);
		}		
	}catch(Exception e){
		// catching the exception here prevents a fail() from being sent for this tuple;
		// the frame is dropped (best effort) and processing continues
		logger.warn("Unable to extract features for frame!", e);
	}
	return result;
}
 
Example 27
Project: enchantment   File: ImageMatcher.java   View source code 4 votes vote down vote up
/**
 * Detects ORB keypoints in the given image and computes their ORB descriptors.
 *
 * @param mat the image to analyse
 * @return the detected keypoints and their descriptors, bundled in a FeatureData
 */
public static FeatureData detectFeatures(Mat mat) {
	MatOfKeyPoint keypoints = new MatOfKeyPoint();
	Mat descriptors = new Mat();

	// locate interest points with ORB, then describe each of them
	FeatureDetector orbDetector = FeatureDetector.create(FeatureDetector.ORB);
	orbDetector.detect(mat, keypoints);

	DescriptorExtractor orbExtractor = DescriptorExtractor.create(DescriptorExtractor.ORB);
	orbExtractor.compute(mat, keypoints, descriptors);

	return new FeatureData(keypoints, descriptors);
}
 
Example 28
Project: FTC2016   File: ObjectDetection.java   View source code 3 votes vote down vote up
/**
 * Instantiate an object detector based on the FAST, BRIEF, and BRUTEFORCE_HAMMING algorithms
 */
public ObjectDetection() {
    detector = FeatureDetector.create(FeatureDetectorType.FAST.val());
    extractor = DescriptorExtractor.create(DescriptorExtractorType.BRIEF.val());
    matcher = DescriptorMatcher.create(DescriptorMatcherType.BRUTEFORCE_HAMMING.val());
}
 
Example 29
Project: FTC2016   File: ObjectDetection.java   View source code 3 votes vote down vote up
/**
 * Instantiate an object detector from a caller-supplied algorithm combination.
 *
 * @param detector  Keypoint detection algorithm
 * @param extractor Keypoint descriptor extractor
 * @param matcher   Descriptor matcher
 */
public ObjectDetection(FeatureDetectorType detector, DescriptorExtractorType extractor, DescriptorMatcherType matcher) {
    // the three factories are independent, so initialisation order is irrelevant
    this.matcher = DescriptorMatcher.create(matcher.val());
    this.extractor = DescriptorExtractor.create(extractor.val());
    this.detector = FeatureDetector.create(detector.val());
}
 
Example 30
Project: RobotIGS   File: ObjectDetection.java   View source code 2 votes vote down vote up
/**
 * Instantiate an object detector based on custom algorithms
 *
 * @param detector  Keypoint detection algorithm
 * @param extractor Keypoint descriptor extractor
 * @param matcher   Descriptor matcher
 */
public ObjectDetection(FeatureDetectorType detector, DescriptorExtractorType extractor, DescriptorMatcherType matcher) {
    this.detector = FeatureDetector.create(detector.val());
    this.extractor = DescriptorExtractor.create(extractor.val());
    this.matcher = DescriptorMatcher.create(matcher.val());
}
 
Example 31
Project: 2016-FTC   File: ObjectDetection.java   View source code 2 votes vote down vote up
/**
 * Instantiate an object detector from a caller-supplied algorithm combination.
 *
 * @param detector  Keypoint detection algorithm
 * @param extractor Keypoint descriptor extractor
 * @param matcher   Descriptor matcher
 */
public ObjectDetection(FeatureDetectorType detector, DescriptorExtractorType extractor, DescriptorMatcherType matcher) {
    // the three factories are independent, so initialisation order is irrelevant
    this.matcher = DescriptorMatcher.create(matcher.val());
    this.detector = FeatureDetector.create(detector.val());
    this.extractor = DescriptorExtractor.create(extractor.val());
}
 
Example 32
Project: FTCVision   File: ObjectDetection.java   View source code 2 votes vote down vote up
/**
 * Instantiate an object detector based on custom algorithms
 *
 * @param detector  Keypoint detection algorithm
 * @param extractor Keypoint descriptor extractor
 * @param matcher   Descriptor matcher
 */
public ObjectDetection(FeatureDetectorType detector, DescriptorExtractorType extractor, DescriptorMatcherType matcher) {
    this.detector = FeatureDetector.create(detector.val());
    this.extractor = DescriptorExtractor.create(extractor.val());
    this.matcher = DescriptorMatcher.create(matcher.val());
}