Python cv2.meanShift() Examples

The following are 3 code examples of cv2.meanShift(), each taken from an open-source project; the project and source file are noted above each example.
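Before the project examples, here is a minimal, self-contained sketch of the usual cv2.meanShift() workflow: build a hue histogram of a hand-picked region in the first frame, back-project that histogram onto every later frame, and let cv2.meanShift() shift the search window toward the densest part of the back-projection. The video file name and the initial window below are placeholders, not taken from any of the examples.

import cv2
import numpy as np

cap = cv2.VideoCapture('input.mp4')   # placeholder file name
ret, first = cap.read()

# Hand-picked initial search window (x, y, w, h) -- illustrative values only.
x, y, w, h = 300, 200, 100, 50
track_window = (x, y, w, h)

# Hue histogram of the region of interest, normalized to [0, 255].
hsv_roi = cv2.cvtColor(first[y:y+h, x:x+w], cv2.COLOR_BGR2HSV)
mask = cv2.inRange(hsv_roi, np.array((0., 60., 32.)), np.array((180., 255., 255.)))
roi_hist = cv2.calcHist([hsv_roi], [0], mask, [180], [0, 180])
cv2.normalize(roi_hist, roi_hist, 0, 255, cv2.NORM_MINMAX)

# Stop after 10 iterations or when the window moves by less than 1 pixel.
term_crit = (cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT, 10, 1)

while True:
    ret, frame = cap.read()
    if not ret:
        break
    hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
    dst = cv2.calcBackProject([hsv], [0], roi_hist, [0, 180], 1)

    # meanShift() returns the number of iterations run and the shifted window.
    ret, track_window = cv2.meanShift(dst, track_window, term_crit)

    x, y, w, h = track_window
    cv2.rectangle(frame, (x, y), (x + w, y + h), 255, 2)
    cv2.imshow('meanShift', frame)
    if cv2.waitKey(30) & 0xFF == ord('q'):
        break

cap.release()
cv2.destroyAllWindows()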
Example #1
Source File: tracking.py    From OpenCV-Computer-Vision-Projects-with-Python with MIT License
def _append_boxes_from_meanshift(self, frame, box_all):
        """Adds to the list all bounding boxes found with mean-shift tracking

            Mean-shift tracking is used to track objects from frame to frame.
            This information is combined with a saliency map to discard
            false positives and focus only on relevant objects that move.
            (A sketch of how the per-object ROI histograms consumed here
            are typically prepared follows this example.)

            :param frame: current BGR image frame
            :param box_all: list to which the tracked bounding boxes are appended
            :returns: new list of all collected bounding boxes
        """
        hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)

        for i in range(len(self.object_roi)):
            roi_hist = copy.deepcopy(self.object_roi[i])
            box_old = copy.deepcopy(self.object_box[i])

            dst = cv2.calcBackProject([hsv], [0], roi_hist, [0, 180], 1)
            ret, box_new = cv2.meanShift(dst, tuple(box_old), self.term_crit)
            self.object_box[i] = copy.deepcopy(box_new)

            # discard boxes that don't move
            (xo, yo, wo, ho) = box_old
            (xn, yn, wn, hn) = box_new

            co = [xo + wo/2, yo + ho/2]
            cn = [xn + wn/2, yn + hn/2]
            if (co[0]-cn[0])**2 + (co[1]-cn[1])**2 >= self.min_shift2:
                box_all.append(box_new)

        return box_all 
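The method above assumes that self.object_roi already holds normalized hue histograms and self.object_box the matching (x, y, w, h) windows; that setup code is not part of this excerpt. A rough sketch of how such a histogram could be prepared, following the same calcHist/normalize pattern used in the other examples, is shown below. The helper name is hypothetical, not from the original project.

    def _add_tracked_object(self, frame, box):
        """Hypothetical helper (not from the original project): store a
        normalized hue histogram and its window so that
        _append_boxes_from_meanshift can track the object later."""
        (x, y, w, h) = box
        hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
        roi_hist = cv2.calcHist([hsv[y:y+h, x:x+w]], [0], None, [180], [0, 180])
        cv2.normalize(roi_hist, roi_hist, 0, 255, cv2.NORM_MINMAX)
        self.object_roi.append(roi_hist)
        self.object_box.append(box)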
Example #2
Source File: test_monkey.py    From ATX with Apache License 2.0
import cv2
import matplotlib.pyplot as plt

def test_features():
    from atx.drivers.android_minicap import AndroidDeviceMinicap
    cv2.namedWindow("preview")
    d = AndroidDeviceMinicap()

    # r, h, c, w = 200, 100, 200, 100
    # track_window = (c, r, w, h)
    # oldimg = cv2.imread('base1.png')
    # roi = oldimg[r:r+h, c:c+w]
    # hsv_roi = cv2.cvtColor(roi, cv2.COLOR_BGR2HSV)
    # mask = cv2.inRange(hsv_roi, 0, 255)
    # roi_hist = cv2.calcHist([hsv_roi], [0], mask, [180], [0,180])
    # cv2.normalize(roi_hist, roi_hist, 0, 255, cv2.NORM_MINMAX)
    # term_cirt = (cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT,  10, 1)


    while True:
        try:
            h, w = d._screen.shape[:2]                     # shape is (rows, cols)
            img = cv2.resize(d._screen, (w // 2, h // 2))  # resize expects (width, height)
            cv2.imshow('preview', img)

            hist = cv2.calcHist([img], [0], None, [256], [0,256])
            plt.plot(hist)   # plot the 256-bin intensity histogram of channel 0
            plt.show()
            # if img.shape == oldimg.shape:
            #     # hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
            #     # ret, track_window = cv2.meanShift(hsv, track_window, term_cirt)
            #     # x, y, w, h = track_window
            #     cv2.rectangle(img, (x, y), (x+w, y+h), 255, 2)
            #     cv2.imshow('preview', img)
            # # cv2.imshow('preview', img)
            cv2.waitKey(1)
        except KeyboardInterrupt:
            break

    cv2.destroyWindow('preview') 
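Note that in this example the mean-shift calls themselves are left commented out; only the screen preview and histogram plot actually run. The commented block follows the same calcBackProject/meanShift pattern shown in the sketch above and in Example #3 below.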
Example #3
Source File: object-tracking.py    From rpi-opencv with GNU General Public License v3.0
import cv2
import numpy as np

def run_main():
    cap = cv2.VideoCapture('crash-480.mp4')
    #cap.set(3,320)
    #cap.set(4,240)

    # Read the first frame of the video
    ret, frame = cap.read()

    # Set the ROI (region of interest): the rectangle around the
    # object we want to track
    c,r,w,h = 427,240,50,50
    track_window = (c,r,w,h)

    # Create mask and normalized histogram
    roi = frame[r:r+h, c:c+w]
    hsv_roi = cv2.cvtColor(roi, cv2.COLOR_BGR2HSV)
    mask = cv2.inRange(hsv_roi, np.array((0., 30.,32.)), np.array((180.,255.,255.)))
    roi_hist = cv2.calcHist([hsv_roi], [0], mask, [180], [0, 180])
    cv2.normalize(roi_hist, roi_hist, 0, 255, cv2.NORM_MINMAX)
    term_crit = (cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT, 80, 1)
    
    while True:
        t = cv2.getTickCount()
        ret, frame = cap.read()
        if not ret:   # stop when the video ends
            break

        hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
        dst = cv2.calcBackProject([hsv], [0], roi_hist, [0,180], 1)

        ret, track_window = cv2.meanShift(dst, track_window, term_crit)

        x,y,w,h = track_window
        cv2.rectangle(frame, (x,y), (x+w,y+h), 255, 2)
        cv2.putText(frame, 'Tracked', (x-25,y-10), cv2.FONT_HERSHEY_SIMPLEX,
            .5, (255,255,255), 1, cv2.LINE_AA)
        
        t = cv2.getTickCount() - t
        print("detection time = %gms" % (t / cv2.getTickFrequency() * 1000.))
        cv2.imshow('Tracking', frame)

        if cv2.waitKey(1) & 0xFF == ord('q'):
            break

    cap.release()
    cv2.destroyAllWindows()
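To run Example #3 as a standalone script (the main guard is not part of the excerpt above), something like the following is needed, assuming crash-480.mp4 sits next to the script:

if __name__ == '__main__':
    run_main()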