使用多线程应用2个摄像机的CRST对象跟踪
我试图通过多线程使用2个摄像机应用CRST对象跟踪。 2个相机将直接连接在我的笔记本电脑USB端口中。 和2个摄像机将实时跟踪对象。
第一个功能是“ Object_tracking_1”,第二个功能是“ Object_tracking_2”。 我尝试将其与多线程一起使用。
如果我执行任何摄像机之一,它可以很好地工作。 (Camid:0(LabTop网络摄像头),1(Camera1),2(Camera2))
我的问题:
- 连接相机之一
我的笔记本电脑网络摄像头 +可以执行的 。 但是,当一个使用适当的对象跟踪执行时, 另一个不起作用。 我的意思是我看到了选择框区域的窗口图,但是我无法应用对象跟踪。
换句话说,两者都可以执行,但一个人不适用于对象跟踪。
- 2摄像机(主要目标)
当我执行它时,它是有效的,但是绘制的窗口显示了2个相机的视图非常快速,替代。
我收到了此错误消息:
Exception in thread Thread-8:
Traceback (most recent call last):
File "C:\ProgramData\Anaconda3\lib\threading.py", line 973, in _bootstrap_inner
self.run()
File "C:\ProgramData\Anaconda3\lib\threading.py", line 910, in run
self._target(*self._args, **self._kwargs)
File "C:\Users\user\AppData\Local\Temp/ipykernel_18232/2959108515.py", line 59, in object_tracking_1
cv2.error: OpenCV(3.4.11) C:\Users\appveyor\AppData\Local\Temp\1\pip-req-build-neg5amx3\opencv\modules\core\src\dxt.cpp:3335: error: (-215:Assertion failed) type == CV_32FC1 || type == CV_32FC2 || type == CV_64FC1 || type == CV_64FC2 in function 'cv::dft'
在Internet上,它说它是总线的问题,并与USB寻址,但是如果我一一使用它,它可以很好地工作。
代码:
```
import cv2
import sys
import threading
(major_ver, minor_ver, subminor_ver) = (cv2.__version__).split('.')
def object_tracking_1():
if __name__ == '__main__' :
# Set up tracker.
# Instead of MIL, you can also use
tracker_types = ['BOOSTING', 'MIL','KCF', 'TLD', 'MEDIANFLOW', 'GOTURN', 'MOSSE', 'CSRT']
tracker_type = tracker_types[7]
if int(minor_ver) < 3:
tracker = cv2.Tracker_create(tracker_type)
else:
if tracker_type == 'BOOSTING':
tracker = cv2.TrackerBoosting_create()
if tracker_type == 'MIL':
tracker = cv2.TrackerMIL_create()
if tracker_type == 'KCF':
tracker = cv2.TrackerKCF_create()
if tracker_type == 'TLD':
tracker = cv2.TrackerTLD_create()
if tracker_type == 'MEDIANFLOW':
tracker = cv2.TrackerMedianFlow_create()
if tracker_type == 'GOTURN':
tracker = cv2.TrackerGOTURN_create()
if tracker_type == 'MOSSE':
tracker = cv2.TrackerMOSSE_create()
if tracker_type == "CSRT":
tracker = cv2.TrackerCSRT_create()
# Read video
video = cv2.VideoCapture(1) ######################################################
#video = cv2.VideoCapture("video15.mp4")
# Exit if video not opened.
if not video.isOpened():
print ("Could not open video")
sys.exit()
# Read first frame.
ok, frame = video.read()
if not ok:
print ('Cannot read video file')
sys.exit()
# Define an initial bounding box
bbox = (287, 23, 86, 320)
# Uncomment the line below to select a different bounding box
bbox = cv2.selectROI(frame, False)
# Initialize tracker with first frame and bounding box
ok = tracker.init(frame, bbox)
while True:
# Read a new frame
ok, frame = video.read()
if not ok:
break
# Start timer
timer = cv2.getTickCount()
# Update tracker
ok, bbox = tracker.update(frame)
# Calculate Frames per second (FPS)
fps = cv2.getTickFrequency() / (cv2.getTickCount() - timer);
# Draw bounding box
if ok:
# Tracking success
p1 = (int(bbox[0]), int(bbox[1])) # bbox[0]=x value, bbox[1]=y value
p2 = (int(bbox[0] + bbox[2]), int(bbox[1] + bbox[3])) # x2, y2
cv2.rectangle(frame, p1, p2, (255,0,0), 2, 1)
#print(bbox)
List_x1 = (bbox[0]+bbox[2])/2
List_y1 = (bbox[1]+bbox[3])/2
RList_x1 = round(List_x1, 2)
RList_y1 = round(List_y1, 2)
Coordinate_x1 = str(RList_x1)
Coordinate_y1 = str(RList_y1)
cv2.putText(frame, "x Coordinate : "+ Coordinate_x1 , (100,80), cv2.FONT_HERSHEY_SIMPLEX, 0.75,(0,0,255),2)
cv2.putText(frame, "y Coordinate : "+ Coordinate_y1 , (100,105), cv2.FONT_HERSHEY_SIMPLEX, 0.75,(0,0,255),2)
# print('x1=' , RList_x1 , 'y2=' , RList_y1 )
# X = list(map(str,bbox))
# print(X)
# cv2.putText(frame, ", ".join(X) , (100,80), cv2.FONT_HERSHEY_SIMPLEX, 0.75,(0,0,255),2)
else :
# Tracking failure
cv2.putText(frame, "Tracking failure detected", (100,80), cv2.FONT_HERSHEY_SIMPLEX, 0.75,(0,0,255),2)
# Display tracker type on frame
cv2.putText(frame, tracker_type + " Tracker", (100,20), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (50,170,50),2);
# Display FPS on frame
cv2.putText(frame, "FPS : " + str(int(fps)), (100,50), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (50,170,50), 2);
# Display result
cv2.imshow("Tracking", frame)
# Exit if ESC pressed
k = cv2.waitKey(1) & 0xff
if k == 27 : break
def object_tracking_2():
if __name__ == '__main__' :
# Set up tracker.
# Instead of MIL, you can also use
tracker_types = ['BOOSTING', 'MIL','KCF', 'TLD', 'MEDIANFLOW', 'GOTURN', 'MOSSE', 'CSRT']
tracker_type = tracker_types[7]
if int(minor_ver) < 3:
tracker = cv2.Tracker_create(tracker_type)
else:
if tracker_type == 'BOOSTING':
tracker = cv2.TrackerBoosting_create()
if tracker_type == 'MIL':
tracker = cv2.TrackerMIL_create()
if tracker_type == 'KCF':
tracker = cv2.TrackerKCF_create()
if tracker_type == 'TLD':
tracker = cv2.TrackerTLD_create()
if tracker_type == 'MEDIANFLOW':
tracker = cv2.TrackerMedianFlow_create()
if tracker_type == 'GOTURN':
tracker = cv2.TrackerGOTURN_create()
if tracker_type == 'MOSSE':
tracker = cv2.TrackerMOSSE_create()
if tracker_type == "CSRT":
tracker = cv2.TrackerCSRT_create()
# Read video
video = cv2.VideoCapture(2) ##################################################
#video = cv2.VideoCapture("video15.mp4")
# Exit if video not opened.
if not video.isOpened():
print ("Could not open video")
sys.exit()
# Read first frame.
ok, frame = video.read()
if not ok:
print ('Cannot read video file')
sys.exit()
# Define an initial bounding box
bbox = (287, 23, 86, 320)
# Uncomment the line below to select a different bounding box
bbox = cv2.selectROI(frame, False)
# Initialize tracker with first frame and bounding box
ok = tracker.init(frame, bbox)
while True:
# Read a new frame
ok, frame = video.read()
if not ok:
break
# Start timer
timer = cv2.getTickCount()
# Update tracker
ok, bbox = tracker.update(frame)
# Calculate Frames per second (FPS)
fps = cv2.getTickFrequency() / (cv2.getTickCount() - timer);
# Draw bounding box
if ok:
# Tracking success
p1 = (int(bbox[0]), int(bbox[1])) # bbox[0]=x value, bbox[1]=y value
p2 = (int(bbox[0] + bbox[2]), int(bbox[1] + bbox[3])) # x2, y2
cv2.rectangle(frame, p1, p2, (255,0,0), 2, 1)
#print(bbox)
List_x2 = (bbox[0]+bbox[2])/2
List_y2 = (bbox[1]+bbox[3])/2
RList_x2 = round(List_x2, 2)
RList_y2 = round(List_y2, 2)
Coordinate_x2 = str(RList_x2)
Coordinate_y2 = str(RList_y2)
cv2.putText(frame, "x Coordinate : "+ Coordinate_x2 , (100,80), cv2.FONT_HERSHEY_SIMPLEX, 0.75,(0,0,255),2)
cv2.putText(frame, "y Coordinate : "+ Coordinate_y2 , (100,105), cv2.FONT_HERSHEY_SIMPLEX, 0.75,(0,0,255),2)
# print('x1=' , RList_x1 , 'y2=' , RList_y1 )
# X = list(map(str,bbox))
# print(X)
# cv2.putText(frame, ", ".join(X) , (100,80), cv2.FONT_HERSHEY_SIMPLEX, 0.75,(0,0,255),2)
else :
# Tracking failure
cv2.putText(frame, "Tracking failure detected", (100,80), cv2.FONT_HERSHEY_SIMPLEX, 0.75,(0,0,255),2)
# Display tracker type on frame
cv2.putText(frame, tracker_type + " Tracker", (100,20), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (50,170,50),2);
# Display FPS on frame
cv2.putText(frame, "FPS : " + str(int(fps)), (100,50), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (50,170,50), 2);
# Display result
cv2.imshow("Tracking", frame)
# Exit if ESC pressed
k = cv2.waitKey(1) & 0xff
if k == 27 : break
t1 = threading.Thread(target=object_tracking_1)
t2 = threading.Thread(target=object_tracking_2)
t1.start()
t2.start()
# t1.join()
# t2.join()
```
我的主要目标是为每个连接的相机应用CRST算法。
多谢。
I am trying to apply CRST object tracking with 2 cameras by multithread.
2 cameras will be directly connected in my laptop USB ports separately.
And 2 cameras will be tracking a object in real time.
The first function which is "object_tracking_1" and second one is "object_tracking_2".
And I try to use it with multithread.
If I execute one of any cameras, it works very well.
(The camID : 0(labtop webcam) , 1(camera1) , 2(camera2) )
My problem:
- My laptop webcam + one of the connected camera
It can be executed.
But when one is executed with proper object tracking ,
the other one is not working.
I mean I see the window plot to choose box area, but i cannot apply the object tracking.
In other words, both can be executed but one is not working for the object tracking.
- 2 cameras (main goal)
When I executed it , it is kind of working but the plotted window shows 2 camera's views very fast and alternatively.
And i got this error message:
Exception in thread Thread-8:
Traceback (most recent call last):
File "C:\ProgramData\Anaconda3\lib\threading.py", line 973, in _bootstrap_inner
self.run()
File "C:\ProgramData\Anaconda3\lib\threading.py", line 910, in run
self._target(*self._args, **self._kwargs)
File "C:\Users\user\AppData\Local\Temp/ipykernel_18232/2959108515.py", line 59, in object_tracking_1
cv2.error: OpenCV(3.4.11) C:\Users\appveyor\AppData\Local\Temp\1\pip-req-build-neg5amx3\opencv\modules\core\src\dxt.cpp:3335: error: (-215:Assertion failed) type == CV_32FC1 || type == CV_32FC2 || type == CV_64FC1 || type == CV_64FC2 in function 'cv::dft'
On the internet, it says that it is the problem with bus and address with USB, but if i use it one by one, it works perfectly.
Code :
```
import cv2
import sys
import threading
(major_ver, minor_ver, subminor_ver) = (cv2.__version__).split('.')
def object_tracking_1():
if __name__ == '__main__' :
# Set up tracker.
# Instead of MIL, you can also use
tracker_types = ['BOOSTING', 'MIL','KCF', 'TLD', 'MEDIANFLOW', 'GOTURN', 'MOSSE', 'CSRT']
tracker_type = tracker_types[7]
if int(minor_ver) < 3:
tracker = cv2.Tracker_create(tracker_type)
else:
if tracker_type == 'BOOSTING':
tracker = cv2.TrackerBoosting_create()
if tracker_type == 'MIL':
tracker = cv2.TrackerMIL_create()
if tracker_type == 'KCF':
tracker = cv2.TrackerKCF_create()
if tracker_type == 'TLD':
tracker = cv2.TrackerTLD_create()
if tracker_type == 'MEDIANFLOW':
tracker = cv2.TrackerMedianFlow_create()
if tracker_type == 'GOTURN':
tracker = cv2.TrackerGOTURN_create()
if tracker_type == 'MOSSE':
tracker = cv2.TrackerMOSSE_create()
if tracker_type == "CSRT":
tracker = cv2.TrackerCSRT_create()
# Read video
video = cv2.VideoCapture(1) ######################################################
#video = cv2.VideoCapture("video15.mp4")
# Exit if video not opened.
if not video.isOpened():
print ("Could not open video")
sys.exit()
# Read first frame.
ok, frame = video.read()
if not ok:
print ('Cannot read video file')
sys.exit()
# Define an initial bounding box
bbox = (287, 23, 86, 320)
# Uncomment the line below to select a different bounding box
bbox = cv2.selectROI(frame, False)
# Initialize tracker with first frame and bounding box
ok = tracker.init(frame, bbox)
while True:
# Read a new frame
ok, frame = video.read()
if not ok:
break
# Start timer
timer = cv2.getTickCount()
# Update tracker
ok, bbox = tracker.update(frame)
# Calculate Frames per second (FPS)
fps = cv2.getTickFrequency() / (cv2.getTickCount() - timer);
# Draw bounding box
if ok:
# Tracking success
p1 = (int(bbox[0]), int(bbox[1])) # bbox[0]=x value, bbox[1]=y value
p2 = (int(bbox[0] + bbox[2]), int(bbox[1] + bbox[3])) # x2, y2
cv2.rectangle(frame, p1, p2, (255,0,0), 2, 1)
#print(bbox)
List_x1 = (bbox[0]+bbox[2])/2
List_y1 = (bbox[1]+bbox[3])/2
RList_x1 = round(List_x1, 2)
RList_y1 = round(List_y1, 2)
Coordinate_x1 = str(RList_x1)
Coordinate_y1 = str(RList_y1)
cv2.putText(frame, "x Coordinate : "+ Coordinate_x1 , (100,80), cv2.FONT_HERSHEY_SIMPLEX, 0.75,(0,0,255),2)
cv2.putText(frame, "y Coordinate : "+ Coordinate_y1 , (100,105), cv2.FONT_HERSHEY_SIMPLEX, 0.75,(0,0,255),2)
# print('x1=' , RList_x1 , 'y2=' , RList_y1 )
# X = list(map(str,bbox))
# print(X)
# cv2.putText(frame, ", ".join(X) , (100,80), cv2.FONT_HERSHEY_SIMPLEX, 0.75,(0,0,255),2)
else :
# Tracking failure
cv2.putText(frame, "Tracking failure detected", (100,80), cv2.FONT_HERSHEY_SIMPLEX, 0.75,(0,0,255),2)
# Display tracker type on frame
cv2.putText(frame, tracker_type + " Tracker", (100,20), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (50,170,50),2);
# Display FPS on frame
cv2.putText(frame, "FPS : " + str(int(fps)), (100,50), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (50,170,50), 2);
# Display result
cv2.imshow("Tracking", frame)
# Exit if ESC pressed
k = cv2.waitKey(1) & 0xff
if k == 27 : break
def object_tracking_2():
if __name__ == '__main__' :
# Set up tracker.
# Instead of MIL, you can also use
tracker_types = ['BOOSTING', 'MIL','KCF', 'TLD', 'MEDIANFLOW', 'GOTURN', 'MOSSE', 'CSRT']
tracker_type = tracker_types[7]
if int(minor_ver) < 3:
tracker = cv2.Tracker_create(tracker_type)
else:
if tracker_type == 'BOOSTING':
tracker = cv2.TrackerBoosting_create()
if tracker_type == 'MIL':
tracker = cv2.TrackerMIL_create()
if tracker_type == 'KCF':
tracker = cv2.TrackerKCF_create()
if tracker_type == 'TLD':
tracker = cv2.TrackerTLD_create()
if tracker_type == 'MEDIANFLOW':
tracker = cv2.TrackerMedianFlow_create()
if tracker_type == 'GOTURN':
tracker = cv2.TrackerGOTURN_create()
if tracker_type == 'MOSSE':
tracker = cv2.TrackerMOSSE_create()
if tracker_type == "CSRT":
tracker = cv2.TrackerCSRT_create()
# Read video
video = cv2.VideoCapture(2) ##################################################
#video = cv2.VideoCapture("video15.mp4")
# Exit if video not opened.
if not video.isOpened():
print ("Could not open video")
sys.exit()
# Read first frame.
ok, frame = video.read()
if not ok:
print ('Cannot read video file')
sys.exit()
# Define an initial bounding box
bbox = (287, 23, 86, 320)
# Uncomment the line below to select a different bounding box
bbox = cv2.selectROI(frame, False)
# Initialize tracker with first frame and bounding box
ok = tracker.init(frame, bbox)
while True:
# Read a new frame
ok, frame = video.read()
if not ok:
break
# Start timer
timer = cv2.getTickCount()
# Update tracker
ok, bbox = tracker.update(frame)
# Calculate Frames per second (FPS)
fps = cv2.getTickFrequency() / (cv2.getTickCount() - timer);
# Draw bounding box
if ok:
# Tracking success
p1 = (int(bbox[0]), int(bbox[1])) # bbox[0]=x value, bbox[1]=y value
p2 = (int(bbox[0] + bbox[2]), int(bbox[1] + bbox[3])) # x2, y2
cv2.rectangle(frame, p1, p2, (255,0,0), 2, 1)
#print(bbox)
List_x2 = (bbox[0]+bbox[2])/2
List_y2 = (bbox[1]+bbox[3])/2
RList_x2 = round(List_x2, 2)
RList_y2 = round(List_y2, 2)
Coordinate_x2 = str(RList_x2)
Coordinate_y2 = str(RList_y2)
cv2.putText(frame, "x Coordinate : "+ Coordinate_x2 , (100,80), cv2.FONT_HERSHEY_SIMPLEX, 0.75,(0,0,255),2)
cv2.putText(frame, "y Coordinate : "+ Coordinate_y2 , (100,105), cv2.FONT_HERSHEY_SIMPLEX, 0.75,(0,0,255),2)
# print('x1=' , RList_x1 , 'y2=' , RList_y1 )
# X = list(map(str,bbox))
# print(X)
# cv2.putText(frame, ", ".join(X) , (100,80), cv2.FONT_HERSHEY_SIMPLEX, 0.75,(0,0,255),2)
else :
# Tracking failure
cv2.putText(frame, "Tracking failure detected", (100,80), cv2.FONT_HERSHEY_SIMPLEX, 0.75,(0,0,255),2)
# Display tracker type on frame
cv2.putText(frame, tracker_type + " Tracker", (100,20), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (50,170,50),2);
# Display FPS on frame
cv2.putText(frame, "FPS : " + str(int(fps)), (100,50), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (50,170,50), 2);
# Display result
cv2.imshow("Tracking", frame)
# Exit if ESC pressed
k = cv2.waitKey(1) & 0xff
if k == 27 : break
t1 = threading.Thread(target=object_tracking_1)
t2 = threading.Thread(target=object_tracking_2)
t1.start()
t2.start()
# t1.join()
# t2.join()
```
My main goal is to apply CRST algorithms for each connected camera.
Thanks a lot.
如果你对这篇内容有疑问,欢迎到本站社区发帖提问 参与讨论,获取更多帮助,或者扫码二维码加入 Web 技术交流群。

绑定邮箱获取回复消息
由于您还没有绑定你的真实邮箱,如果其他用户或者作者回复了您的评论,将不能在第一时间通知您!
发布评论