I am working on a robot that detects users and follows them. I am using YOLOv2 from the darkflow repo for the object detection module.
As of now, I want the Arduino micro-controller to drive the robot forward toward the bounding box when one is detected. There is no distance sensor or stereo camera — the logic is simply: if a bounding box is detected, then drive the motors forward.
This is my code for detecting bounding boxes through the camera. It would be helpful if someone could point me to the right resources or tutorials. Thank you.
import cv2
from darkflow.net.build import TFNet
import numpy as np
import time
# Darkflow configuration: YOLOv2 network definition and weights, keep only
# detections with confidence >= 0.8, and let TensorFlow grab up to 80% of
# GPU memory ('gpu': 0.8).
options = {
'model': 'cfg/yolov2.cfg',
'load': 'bin/yolov2.weights',
'threshold': 0.8,
'gpu': 0.8
}
# Build the network once up front (loads the weights; slow, do not repeat per frame).
tfnet = TFNet(options)
# Ten random BGR color tuples, one per drawn bounding box, reused every frame.
colors = [tuple(255 * np.random.rand(3)) for _ in range(10)]
# Default camera (index 0), requested at 1920x1080 — the driver may ignore
# these hints and deliver its native resolution instead.
capture = cv2.VideoCapture(0)
capture.set(cv2.CAP_PROP_FRAME_WIDTH, 1920)
capture.set(cv2.CAP_PROP_FRAME_HEIGHT, 1080)
# Main capture -> detect -> draw -> display loop. Press 'q' in the OpenCV
# window to quit. (Indentation restored — the pasted version was flush-left
# and would not parse.)
while True:
    stime = time.time()
    ret, frame = capture.read()
    if not ret:
        # Camera delivered no frame (unplugged, busy, or end of stream).
        # Stop instead of spinning forever on a dead capture — the original
        # looped with nothing to do when ret was False.
        break
    results = tfnet.return_predict(frame)
    # zip() silently caps drawing at len(colors) == 10 boxes per frame.
    for color, result in zip(colors, results):
        tl = (result['topleft']['x'], result['topleft']['y'])
        br = (result['bottomright']['x'], result['bottomright']['y'])
        label = result['label']
        confidence = result['confidence']
        text = '{}: {:.0f}%'.format(label, confidence * 100)
        frame = cv2.rectangle(frame, tl, br, color, 5)
        frame = cv2.putText(frame, text, tl, cv2.FONT_HERSHEY_COMPLEX,
                            1, (0, 0, 0), 2)
    cv2.imshow('frame', frame)
    # Rough end-to-end FPS for this iteration (capture + inference + draw).
    print('FPS {:.1f}'.format(1 / (time.time() - stime)))
    # waitKey also pumps the GUI event loop; mask to 8 bits for portability.
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
# Release the camera and close the display window on exit.
capture.release()
cv2.destroyAllWindows()