# To stop the script, press the Q key
import cv2
from ultralytics import YOLO
import math
model = YOLO("C:\\Users\\<USER>\\Desktop\\OpenML\\high.pt") # Replace with the path to your downloaded OpenML model
focal_length_pixels = 600 # Camera focal length in pixels (not used in this snippet)
object_real_width = 3.8 # Real-world width of the object (not used in this snippet)
camera_angle_from_object = 0
fov_degrees = 60 # Horizontal camera FOV in degrees; see the Camera Calibration page for how to measure it
object_width = 0
object_height = 0
maxWidth = 0 # Width bounds (not used in this snippet)
minWidth = 3000
cap = cv2.VideoCapture(0) # If no camera feed appears, increment the index by 1 until the camera opens
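# Optional check (a sketch, not required by the script): the loop below assumes a 640x480 frame,
# so it can help to confirm the actual capture resolution before tuning the screen-center value.
# cap.get() with cv2.CAP_PROP_FRAME_WIDTH / cv2.CAP_PROP_FRAME_HEIGHT is standard OpenCV; some backends return 0.
frame_width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))   # typically 640
frame_height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT)) # typically 480
print(f"Capture resolution: {frame_width}x{frame_height}")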
while True:
    ret, frame = cap.read()
    if not ret:
        break
    results = model.predict(frame, conf=0.5)
    frame_with_results = results[0].plot()
    boxes = results[0].boxes
    if len(boxes) > 0:
        x1, y1, x2, y2 = map(int, boxes.xyxy[0])
        object_width = x2 - x1
        object_height = y2 - y1
        middle_of_the_object_pos = (x2 + x1) / 2
        middle_of_the_screen = 640 / 2 # Half the horizontal resolution (assumes 640x480; use 320 instead of 640 for a 320x240 stream)
        offset = middle_of_the_object_pos - middle_of_the_screen
        camera_angle_from_object = (offset / 640) * fov_degrees # Horizontal angle of the sample relative to the camera center (the same formula works vertically; with a 640x480 frame use 480 instead of 640). See the helper sketch after the listing
        first_angle = 0.00 # Smallest angle measured on the Camera Calibration page: arctan(width / height) with the sample straight
        y = 0.00 # Scale factor from calibration: (max angle - min angle) / 90, e.g. (66.32 - 24.79) / 90 ≈ 0.4614. Must be non-zero; see the worked example after the listing
        raw_angle = math.degrees(math.atan(object_width / object_height))
        object_angle = (raw_angle - first_angle) / y
        servo_position = 0.5 + (object_angle * 0.0038) # Servo position, calculated the same way as on the Camera Calibration page
        cv2.putText(frame_with_results, f"Sample Angle: {object_angle:.2f}",
                    (x1, y1 + 80),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0, 255, 0), 2)
        cv2.putText(frame_with_results, f"Servo Pos: {servo_position:.2f}",
                    (x1, y1 + 100),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0, 255, 0), 2)
    cv2.imshow('OpenML - Real-time Detection', frame_with_results)
    if cv2.waitKey(1) & 0xFF == ord('q'): # Press Q to quit
        break
cap.release()
cv2.destroyAllWindows()
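The two calibration placeholders (first_angle and y) and the servo mapping can be checked offline before running the camera loop. Below is a minimal sketch that reuses the example calibration angles from the comments above (24.79 and 66.32 degrees) together with a hypothetical 120x80 px bounding box; the detection values are illustrative only.

import math

min_calibration_angle = 24.79 # smallest arctan(width / height) measured during calibration -> first_angle
max_calibration_angle = 66.32 # largest arctan(width / height) measured during calibration

first_angle = min_calibration_angle
y = (max_calibration_angle - min_calibration_angle) / 90 # ≈ 0.4614

object_width, object_height = 120, 80 # hypothetical bounding box from a detection

raw_angle = math.degrees(math.atan(object_width / object_height)) # ≈ 56.31 degrees
object_angle = (raw_angle - first_angle) / y                      # ≈ 68.31 degrees
servo_position = 0.5 + (object_angle * 0.0038)                    # ≈ 0.76

print(f"raw {raw_angle:.2f}  sample {object_angle:.2f}  servo {servo_position:.3f}")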
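The horizontal angle computed inside the loop is a simple proportional mapping from pixel offset to field of view. Below is a small sketch of the same relation, with angle_from_offset as a hypothetical helper name and a 640 px wide frame with a 60 degree FOV assumed.

def angle_from_offset(offset_px, frame_width_px, fov_degrees):
    # Map a horizontal pixel offset from the image center to an angle in degrees.
    return (offset_px / frame_width_px) * fov_degrees

# An object centered 160 px to the right of the image center on a 640 px wide frame
# with a 60 degree horizontal FOV sits roughly 15 degrees off-axis.
print(angle_from_offset(160, 640, 60)) # 15.0

This linear mapping is an approximation; the exact relation involves the tangent of the half-FOV, but for moderate offsets the proportional form used in the loop is close enough for aiming a servo.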