I'm trying to detect whether a user's eyes are open or closed in a live video feed, using OpenCV's Haar cascade classifiers in Python. Unfortunately it doesn't work well.
My understanding is that "haarcascade_eye.xml" detects only open eyes, while "haarcascade_lefteye_2splits.xml" detects an eye whether it is open or closed.
So, for each frame, I compare the number of open-eye detections with the number of eye detections in general, but I get false detections of closed eyes. Are there other/better ways to improve this? (A stripped-down, single-frame version of the comparison is at the end of this post.)
Here is my code:
import numpy as np
import cv2

face_cascade = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')
eye_cascade = cv2.CascadeClassifier('haarcascade_eye.xml')
lefteye_cascade = cv2.CascadeClassifier('haarcascade_lefteye_2splits.xml')

cap = cv2.VideoCapture(0)

while True:
    ret, img = cap.read()
    if not ret:
        break
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    faces = face_cascade.detectMultiScale(gray, 1.3, 5)
    for (x, y, w, h) in faces:
        cv2.rectangle(img, (x, y), (x + w, y + h), (255, 0, 0), 2)
        # region of interest: right half of the detected face,
        # so both eye cascades search the same region
        # (integer division keeps the slice indices as ints)
        roi_gray = gray[y:y + h, x + w // 2:x + w]
        roi_color = img[y:y + h, x + w // 2:x + w]
        eye = 0
        openEye = 0
        # haarcascade_eye should fire only on open eyes,
        # haarcascade_lefteye_2splits on an eye whether open or closed
        openEyes = eye_cascade.detectMultiScale(roi_gray)
        AllEyes = lefteye_cascade.detectMultiScale(roi_gray)
        for (ex, ey, ew, eh) in openEyes:
            openEye += 1
            cv2.rectangle(roi_color, (ex, ey), (ex + ew, ey + eh), (0, 255, 0), 2)
        for (ex, ey, ew, eh) in AllEyes:
            eye += 1
            cv2.rectangle(roi_color, (ex, ey), (ex + ew, ey + eh), (0, 0, 40), 2)
        # counts differ -> assume a closed eye was found
        if openEye != eye:
            print('alert')
    cv2.imshow('img', img)
    k = cv2.waitKey(30) & 0xff
    if k == 27:  # Esc to quit
        break

cap.release()
cv2.destroyAllWindows()
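
In case the full loop obscures it, here is a stripped-down, single-frame sketch of the comparison I'm describing. The scaleFactor / minNeighbors / minSize values passed to detectMultiScale are just untuned guesses on my part, and 'face.jpg' is a placeholder path for a test image, not something from my actual setup:

import cv2

face_cascade = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')
eye_cascade = cv2.CascadeClassifier('haarcascade_eye.xml')
lefteye_cascade = cv2.CascadeClassifier('haarcascade_lefteye_2splits.xml')

# 'face.jpg' is just a placeholder for a single test frame
img = cv2.imread('face.jpg')
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)

for (x, y, w, h) in face_cascade.detectMultiScale(gray, 1.3, 5):
    # same region of interest as above: right half of the face
    roi = gray[y:y + h, x + w // 2:x + w]
    # explicit detector parameters; these values are untuned guesses
    open_hits = eye_cascade.detectMultiScale(roi, scaleFactor=1.1, minNeighbors=5, minSize=(20, 20))
    all_hits = lefteye_cascade.detectMultiScale(roi, scaleFactor=1.1, minNeighbors=5, minSize=(20, 20))
    # same comparison as in the loop above: alert when the counts differ
    if len(open_hits) != len(all_hits):
        print('alert')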