commit d8ef457c22 (parent f017d73429)
@@ -1,69 +1,87 @@
-import cv2
-import mediapipe as mp
-import time
-import math
-import pyautogui
-
-cap = cv2.VideoCapture(0)
-pyautogui.PAUSE = 0.01
-mpHands = mp.solutions.hands
-hands = mpHands.Hands(max_num_hands=1, min_tracking_confidence=0.80, min_detection_confidence=0.90)
-mpDraw = mp.solutions.drawing_utils
-testing = 23
-click_dis = 35
-sensitivity = 3.5
-
-
-def get_distance(first, second, height, width):
-    dist_x = (results.multi_hand_landmarks[0].landmark[first].x - results.multi_hand_landmarks[0].landmark[
-        second].x) * width
-    dist_y = (results.multi_hand_landmarks[0].landmark[first].y - results.multi_hand_landmarks[0].landmark[
-        second].y) * height
-    return math.sqrt(abs(dist_x ** 2 + dist_y ** 2))
-
-
-x, y = None, None
-
-while True:
-    success, img = cap.read()
-    h, w, c = img.shape
-    imgRGB = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
-    results = hands.process(imgRGB)
-    if results.multi_hand_landmarks:
-        dist_palm = round(get_distance(0, 9, h, w) / 100, 3)
-        if not x and not y:
-            x, y = results.multi_hand_landmarks[0].landmark[4].x * w, results.multi_hand_landmarks[0].landmark[4].y * h
-        dis_1 = get_distance(4, 8, h, w)
-        cv2.putText(img, f"""dist 4-8: {round(dis_1)}/{round(testing * dist_palm, 2)}""", (0, 15),
-                    cv2.FONT_HERSHEY_PLAIN, 1.5, (255, 0, 255), thickness=2)
-        cv2.putText(img, f"""MOVE: {"True" if dis_1 < testing else "False"}""", (0, 40),
-                    cv2.FONT_HERSHEY_PLAIN, 1.5, (255, 0, 255), thickness=2)
-        cv2.putText(img, f"""dist 4-12: {round(get_distance(12, 4, h, w))}/{round(click_dis * dist_palm, 2)}""",
-                    (0, 65),
-                    cv2.FONT_HERSHEY_PLAIN, 1.5, (255, 0, 255), thickness=2)
-        cv2.putText(img, f"""CLICK: {"True" if get_distance(12, 4, h, w) < click_dis * dist_palm else "False"}""",
-                    (0, 90),
-                    cv2.FONT_HERSHEY_PLAIN, 1.5, (255, 0, 255), thickness=2)
-        cv2.putText(img, f"""dsit 0-9: {dist_palm}""", (0, 115),
-                    cv2.FONT_HERSHEY_PLAIN, 1.5, (255, 0, 255), thickness=2)
-        for handLms in results.multi_hand_landmarks:
-            mpDraw.draw_landmarks(img, handLms)
-        if dis_1 < testing * dist_palm and get_distance(12, 4, h, w) < click_dis * dist_palm:
-            pyautogui.dragRel(-(results.multi_hand_landmarks[0].landmark[3].x * w - x) * sensitivity,
-                              (results.multi_hand_landmarks[0].landmark[3].y * h - y) * sensitivity, duration=0.001)
-        elif dis_1 < testing * dist_palm:
-            pyautogui.moveRel(-(results.multi_hand_landmarks[0].landmark[3].x * w - x) * sensitivity,
-                              (results.multi_hand_landmarks[0].landmark[3].y * h - y) * sensitivity, duration=0.001)
-
-        elif get_distance(12, 4, h, w) < click_dis * dist_palm and not click:
-            pyautogui.click()
-            click = True
-            # mouse.click("left")
-            print("clicked")
-        else:
-            click = False
-        x, y = results.multi_hand_landmarks[0].landmark[3].x * w, results.multi_hand_landmarks[0].landmark[3].y * h
-    else:
-        x, y = None, None
-    cv2.imshow("Image", img)
-    cv2.waitKey(1)
+import math
+import cv2
+import mediapipe as mp
+import pyautogui
+
+
+class Hands_AI:
+    def __init__(self):
+        self.cap = cv2.VideoCapture(0)
+        pyautogui.PAUSE = 0.01
+        self.mpHands = mp.solutions.hands
+        self.hands = self.mpHands.Hands(max_num_hands=1, min_tracking_confidence=0.95, min_detection_confidence=0.90)
+        self.mpDraw = mp.solutions.drawing_utils
+        self.mov_dis = 0.2479
+        self.click_dis = 0.2823
+        self.sensitivity = 3.5
+        self.x, self.y = None, None
+        self.click = False
+
+    def get_distance(self, first, second, height, width):
+        dist_x = (self.results.multi_hand_landmarks[0].landmark[first].x -
+                  self.results.multi_hand_landmarks[0].landmark[
+                      second].x) * width
+        dist_y = (self.results.multi_hand_landmarks[0].landmark[first].y -
+                  self.results.multi_hand_landmarks[0].landmark[
+                      second].y) * height
+        return math.sqrt(abs(dist_x ** 2 + dist_y ** 2))
+
+    def run(self, q):
+        while True:
+            success, img = self.cap.read()
+            self.h, self.w, self.c = img.shape
+            imgRGB = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
+            self.results = self.hands.process(imgRGB)
+            if self.results.multi_hand_landmarks:
+                dist_palm = round(self.get_distance(0, 5, self.h, self.w))
+                if not self.x and not self.y:
+                    self.x, self.y = self.results.multi_hand_landmarks[0].landmark[4].x * self.w, \
+                                     self.results.multi_hand_landmarks[0].landmark[4].y * self.h
+                dis_1 = self.get_distance(12, 8, self.h, self.w)
+                cv2.putText(img,
+                            f"""MOVE: {round(dis_1)}/{round(self.mov_dis * dist_palm, 2)} - {"true" if dis_1 < self.mov_dis * dist_palm else "false"}""",
+                            (0, 15),
+                            cv2.FONT_HERSHEY_PLAIN, 1.5, (255, 0, 255), thickness=2)
+                cv2.putText(img,
+                            f"""CLICK: {round(self.get_distance(5, 3, self.h, self.w))}/{round(self.click_dis * dist_palm, 2)} - {"true" if self.get_distance(5, 3, self.h, self.w) < self.click_dis * dist_palm else "false"}""",
+                            (0, 40),
+                            cv2.FONT_HERSHEY_PLAIN, 1.5, (255, 0, 255), thickness=2)
+                cv2.putText(img,
+                            f"""DRAG: {"true" if dis_1 < self.mov_dis * dist_palm and self.get_distance(5, 3, self.h, self.w) < self.click_dis * dist_palm else "false"}""",
+                            (0, 65),
+                            cv2.FONT_HERSHEY_PLAIN, 1.5, (255, 0, 255), thickness=2)
+                for handLms in self.results.multi_hand_landmarks:
+                    self.mpDraw.draw_landmarks(img, handLms, self.mpHands.HAND_CONNECTIONS)
+                if dis_1 < self.mov_dis * dist_palm and self.get_distance(5, 3, self.h,
+                                                                          self.w) < self.click_dis * dist_palm:
+                    pyautogui.mouseDown()
+                    pyautogui.moveRel(
+                        -(self.results.multi_hand_landmarks[0].landmark[8].x * self.w - self.x) * self.sensitivity,
+                        (self.results.multi_hand_landmarks[0].landmark[8].y * self.h - self.y) * self.sensitivity,
+                        duration=0.001)
+                elif dis_1 < self.mov_dis * dist_palm:
+                    pyautogui.mouseUp()
+                    pyautogui.moveRel(
+                        -(self.results.multi_hand_landmarks[0].landmark[8].x * self.w - self.x) * self.sensitivity,
+                        (self.results.multi_hand_landmarks[0].landmark[8].y * self.h - self.y) * self.sensitivity,
+                        duration=0.001)
+
+                elif self.get_distance(5, 3, self.h, self.w) < self.click_dis * dist_palm and not self.click:
+                    pyautogui.mouseUp()
+                    pyautogui.click()
+                    self.click = True
+                else:
+                    pyautogui.mouseUp()
+                    self.click = False
+                self.x, self.y = self.results.multi_hand_landmarks[0].landmark[8].x * self.w, \
+                                 self.results.multi_hand_landmarks[0].landmark[8].y * self.h
+            else:
+                self.x, self.y = None, None
+            cv2.imshow("Image", img)
+            cv2.waitKey(1)
+
+
+if __name__ == '__main__':
+    ai = Hands_AI()
+    ai.run("X")
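The new class keys every gesture threshold to the palm length in pixels (wrist landmark 0 to index-finger MCP landmark 5), so the pinch tests stay roughly invariant to how far the hand is from the camera. A minimal sketch of that idea, reusing the 0.2823 click ratio from the constructor above; the helper names pixel_distance and is_pinch are illustrative and not part of the commit:

import math

def pixel_distance(lm_a, lm_b, width, height):
    # MediaPipe hand landmarks are normalised to [0, 1]; scale to pixels first.
    return math.hypot((lm_a.x - lm_b.x) * width, (lm_a.y - lm_b.y) * height)

def is_pinch(landmarks, a, b, width, height, ratio=0.2823):
    # Palm size = wrist (0) to index-finger MCP (5), as in Hands_AI.get_distance(0, 5).
    palm = pixel_distance(landmarks[0], landmarks[5], width, height)
    return pixel_distance(landmarks[a], landmarks[b], width, height) < ratio * palm

Given results = hands.process(imgRGB), calling is_pinch(results.multi_hand_landmarks[0].landmark, 5, 3, w, h) would reproduce the click test in run(), and the same check on landmarks 12 and 8 with the 0.2479 ratio would reproduce the move test.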