Loyd Lobo
Published © GPL3+

I Turned My Hands Into a Virtual Steering Wheel!!

๐ŸŽฎ " Steering Wheel with Just Your Hands!" ๐Ÿš— Tilt to steer, thumbs to drive or brakeโ€”no controller needed. Just a webcam! ๐Ÿ–๏ธ๐Ÿš—๐Ÿ’ป

Advanced
Full instructions provided
3 hours

Things used in this project

Hardware components

Laptop or PC
×1
Webcam or External Cam
×1

Software apps and online services

VS Code
Microsoft VS Code
OpenCV
OpenCV – Open Source Computer Vision Library
MediaPipe
Google MediaPipe Hands (hand-landmark tracking, imported by the code below)
PyAutoGUI
PyAutoGUI (keyboard automation, imported by the code below)

Story

This project turns your bare hands into a virtual steering wheel using nothing but a webcam. MediaPipe Hands tracks landmarks on both hands; the two wrists define an imaginary steering line, and the tilt of that line is translated into short A/D key taps to steer left or right. Each thumb works as a pedal: folding the left thumb down holds W to accelerate, and folding the right thumb down holds S to brake. The key presses are simulated with pyautogui, so the game window has to stay in focus, and any title that reads normal W/A/S/D keyboard input can be driven this way.
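
To make the steering logic concrete, here is a minimal sketch distilled from the full listing below (the function name steering_direction and the example coordinates are just for illustration). It looks only at the two wrist positions, in image-style pixel coordinates where y grows downward, and decides whether the tilt of the line between them counts as a left turn, a right turn, or straight ahead; the thumb pedals and the actual pyautogui key presses are left out.

import math

STRAIGHT_THRESHOLD = 6  # degrees of tilt tolerated before a turn registers

def steering_direction(left_wrist, right_wrist):
    # left_wrist / right_wrist are (x, y) wrist positions in pixels,
    # with y growing downward as in an OpenCV image
    x1, y1 = left_wrist
    x2, y2 = right_wrist
    angle = math.degrees(math.atan2(y2 - y1, x2 - x1))
    if angle < -STRAIGHT_THRESHOLD:
        return "left"     # right hand higher than the left
    if angle > STRAIGHT_THRESHOLD:
        return "right"    # left hand higher than the right
    return "straight"

# Left wrist at (100, 200), right wrist at (380, 150): the right hand is higher
print(steering_direction((100, 200), (380, 150)))  # -> "left"

In the full script the same decision triggers pyautogui.keyDown('a') or keyDown('d') followed by a quick keyUp, so the car receives a series of small nudges rather than a locked-in turn, while W and S are simply held down for as long as the matching thumb stays folded.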

Code

Code!!

Python
It took me days to figure out, and I am providing it for free!!
import cv2
import mediapipe as mp
import math
import pyautogui
import time

# MediaPipe drawing helpers and the Hands solution
mp_drawing = mp.solutions.drawing_utils
mphands = mp.solutions.hands

# Open the default webcam and configure the detector for up to two hands
cap = cv2.VideoCapture(0)
hands = mphands.Hands(static_image_mode=False, max_num_hands=2, min_detection_confidence=0.7, min_tracking_confidence=0.5)

def calculate_angle(x1, y1, x2, y2):
    # Angle (in degrees) of the line from the left wrist to the right wrist,
    # normalised to (-180, 180]; roughly 0 when the hands are level
    angle = math.degrees(math.atan2(y2 - y1, x2 - x1))
    if angle < 0:
        angle += 360
    if angle > 180:
        angle -= 360
    return angle

def is_thumb_up(hand_landmarks):
    # Image y grows downward, so the thumb counts as "up" when the tip
    # (landmark 4) sits above the joint below it (landmark 3)
    return hand_landmarks.landmark[4].y < hand_landmarks.landmark[3].y

# Key-hold state for accelerator (W) and brake (S)
pressing_w = False
pressing_s = False

turn_sensitivity = 0.01  # currently unused
straight_threshold = 6   # degrees of tilt tolerated before a turn is registered
tap_duration = 0.01      # still used, but for logic-based tap timing, not sleep

# Tap state for the A and D keys
tap_a_time = 0
tap_d_time = 0
tap_in_progress_a = False
tap_in_progress_d = False

while True:
    ret, image = cap.read()
    if not ret:
        break

    # Downscale and mirror the frame, convert to RGB for MediaPipe, then back to BGR for drawing
    image = cv2.resize(image, (480, 360))
    image = cv2.cvtColor(cv2.flip(image, 1), cv2.COLOR_BGR2RGB)
    results = hands.process(image)
    image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)

    current_time = time.time()

    # Only act when both hands are visible; otherwise the keys keep their last state
    if results.multi_hand_landmarks and len(results.multi_hand_landmarks) == 2:
        hand1 = results.multi_hand_landmarks[0]
        hand2 = results.multi_hand_landmarks[1]

        # Whichever wrist sits further left in the mirrored frame is treated as the left hand
        if hand1.landmark[0].x < hand2.landmark[0].x:
            left_hand = hand1
            right_hand = hand2
        else:
            left_hand = hand2
            right_hand = hand1

        # Wrist (landmark 0) positions converted to pixel coordinates
        x1, y1 = left_hand.landmark[0].x * image.shape[1], left_hand.landmark[0].y * image.shape[0]
        x2, y2 = right_hand.landmark[0].x * image.shape[1], right_hand.landmark[0].y * image.shape[0]

        # Draw both hand skeletons and the virtual steering line between the two wrists
        mp_drawing.draw_landmarks(image, left_hand, mphands.HAND_CONNECTIONS)
        mp_drawing.draw_landmarks(image, right_hand, mphands.HAND_CONNECTIONS)
        cv2.line(image, (int(x1), int(y1)), (int(x2), int(y2)), (0, 255, 0), 2)

        # Label each thumb tip (landmark 4) with its pedal role
        thumb1_x, thumb1_y = left_hand.landmark[4].x * image.shape[1], left_hand.landmark[4].y * image.shape[0]
        thumb2_x, thumb2_y = right_hand.landmark[4].x * image.shape[1], right_hand.landmark[4].y * image.shape[0]

        cv2.putText(image, "Accelerator", (int(thumb1_x), int(thumb1_y) - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 1)
        cv2.putText(image, "Brake", (int(thumb2_x), int(thumb2_y) - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 1)

        # Tilt of the steering line, overlaid in the top-left corner
        angle = calculate_angle(x1, y1, x2, y2)
        cv2.putText(image, f'{int(angle)} degrees', (10, 30), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 0, 0), 1)

        # Tap simulation for left (A) and right (D): press now, release after tap_duration
        if angle < -straight_threshold and not tap_in_progress_a:
            pyautogui.keyDown('a')
            tap_a_time = current_time
            tap_in_progress_a = True
            cv2.putText(image, "Left", (10, 210), cv2.FONT_HERSHEY_SIMPLEX, 0.7, (255, 255, 255), 2)
        elif angle > straight_threshold and not tap_in_progress_d:
            pyautogui.keyDown('d')
            tap_d_time = current_time
            tap_in_progress_d = True
            cv2.putText(image, "Right", (10, 210), cv2.FONT_HERSHEY_SIMPLEX, 0.7, (255, 255, 255), 2)

        # Release A key after tap_duration
        if tap_in_progress_a and (current_time - tap_a_time > tap_duration):
            pyautogui.keyUp('a')
            tap_in_progress_a = False

        # Release D key after tap_duration
        if tap_in_progress_d and (current_time - tap_d_time > tap_duration):
            pyautogui.keyUp('d')
            tap_in_progress_d = False

        # Acceleration: hold W while the left thumb is folded down
        if not is_thumb_up(left_hand):
            if not pressing_w:
                pyautogui.keyDown('w')
                pressing_w = True
        else:
            if pressing_w:
                pyautogui.keyUp('w')
                pressing_w = False

        # Braking: hold S while the right thumb is folded down
        if not is_thumb_up(right_hand):
            if not pressing_s:
                pyautogui.keyDown('s')
                pressing_s = True
        else:
            if pressing_s:
                pyautogui.keyUp('s')
                pressing_s = False

    cv2.imshow('Hand Tracker', image)
    if cv2.waitKey(1) & 0xFF == 27:  # Esc exits
        break

# Release any keys that are still held down before exiting
if pressing_w:
    pyautogui.keyUp('w')
if pressing_s:
    pyautogui.keyUp('s')
pyautogui.keyUp('a')
pyautogui.keyUp('d')

cap.release()
cv2.destroyAllWindows()

Credits

Loyd Lobo
3 projects • 14 followers
Hello!!! I like to innovate, think and fabricate... I have a YouTube channel where you can see my projects!!
