
GestureDrive: Arduino-Powered Hand Gesture Car

Meet GestureDrive: your Arduino-powered car, where hand gestures steer the way. Using OpenCV and MediaPipe magic, it turns your moves into motion.

Intermediate · Full instructions provided · 429

Things used in this project

Story


Schematics

The circuit diagram

I have included the HC-05 Bluetooth module; make sure to check the pin positions. The sketch talks to the HC-05 over the Arduino's hardware serial port at 9600 baud, so the module's TX/RX lines go to the board's hardware serial pins (0 and 1 on an Uno).

Code

Arduino code

C/C++
Make sure to check the pin positions.
int motor1PWM = 10; // PWM input for motor 1
int motor1IN1 = 4; // Input 1 for motor 1
int motor1IN2 = 3; // Input 2 for motor 1

int motor2PWM = 9; // PWM input for motor 2
int motor2IN1 = 5;  // Input 1 for motor 2
int motor2IN2 = 6;  // Input 2 for motor 2
int speed = 100;    // Motor speed (0-255)

char command;

// Function to move backward
void backward() {
  digitalWrite(13, HIGH); // Pin 13 (built-in LED on most boards) as a simple indicator
  digitalWrite(motor1IN1, HIGH);
  digitalWrite(motor1IN2, LOW);
  analogWrite(motor1PWM, speed);

  digitalWrite(motor2IN1, HIGH);
  digitalWrite(motor2IN2, LOW);
  analogWrite(motor2PWM, speed);
}

// Function to move forward
void forward() {
 
  digitalWrite(motor1IN1, LOW);
  digitalWrite(motor1IN2, HIGH);
  analogWrite(motor1PWM, speed);

  digitalWrite(motor2IN1, LOW);
  digitalWrite(motor2IN2, HIGH);
  analogWrite(motor2PWM, speed);
}

// Function to turn left
void left() {
  digitalWrite(motor1IN1, HIGH);
  digitalWrite(motor1IN2, LOW);
  analogWrite(motor1PWM, speed);

  digitalWrite(motor2IN1, LOW);
  digitalWrite(motor2IN2, HIGH);
  analogWrite(motor2PWM, speed);
}

// Function to turn right
void right() {
  digitalWrite(motor1IN1, LOW);
  digitalWrite(motor1IN2, HIGH);
  analogWrite(motor1PWM, speed);

  digitalWrite(motor2IN1, HIGH);
  digitalWrite(motor2IN2, LOW);
  analogWrite(motor2PWM, speed);
}

// Function to stop the motors
void stopMotors() {
  digitalWrite(motor1IN1, LOW);
  digitalWrite(motor1IN2, LOW);
  analogWrite(motor1PWM, 0);

  digitalWrite(motor2IN1, LOW);
  digitalWrite(motor2IN2, LOW);
  analogWrite(motor2PWM, 0);
}


void setup() {
  // Motor control pins as outputs
  pinMode(motor1PWM, OUTPUT);
  pinMode(motor1IN1, OUTPUT);
  pinMode(motor1IN2, OUTPUT);

  pinMode(motor2PWM, OUTPUT);
  pinMode(motor2IN1, OUTPUT);
  pinMode(motor2IN2, OUTPUT);
  pinMode(13, OUTPUT);

  // Set the motors to stop initially
  stopMotors();

  // Initialize Serial communication
  Serial.begin(9600);
}

void loop() {

  if (Serial.available() > 0) {
    command = Serial.read();
  }

  if (command == 'F') {
    forward();
  } else if (command == 'B') {
    backward();
  } else if (command == 'L') {
    left();
  } else if (command == 'R') {
    right();
  } else if (command == 'S') {
    stopMotors();
  }
}
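
The sketch above listens for single-character commands: 'F' (forward), 'B' (backward), 'L' (left), 'R' (right) and 'S' (stop). Before adding gesture control, you can sanity-check the motors and the HC-05 link from a PC with a short Python script like the minimal sketch below; it assumes pyserial is installed and that the HC-05 is paired as COM7, so adjust the port name to whatever your system shows.

# Manual drive test over the HC-05 link (the port name 'COM7' is an example)
import time
import serial

bt = serial.Serial('COM7', 9600, timeout=1)
time.sleep(2)  # give the Bluetooth link a moment to settle

try:
    # Cycle through the commands the Arduino sketch understands
    for command in [b'F', b'S', b'B', b'S', b'L', b'S', b'R', b'S']:
        bt.write(command)
        print('sent', command.decode())
        time.sleep(1)  # let the car react for a second
finally:
    bt.write(b'S')  # always leave the motors stopped
    bt.close()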

Hand tracking

Python
Make sure to use the corresponding serial port (a quick way to list the available ports is shown after the script).
import cv2
import mediapipe as mp
import math
import serial  
import time
from cvzone.SerialModule import SerialObject
arduino = SerialObject('COM5', 9600)  # cvzone serial helper; check the COM port
# Alternative using pyserial directly:
# arduino_serial = serial.Serial('COM3', 9600, timeout=1)
mp_hands = mp.solutions.hands
hands = mp_hands.Hands(static_image_mode=False, max_num_hands=1,
                       min_detection_confidence=0.5, min_tracking_confidence=0.5)
mp_drawing = mp.solutions.drawing_utils
gesture_states = [0, 0, 0, 0, 0]
value_sent = 'S'  # default command: stop (matches the Arduino sketch)
def calculate_distance(landmark1, landmark2):
    x1, y1, z1 = landmark1.x, landmark1.y, landmark1.z
    x2, y2, z2 = landmark2.x, landmark2.y, landmark2.z
    distance = math.sqrt((x2 - x1)**2 + (y2 - y1)**2 + (z2 - z1)**2)
    return distance
def is_thumb_up(hand_landmarks):
    thumb_tip = hand_landmarks.landmark[mp_hands.HandLandmark.THUMB_TIP]
    thumb_mcp = hand_landmarks.landmark[mp_hands.HandLandmark.THUMB_MCP]
    distance_threshold = 0.1
    distance = calculate_distance(thumb_tip, thumb_mcp)
    return distance > distance_threshold
# Define a function to check if the first finger is up
def is_first_finger_up(hand_landmarks):
    index_tip = hand_landmarks.landmark[mp_hands.HandLandmark.INDEX_FINGER_TIP]
    index_tip_mcp = hand_landmarks.landmark[mp_hands.HandLandmark.INDEX_FINGER_MCP]
    # Define first finger up criteria based on the distance between index finger tip and palm base
    distance_threshold = 0.1  # Adjust this threshold as needed
    distance = calculate_distance(index_tip, index_tip_mcp)
    return distance > distance_threshold

# Define a function to check if the middle finger is up
def is_middle_finger_up(hand_landmarks):
    middle_tip = hand_landmarks.landmark[mp_hands.HandLandmark.MIDDLE_FINGER_TIP]
    middle_tip_mcp = hand_landmarks.landmark[mp_hands.HandLandmark.MIDDLE_FINGER_MCP]
    
    # Define middle finger up criteria based on the distance between middle finger tip and palm base
    distance_threshold = 0.1  # Adjust this threshold as needed
    distance = calculate_distance(middle_tip, middle_tip_mcp)
    
    return distance > distance_threshold

# Define a function to check if the ring finger is up
def is_ring_finger_up(hand_landmarks):
    ring_tip = hand_landmarks.landmark[mp_hands.HandLandmark.RING_FINGER_TIP]
    ring_tip_mcp = hand_landmarks.landmark[mp_hands.HandLandmark.RING_FINGER_MCP]
    
    # Define ring finger up criteria based on the distance between ring finger tip and palm base
    distance_threshold = 0.1  # Adjust this threshold as needed
    distance = calculate_distance(ring_tip, ring_tip_mcp)
    
    return distance > distance_threshold

# Define a function to check if the pinky finger is up
def is_pinky_finger_up(hand_landmarks):
    pinky_tip = hand_landmarks.landmark[mp_hands.HandLandmark.PINKY_TIP]
    pinky_mcp = hand_landmarks.landmark[mp_hands.HandLandmark.PINKY_MCP]
    
    # Define pinky finger up criteria based on the distance between pinky finger tip and palm base
    distance_threshold = 0.1  # Adjust this threshold as needed
    distance = calculate_distance(pinky_tip, pinky_mcp)
    return distance > distance_threshold
cap = cv2.VideoCapture(0)

while cap.isOpened():
    gesture_states = [0, 0, 0, 0, 0]
    # Step 4: Read a frame from the webcam
    ret, frame = cap.read()
    if not ret:
        continue

    # Step 5: Convert the frame to RGB for MediaPipe
    frame_rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)

    # Step 6: Process the frame to detect hands
    results = hands.process(frame_rgb)

    # Step 7: Check if a hand is detected
    if results.multi_hand_landmarks:
        for hand_landmarks in results.multi_hand_landmarks:
            # Step 8: Draw landmarks on the frame
            mp_drawing.draw_landmarks(
                frame, hand_landmarks, mp_hands.HAND_CONNECTIONS)
            # Step 9: Check for gestures and update the array
            gesture_states[0] = 1 if is_thumb_up(hand_landmarks) else 0
            gesture_states[1] = 1 if is_first_finger_up(hand_landmarks) else 0
            gesture_states[2] = 1 if is_middle_finger_up(hand_landmarks) else 0
            gesture_states[3] = 1 if is_ring_finger_up(hand_landmarks) else 0
            gesture_states[4] = 1 if is_pinky_finger_up(hand_landmarks) else 0
            # Map the finger-state pattern to a single-character command
            # (uppercase to match the Arduino sketch)
            if gesture_states == [0, 0, 0, 0, 0]:
                value_sent = 'S'   # fist: stop
            elif gesture_states == [1, 0, 0, 0, 0]:
                value_sent = 'L'   # thumb only: turn left
            elif gesture_states == [1, 1, 0, 0, 0]:
                value_sent = 'B'   # thumb + index: backward
            elif gesture_states == [1, 1, 1, 0, 0]:
                value_sent = ''    # no command assigned to this gesture
            elif gesture_states == [0, 0, 0, 0, 1]:
                value_sent = 'R'   # pinky only: turn right
            elif gesture_states == [1, 1, 1, 1, 1]:
                value_sent = 'F'   # open hand: forward
            print('the mode is', value_sent)
            # arduino_serial.write(value_sent.encode())  # pyserial alternative
            arduino.sendData([value_sent])
            time.sleep(0.01)
    cv2.imshow("Hand Landmarks", frame)

    # Step 11: Print the current gesture states
    
    #print("Gesture States:", gesture_states)

    # Step 12: Exit the loop on 'q' key press
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break

# arduino_serial.close()  # only needed if using the pyserial alternative
# Step 13: Release resources
cap.release()
cv2.destroyAllWindows()

# Step 14: Clean up MediaPipe hands
hands.close()
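
The script assumes the HC-05 is paired as COM5. If you are not sure which port name to use, the short listing below (a minimal sketch using pyserial's list_ports helper) prints every available port so you can update the SerialObject line accordingly.

# List the available serial ports to find the one the HC-05 is paired on
from serial.tools import list_ports

for port in list_ports.comports():
    print(port.device, '-', port.description)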

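
The if/elif chain in the script hard-codes the mapping from the five finger states to a drive command. If you want to experiment with other gestures, one option is to pull that mapping out into a small lookup table that can be exercised without the camera or the car. The sketch below illustrates the idea; the GESTURE_COMMANDS table and the map_gesture_to_command helper are illustrative names, not part of the original script.

# Hypothetical refactor of the gesture-to-command mapping used in the script,
# so new gestures can be tried out without any hardware attached.
GESTURE_COMMANDS = {
    (0, 0, 0, 0, 0): 'S',  # fist: stop
    (1, 0, 0, 0, 0): 'L',  # thumb only: left
    (1, 1, 0, 0, 0): 'B',  # thumb + index: backward
    (0, 0, 0, 0, 1): 'R',  # pinky only: right
    (1, 1, 1, 1, 1): 'F',  # open hand: forward
}

def map_gesture_to_command(gesture_states, default='S'):
    # gesture_states is the [thumb, index, middle, ring, pinky] list of 0/1 flags
    return GESTURE_COMMANDS.get(tuple(gesture_states), default)

# Quick checks, no camera or serial link needed
assert map_gesture_to_command([1, 1, 1, 1, 1]) == 'F'
assert map_gesture_to_command([0, 1, 0, 0, 0]) == 'S'  # unmapped gestures fall back to stop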

Credits

gaxil
6 projects • 5 followers
wameedh scientific club
