vishal soniJyotishka Chattopadhyay
Created September 3, 2024 © MIT

Vision and EMG-Based Assistive Mouse

A hands-free mouse using pupil tracking for cursor movement and EMG sensors for clicking, designed to empower individuals with limited mobility

IntermediateOver 6 days34
Vision and EMG-Based Assistive Mouse

Things used in this project

Hardware components

Seeed Studio XIAO ESP32S3 Sense
Seeed Studio XIAO ESP32S3 Sense
×1
STM32 f411
×1
SparkFun MyoWare 2.0 Muscle Sensor Basic Kit
×1

Software apps and online services

Visual Studio Code Extension for Arduino
Microsoft Visual Studio Code Extension for Arduino
Thonny
Arduino IDE
Arduino IDE
OpenCV
OpenCV

Story

Read more

Schematics

schematic for stm32 connections with EMG

Code

Install MicroPython to ESP32s3 sense

Python
Install the XIAO_ESP32S3 MicroPython firmware, then extract the downloaded zip file and navigate to the folder.
Locate the USB-to-serial port address by opening Device Manager.
Click on the path box, type "CMD", and press Enter.
You will be directed to a CMD terminal.
Install esptool using pip command (make sure python 3 is already installed)
Erase the flash memory on the Xiao ESP32S3 using this command
Now proceed to install the micropython firmware using this command
pip install esptool

esptool.py --port COMXX erase_flash

esptool.py --port COMXX --baud 460800 --before default_reset --after hard_reset --chip esp32s3  write_flash --flash_mode dio --flash_size detect --flash_freq 80m 0x0 firmware.bin 

Python code (run on the host PC, e.g. in Thonny) for eye tracking

Python
It detects the eyes using Dlib, calculates their centers, and translates these positions into screen coordinates to move the cursor. This is useful for accessibility applications where users control the computer using eye movements.
import cv2
import dlib
import pyautogui
import numpy as np

# Initialize Dlib's face detector and facial landmarks predictor
# NOTE(review): requires the 68-point model file
# 'shape_predictor_68_face_landmarks.dat' in the working directory;
# dlib raises RuntimeError if it is missing.
detector = dlib.get_frontal_face_detector()
predictor = dlib.shape_predictor('shape_predictor_68_face_landmarks.dat')

# Get the screen size for controlling the cursor
# (used below to map camera-frame coordinates onto the display)
screen_width, screen_height = pyautogui.size()

# Function to calculate the center of the eye
# Approximate the center of one eye from Dlib landmarks.
def eye_center(landmarks, eye_points):
    """Return (x, y) for the eye described by *eye_points*.

    x is the midpoint of the two eye-corner landmarks (indices 0 and 3
    of *eye_points*); y is the midpoint of the upper/lower lid
    landmarks (indices 1 and 4).
    """
    corner_a = landmarks.part(eye_points[0])
    corner_b = landmarks.part(eye_points[3])
    lid_top = landmarks.part(eye_points[1])
    lid_bottom = landmarks.part(eye_points[4])
    center_x = int((corner_a.x + corner_b.x) / 2)
    center_y = int((lid_top.y + lid_bottom.y) / 2)
    return center_x, center_y

# Replace with your ESP32-S3 IP address and port
esp32_url = 'http://<ESP32_IP>:<PORT>/video'

# Open the MJPEG stream served by the ESP32-S3
cap = cv2.VideoCapture(esp32_url)

# 68-point Dlib model indices: right eye is 36-41, left eye is 42-47.
# Hoisted out of the loop — they never change between frames.
RIGHT_EYE_POINTS = [36, 37, 38, 39, 40, 41]
LEFT_EYE_POINTS = [42, 43, 44, 45, 46, 47]

while True:
    ret, frame = cap.read()
    if not ret:
        print("Failed to grab frame from ESP32-S3")
        break

    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)

    for face in detector(gray):
        landmarks = predictor(gray, face)

        # Outline both eyes on the preview frame
        for points in (RIGHT_EYE_POINTS, LEFT_EYE_POINTS):
            contour = np.array(
                [(landmarks.part(p).x, landmarks.part(p).y) for p in points]
            )
            cv2.polylines(frame, [contour], isClosed=True,
                          color=(0, 255, 0), thickness=2)

        # The midpoint between the two eye centers drives the cursor
        right_cx, right_cy = eye_center(landmarks, RIGHT_EYE_POINTS)
        left_cx, left_cy = eye_center(landmarks, LEFT_EYE_POINTS)
        mid_x = int((right_cx + left_cx) / 2)
        mid_y = int((right_cy + left_cy) / 2)

        # Map camera-frame coordinates onto the full screen and move
        cursor_x = np.interp(mid_x, [0, frame.shape[1]], [0, screen_width])
        cursor_y = np.interp(mid_y, [0, frame.shape[0]], [0, screen_height])
        pyautogui.moveTo(cursor_x, cursor_y)

    # Display the frame with the eye mesh
    cv2.imshow("Eye Tracking", frame)

    # Quit on 'Esc' (key code 27)
    if cv2.waitKey(1) & 0xFF == 27:
        break

# Release the capture and close all OpenCV windows
cap.release()
cv2.destroyAllWindows()

This code will set two digital pins high or low based on the EMG signal from the muscles, for left and right clicks.

C/C++
You will detect muscle activations by analyzing the EMG signal strength. When a threshold is crossed (indicating muscle activation), a click action will be triggered.
// Include necessary libraries
#include "stm32f4xx_hal.h"

// Define the digital pins for left and right click
#define LEFT_CLICK_PIN GPIO_PIN_0  // Pin for left click signal
#define RIGHT_CLICK_PIN GPIO_PIN_1 // Pin for right click signal
#define EMG_PORT GPIOA             // Port where pins are connected

// Thresholds for muscle activation from the EMG sensor
// A reading at or above the threshold counts as an activation (see the
// comparison in main()); tune per user and electrode placement.
#define LEFT_CLICK_THRESHOLD 300   // Adjust based on sensor sensitivity
#define RIGHT_CLICK_THRESHOLD 300  // Adjust based on sensor sensitivity

// Function prototypes
void SystemClock_Config(void);
void GPIO_Init(void);
int read_EMG_left_click(void); // Function to read EMG for left click
int read_EMG_right_click(void); // Function to read EMG for right click

// Entry point: bring up HAL, clocks and GPIO, then poll the two EMG
// channels forever, mirroring each activation state onto its pin.
int main(void) {
    HAL_Init();              // Initialize the HAL library
    SystemClock_Config();    // Configure the system clock
    GPIO_Init();             // Configure the click output pins

    while (1) {
        // Sample both EMG channels
        int left_click = read_EMG_left_click();
        int right_click = read_EMG_right_click();

        // Drive each pin high while its muscle reading is at/above the
        // threshold, low otherwise.
        HAL_GPIO_WritePin(EMG_PORT, LEFT_CLICK_PIN,
                          (left_click >= LEFT_CLICK_THRESHOLD)
                              ? GPIO_PIN_SET : GPIO_PIN_RESET);
        // Simulate a left click here (e.g., via Bluetooth HID or USB HID)

        HAL_GPIO_WritePin(EMG_PORT, RIGHT_CLICK_PIN,
                          (right_click >= RIGHT_CLICK_THRESHOLD)
                              ? GPIO_PIN_SET : GPIO_PIN_RESET);
        // Simulate a right click here
    }
}

// Function to initialize the GPIO pins
void GPIO_Init(void) {
    __HAL_RCC_GPIOA_CLK_ENABLE(); // Enable clock for GPIOA

    GPIO_InitTypeDef GPIO_InitStruct = {0};

    // Configure pins for output (left and right click)
    GPIO_InitStruct.Pin = LEFT_CLICK_PIN | RIGHT_CLICK_PIN;
    GPIO_InitStruct.Mode = GPIO_MODE_OUTPUT_PP; // Push-pull output mode
    GPIO_InitStruct.Pull = GPIO_NOPULL;         // No pull-up or pull-down resistors
    GPIO_InitStruct.Speed = GPIO_SPEED_FREQ_LOW; // Low speed for GPIO

    HAL_GPIO_Init(EMG_PORT, &GPIO_InitStruct);
}

// Dummy function to simulate reading EMG data for left click
int read_EMG_left_click(void) {
    // This function should read the actual EMG sensor data via ADC.
    // Return a simulated EMG value for left click muscle activation.
    return 350; // Simulate activation (replace with actual ADC data)
}

// Dummy function to simulate reading EMG data for right click
int read_EMG_right_click(void) {
    // This function should read the actual EMG sensor data via ADC.
    // Return a simulated EMG value for right click muscle activation.
    return 150; // Simulate no activation (replace with actual ADC data)
}

// Function to configure the system clock
void SystemClock_Config(void) {
    // Clock configuration code here (if needed)
}

Laptop Side Code

C/C++
Open the Arduino IDE, select the board (XIAO ESP32S3) and the port, put the board in boot mode, and upload the code.
#include "esp_camera.h"
#include <WiFi.h>

// Replace with your network credentials
const char* ssid = "your_SSID";
const char* password = "your_PASSWORD";

// Define the camera settings
// NOTE(review): CAMERA_MODEL_AI_THINKER selects the ESP32-CAM pin map.
// This project targets the XIAO ESP32S3 Sense, which normally uses
// CAMERA_MODEL_XIAO_ESP32S3 — confirm against camera_pins.h before use.
#define CAMERA_MODEL_AI_THINKER
#include "camera_pins.h"

// Initialize the camera and start the server
void startCameraServer() {
  // Initialize the server
  WiFiServer server(80);
  server.begin();

  while (true) {
    WiFiClient client = server.available();
    if (client) {
      String request = client.readStringUntil('\r');
      client.flush();

      if (request.indexOf("GET /video") != -1) {
        // Start streaming video
        camera_fb_t *fb = NULL;
        String responseHeader = "HTTP/1.1 200 OK\r\nContent-Type: multipart/x-mixed-replace; boundary=frame\r\n\r\n";
        client.print(responseHeader);

        while (client.connected()) {
          fb = esp_camera_fb_get();
          if (!fb) {
            Serial.println("Camera capture failed");
            continue;
          }

          client.write("--frame\r\n");
          client.write("Content-Type: image/jpeg\r\n");
          client.write("Content-Length: ");
          client.write(String(fb->len).c_str());
          client.write("\r\n\r\n");
          client.write(fb->buf, fb->len);
          client.write("\r\n");
          esp_camera_fb_return(fb);
        }
      }
    }
  }
}

void setup() {
  Serial.begin(115200);
  camera_config_t config;
  // Set up camera configuration
  // ...
  esp_camera_init(&config);

  // Connect to Wi-Fi
  WiFi.begin(ssid, password);
  while (WiFi.status() != WL_CONNECTED) {
    delay(500);
    Serial.print(".");
  }
  Serial.println("Connected to Wi-Fi");

  startCameraServer();
}

void loop() {
  // Nothing to do here: startCameraServer() never returns, so this
  // function is never reached after setup().
}

Credits

vishal soni

vishal soni

3 projects • 4 followers
Open to work; electronics enthusiast pursuing an undergraduate degree in Internet of Things.
Jyotishka Chattopadhyay

Jyotishka Chattopadhyay

0 projects • 0 followers

Comments