vinay y.n
Published © GPL3+

Facial Recognition Robot with Raspberry Pi and Blynk Control

A face recognition system built with a Raspberry Pi and the Blynk app enhances security by allowing remote control and sending real-time alerts when an unknown person is detected.

Difficulty: Intermediate · Full instructions provided · Time: over 4 days · 505 views
Facial Recognition Robot with Raspberry Pi and Blynk Control

Things used in this project

Hardware components

Raspberry Pi 4 Model B
Raspberry Pi 4 Model B
×1
ESP32
Espressif ESP32
×1
Ultrasonic Sensor - HC-SR04 (Generic)
Ultrasonic Sensor - HC-SR04 (Generic)
×1
Camera Module
Raspberry Pi Camera Module
×1
Dual H-Bridge motor drivers L298
SparkFun Dual H-Bridge motor drivers L298
×1
Geared DC Motor, 12 V
Geared DC Motor, 12 V
×4
Maker Essentials - Micro-motors & Grippy Wheels
Pimoroni Maker Essentials - Micro-motors & Grippy Wheels
×1
Lead acid battery
×1
Power Bank
×1
Metal robot chassis
×1
SG90 Micro-servo motor
SG90 Micro-servo motor
×1

Software apps and online services

Raspbian
Raspberry Pi Raspbian
Blynk
Blynk
Arduino IDE
Arduino IDE
OpenCV
OpenCV – Open Source Computer Vision Library OpenCV

Hand tools and fabrication machines

Soldering iron (generic)
Soldering iron (generic)
3D Printer (generic)
3D Printer (generic)
Solder Wire, Lead Free
Solder Wire, Lead Free
Multitool, Screwdriver
Multitool, Screwdriver

Story

Read more

Schematics

Block diagram

Code

Facial Detection code

Python
A Raspberry Pi GPIO pin is wired to the ESP32 to share the unknown-person alert signal.
# import the necessary packages
from imutils.video import VideoStream
from imutils.video import FPS
import face_recognition
import imutils
import pickle
import time
import cv2
import RPi.GPIO as GPIO

# GPIO setup: BCM pin 26 is pulsed HIGH to tell the ESP32 an unknown
# face was seen (the ESP32 reads it on its RPI_Pin input).
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
data_to_esp = 26
GPIO.setup(data_to_esp, GPIO.OUT)
GPIO.output(data_to_esp, GPIO.LOW)

# Initialize 'currentname' to trigger only when a new person is identified.
currentname = "unknown"
# Known-face encodings produced by train_model.py
encodingsP = "encodings.pickle"

# load the known faces and embeddings
print("[INFO] loading encodings + face detector...")
# context manager closes the file handle (the original leaked it)
with open(encodingsP, "rb") as f:
	data = pickle.loads(f.read())

# initialize the video stream and allow the camera sensor to warm up
# src = 0 : the built-in single web cam, e.g. a laptop webcam
# src = 2 : a USB webcam attached to the laptop
#vs = VideoStream(src=2,framerate=10).start()
vs = VideoStream(usePiCamera=True).start()
time.sleep(0.1)

# start the FPS counter
fps = FPS().start()

# loop over frames from the video file stream
while True:
	# grab the frame from the threaded video stream and resize it
	# to 500px (to speedup processing)
	frame = vs.read()
	frame = imutils.resize(frame, width=500)
	# face_recognition expects RGB ordering while OpenCV frames are BGR;
	# convert first, matching the BGR->RGB conversion done in train_model.py
	rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
	# detect the face bounding boxes
	boxes = face_recognition.face_locations(rgb)
	# compute the facial embeddings for each face bounding box
	encodings = face_recognition.face_encodings(rgb, boxes)
	names = []

	# loop over the facial embeddings
	for encoding in encodings:
		# attempt to match each face in the input image to our known
		# encodings
		matches = face_recognition.compare_faces(data["encodings"],
			encoding)
		name = "Unknown"

		if True in matches:
			# find the indexes of all matched faces then initialize a
			# dictionary to count the total number of times each face
			# was matched
			matchedIdxs = [i for (i, b) in enumerate(matches) if b]
			counts = {}

			# loop over the matched indexes and maintain a count for
			# each recognized face
			for i in matchedIdxs:
				name = data["names"][i]
				counts[name] = counts.get(name, 0) + 1

			# determine the recognized face with the largest number
			# of votes (note: in the event of an unlikely tie Python
			# will select first entry in the dictionary)
			name = max(counts, key=counts.get)

			# if someone in the dataset is identified, print their
			# name on the console (only when the name changes)
			if currentname != name:
				currentname = name
				print(currentname)

		# update the list of names
		names.append(name)

	# loop over the recognized faces
	for ((top, right, bottom, left), name) in zip(boxes, names):
		# draw the predicted face name on the image - color is in BGR
		cv2.rectangle(frame, (left, top), (right, bottom),
			(0, 255, 225), 2)
		y = top - 15 if top - 15 > 15 else top + 15
		cv2.putText(frame, name, (left, y), cv2.FONT_HERSHEY_SIMPLEX,
			.8, (0, 255, 255), 2)
		if name == 'Unknown':
			# pulse the shared pin HIGH for 2 s so the ESP32 raises
			# the "Unknown Person Found" alert in the Blynk app
			print("Unknown found")
			GPIO.output(data_to_esp, GPIO.HIGH)
			time.sleep(2)
			GPIO.output(data_to_esp, GPIO.LOW)
		else:
			GPIO.output(data_to_esp, GPIO.LOW)

	# display the image to our screen
	cv2.imshow("Facial Recognition is Running", frame)
	key = cv2.waitKey(1) & 0xFF

	# quit when 'q' key is pressed
	if key == ord("q"):
		break

	# update the FPS counter
	fps.update()

# stop the timer and display FPS information
fps.stop()
print("[INFO] elapsed time: {:.2f}".format(fps.elapsed()))
print("[INFO] approx. FPS: {:.2f}".format(fps.fps()))

# do a bit of cleanup
cv2.destroyAllWindows()
vs.stop()

Code for Face data storing(face dataset creating)

Python
Code for capturing face images and storing them in a dedicated folder to create the training dataset.
import os

import cv2
from picamera import PiCamera
from picamera.array import PiRGBArray

name = 'chaithrashree' #replace with your name

# Ensure the per-person dataset folder exists before saving images;
# otherwise cv2.imwrite would fail silently and no photos would be stored.
save_dir = os.path.join("dataset", name)
os.makedirs(save_dir, exist_ok=True)

# Configure the Pi camera at a small resolution for fast preview/capture.
cam = PiCamera()
cam.resolution = (512, 304)
cam.framerate = 10
rawCapture = PiRGBArray(cam, size=(512, 304))

img_counter = 0

while True:
    for frame in cam.capture_continuous(rawCapture, format="bgr", use_video_port=True):
        image = frame.array
        cv2.imshow("Press Space to take a photo", image)

        k = cv2.waitKey(1)
        # reset the stream buffer so the next frame can be captured
        # (a single truncate per iteration is sufficient)
        rawCapture.truncate(0)
        if k % 256 == 27: # ESC pressed
            break
        elif k % 256 == 32:
            # SPACE pressed: save the current frame into the dataset folder
            img_name = os.path.join(save_dir, "image_{}.jpg".format(img_counter))
            cv2.imwrite(img_name, image)
            print("{} written!".format(img_name))
            img_counter += 1

    if k % 256 == 27:
        print("Escape hit, closing...")
        break

cv2.destroyAllWindows()
# release the camera hardware explicitly
cam.close()

Code for Training the Dataset

Python
After running the dataset creation code, we need to run this code to train the face data stored in the folders
# import the necessary packages
from imutils import paths
import face_recognition
#import argparse
import pickle
import cv2
import os

# our images are located in the dataset folder, one sub-folder per person
print("[INFO] start processing faces...")
imagePaths = list(paths.list_images("dataset"))

# initialize the list of known encodings and known names
knownEncodings = []
knownNames = []

# loop over the image paths
for (i, imagePath) in enumerate(imagePaths):
	# extract the person name from the image path — it is the parent
	# folder name, e.g. dataset/<name>/image_0.jpg
	print("[INFO] processing image {}/{}".format(i + 1,
		len(imagePaths)))
	name = imagePath.split(os.path.sep)[-2]

	# load the input image; skip unreadable/corrupt files instead of
	# crashing inside cvtColor (cv2.imread returns None on failure)
	image = cv2.imread(imagePath)
	if image is None:
		print("[WARN] could not read {}, skipping".format(imagePath))
		continue
	# convert from BGR (OpenCV ordering) to dlib ordering (RGB)
	rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)

	# detect the (x, y)-coordinates of the bounding boxes
	# corresponding to each face in the input image
	boxes = face_recognition.face_locations(rgb,
		model="hog")

	# compute the facial embedding for each detected face
	encodings = face_recognition.face_encodings(rgb, boxes)

	# loop over the encodings
	for encoding in encodings:
		# add each encoding + name to our set of known names and
		# encodings
		knownEncodings.append(encoding)
		knownNames.append(name)

# dump the facial encodings + names to disk; the context manager
# guarantees the file is closed even if serialization fails
print("[INFO] serializing encodings...")
data = {"encodings": knownEncodings, "names": knownNames}
with open("encodings.pickle", "wb") as f:
	f.write(pickle.dumps(data))

ESP32 Code

Arduino
The ESP32 acts as the controller for the robot's movement, obstacle detection, and communication with the Blynk app.
#define BLYNK_TEMPLATE_ID "TMPLdoAxwioZ"//Replace with your blynk template ID
#define BLYNK_TEMPLATE_NAME "surveillance Robot"//Replace with your blynk template name
#include <ESP32Servo.h>
#define BLYNK_PRINT Serial
#include <WiFi.h>
#include <WiFiClient.h>
#include <BlynkSimpleEsp32.h>

// Connection credentials — fill in before flashing.
char auth[] = "Blynk auth id";  // Blynk auth token
char ssid[] = "Your ssid";      // WiFi network name
char pass[] = "SSID password";  // WiFi password

// L298 H-bridge input pins, two per motor side.
int ml1 = 23;//MOTOR 1
int ml2 = 19;//MOTOR 2
int mr1 = 18;//MOTOR 3
int mr2 = 4;//MOTOR 4

int RPI_Pin = 33; // input pin shared with the Raspberry Pi (driven HIGH on unknown face)
int RPI = 0;      // last value read from RPI_Pin

Servo myservo;    // create servo object to control a servo
int uss_status;   // 1 while the ultrasonic sensor reports an obstacle, else 0
int servoPin = 25; // GPIO pin used to connect the servo control (digital out); stray ';;' removed
char val;          // NOTE(review): never used in this sketch — candidate for removal

int Buzzer = 26;   // buzzer output pin
int trigPin = 13;  // HC-SR04 Trigger
int echoPin = 27;  // HC-SR04 Echo

long duration, cm, inches;          // last ultrasonic measurement (echo time, distance)
unsigned long Buzzer_Intial = 0;    // start timestamp for the buzzer busy-wait
const long Buzzer_interval = 100;   // beep duration in milliseconds


WidgetLCD lcd(V0);      // Blynk app LCD showing drive status
WidgetLCD RPI_lcd(V8);  // Blynk app LCD showing unknown-person alerts
BlynkTimer timer;


// Busy-wait for Buzzer_interval milliseconds without calling delay().
// Records the start time in the global Buzzer_Intial and spins until
// the interval has elapsed.
void Denied_Beep_delay()
{
  for (Buzzer_Intial = millis(); (millis() - Buzzer_Intial) <= Buzzer_interval; )
  {
    // spin
  }
}

// Sound three short beeps on the buzzer to signal a detected obstacle.
void Obstacle_Alert()
{
  int beeps = 3;
  while (beeps-- > 0)
  {
    digitalWrite(Buzzer, HIGH);
    Denied_Beep_delay();
    digitalWrite(Buzzer, LOW);
    Denied_Beep_delay();
  }
}

// Runs whenever the Blynk connection is (re)established: pull the latest
// values of every virtual pin from the server so local state matches the app.
BLYNK_CONNECTED() {
Blynk.syncAll();
}



// Blynk virtual pin V1: forward button.
// Drives forward only while the button is pressed AND no obstacle is
// currently flagged (uss_status == 0); otherwise stops by driving both
// inputs of each H-bridge channel HIGH.
BLYNK_WRITE(V1)
{
  const int pressed = param.asInt();
  if (pressed != 1 || uss_status != 0)
  {
    lcd.clear();
    lcd.print(1, 0, "Stop");
    digitalWrite(ml1, HIGH);
    digitalWrite(ml2, HIGH);
    digitalWrite(mr1, HIGH);
    digitalWrite(mr2, HIGH);
    return;
  }
  lcd.clear();
  lcd.print(1, 0, "Forward"); // (position X: 0-15, position Y: 0-1, message)
  Serial.println("Forward");
  digitalWrite(ml1, HIGH); // left side forward
  digitalWrite(ml2, LOW);
  digitalWrite(mr1, HIGH); // right side forward
  digitalWrite(mr2, LOW);
}

// Blynk virtual pin V4: reverse button.
// Drives both motor sides backwards while pressed; releases to stop.
BLYNK_WRITE(V4)
{
  const int pressed = param.asInt();
  if (pressed != 1)
  {
    lcd.clear();
    lcd.print(1, 0, "Stop");
    digitalWrite(ml1, HIGH);
    digitalWrite(ml2, HIGH);
    digitalWrite(mr1, HIGH);
    digitalWrite(mr2, HIGH);
    return;
  }
  lcd.clear();
  lcd.print(1, 0, "Reverse"); // (position X: 0-15, position Y: 0-1, message)
  Serial.println("reverse");
  digitalWrite(ml1, LOW); // left side reverse
  digitalWrite(ml2, HIGH);
  digitalWrite(mr1, LOW); // right side reverse
  digitalWrite(mr2, HIGH);
}

// Blynk virtual pin V5: left-turn button.
// Left motors run forward while the right side is held off; releasing
// the button stops all motors.
BLYNK_WRITE(V5)
{
  const int pressed = param.asInt();
  if (pressed != 1)
  {
    lcd.clear();
    lcd.print(1, 0, "Stop");
    digitalWrite(ml1, HIGH);
    digitalWrite(ml2, HIGH);
    digitalWrite(mr1, HIGH);
    digitalWrite(mr2, HIGH);
    return;
  }
  lcd.clear();
  lcd.print(1, 0, "Left"); // (position X: 0-15, position Y: 0-1, message)
  Serial.println("Left");
  digitalWrite(ml1, HIGH);
  digitalWrite(ml2, LOW);
  digitalWrite(mr1, LOW);
  digitalWrite(mr2, LOW);
}

// Blynk virtual pin V6: right-turn button.
// Right motors run forward while the left side is held off; releasing
// the button stops all motors.
BLYNK_WRITE(V6)
{
  const int pressed = param.asInt();
  if (pressed != 1)
  {
    lcd.clear();
    lcd.print(1, 0, "Stop");
    digitalWrite(ml1, HIGH);
    digitalWrite(ml2, HIGH);
    digitalWrite(mr1, HIGH);
    digitalWrite(mr2, HIGH);
    return;
  }
  lcd.clear();
  Serial.println("Right");
  lcd.print(1, 0, "Right");
  digitalWrite(ml1, LOW);
  digitalWrite(ml2, LOW);
  digitalWrite(mr1, HIGH);
  digitalWrite(mr2, LOW);
}


// Blynk virtual pin V3: slider setting the camera servo angle.
// Writes the requested angle to the servo and echoes it over serial.
BLYNK_WRITE(V3)
{
  const int angle = param[0].asInt();
  myservo.write(angle);
  Serial.print("Q:");
  Serial.print(angle);
  Serial.println();
}

// Measure distance with the HC-SR04, report it to the Blynk app (V2) and
// set the global uss_status flag when an obstacle is within 2-25 cm
// (which blocks the forward command in BLYNK_WRITE(V1)).
// NOTE(review): delay(100) plus the buzzer busy-waits block loop() for
// over 100 ms per cycle, which slows Blynk handling — confirm this is
// acceptable for your connection.
void ultrasonic()
{
// emit the 10 us trigger pulse that starts a measurement
digitalWrite(trigPin, LOW);
delayMicroseconds(5);
digitalWrite(trigPin, HIGH);
delayMicroseconds(10);
digitalWrite(trigPin, LOW);
// echoPin is already configured as INPUT in setup(); the redundant
// per-call pinMode() has been removed.
duration = pulseIn(echoPin, HIGH);
// Convert the round-trip echo time into a distance
cm = (duration/2) / 29.1;
inches = (duration/2) / 74;
Blynk.virtualWrite(V2,cm);  // push the reading to the app gauge

delay(100);
if((cm >= 2)&&(cm <=25))
{
uss_status = 1;  // obstacle present: forward drive is inhibited
lcd.clear();
lcd.print(1, 0, "Obstacle Found");
Obstacle_Alert();
}
else
{
 uss_status = 0;
}

}
// One-time hardware initialisation: motor/sensor/buzzer pin modes, the
// ESP32 PWM timers and servo, serial debugging, and the Blynk connection.
void setup()
{
pinMode(ml1, OUTPUT);
pinMode(ml2,OUTPUT);
pinMode(mr1, OUTPUT);
pinMode(mr2,OUTPUT);
pinMode(trigPin, OUTPUT);
pinMode(echoPin, INPUT);
pinMode(RPI_Pin,INPUT);
pinMode(Buzzer,OUTPUT);
// reserve all four ESP32 PWM timers for the servo library
ESP32PWM::allocateTimer(0);
ESP32PWM::allocateTimer(1);
ESP32PWM::allocateTimer(2);
ESP32PWM::allocateTimer(3);
myservo.setPeriodHertz(50);// Standard 50hz servo
myservo.attach(servoPin, 500, 2400);   // attaches the servo on servoPin (GPIO 25) with a 500-2400 us pulse range
Serial.begin(9600);
Blynk.begin(auth, ssid, pass);  // blocks until the WiFi + Blynk connection succeeds
}



// Main loop: relay the Raspberry Pi's unknown-person signal to the Blynk
// app (V7 value + alert LCD), run an obstacle scan, then service Blynk.
void loop()
{
  RPI = digitalRead(RPI_Pin);
  if (RPI != 1)
  {
    // no alert: zero the indicator and blank the alert LCD
    Blynk.virtualWrite(V7, 0);
    RPI_lcd.clear();
  }
  else
  {
    // the Pi is signalling an unrecognised face
    Blynk.virtualWrite(V7, 10);
    RPI_lcd.clear();
    RPI_lcd.print(0, 0, "Unknown Person");
    RPI_lcd.print(0, 1, "  Found");
  }
  ultrasonic();
  Blynk.run();
}

Credits

vinay y.n
27 projects • 48 followers
An electronic product engineer with 8 years of experience in the field. The passion for electronics began as a hobby 11 years ago.
Contact
Thanks to Caroline Dunn.

Comments

Please log in or sign up to comment.