Sumit Kumar
Published © MIT

Handmade Drawing Recognition Interface, Like on My Smartphone

Learn how to quickly develop TinyML models to recognize custom, user-defined drawn gestures on touch interfaces, embedded in low-power MCUs.

Beginner · Full instructions provided · 6 hours · 1,065 views

Things used in this project

Hardware components

M5Stack Core2 ESP32 IoT Development Kit
The MCU is an ESP32 model D0WDQ6-V3 with dual-core Xtensa® 32-bit 240 MHz LX6 processors that can be controlled separately. Wi-Fi is supported as standard, and the board includes 16 MB of on-board flash, 8 MB of PSRAM, a USB Type-C interface for charging, program download and serial communication, a 2.0-inch integrated capacitive touch screen, and a built-in vibration motor.
×1

Software apps and online services

Neuton TinyML
Automatically build extremely tiny and explainable models without any coding or machine-learning background, and embed them into any microcontroller.
Arduino IDE

Story


Schematics

Working

Code

Data Collection

Arduino
Please install the M5GFX library and the M5Stack Core2 driver.
#include <M5GFX.h>

M5GFX display;
const uint8_t pixelBufferSize = 255;
uint32_t* targetBuff = (uint32_t*) calloc(pixelBufferSize, sizeof(uint32_t));   // allocate memory for pixel buffer with 0s
uint32_t prev = 0;
uint16_t counter = 0;

void setup()
{
    // Initialize the M5Stack object
    display.init();
    Serial.begin(115200);

    if (!targetBuff)
    {
        Serial.println("Failed to allocate memory");
    }

    // Check if the display is present
    if (!display.touch())
    {
        display.setTextDatum(textdatum_t::middle_center);
        display.drawString("Touch not found.", display.width() / 2, display.height() / 2);        
    }

    display.startWrite(); 

    Serial.println("Width: " + String(display.width()));
    Serial.println("Height: " + String(display.height())); 

    // print headers for the csv dataset file
    // for(uint8_t i = 0; i < 255; i++)
    // {  
    //     Serial.print("touch"+String(i));
    //     Serial.print(",");
    // }
    // Serial.println();
}

void loop()
{ 
    static bool drawed = false;
    lgfx::touch_point_t tp[3];

    uint8_t nums = display.getTouchRaw(tp, 3);

    if(nums)
    {
        display.convertRawXY(tp, nums);

        for (uint8_t i = 0; i < nums; ++i)
        {
            display.setCursor(0,0);
            // print X, Y and location(L = i*width + j)
            display.printf("Convert X:%03d  Y:%03d L:%03d", tp[i].x, tp[i].y, (tp[i].y * 320 + tp[i].x));

            // ensure no duplication of touch points
            if((tp[i].y * 320 + tp[i].x) != prev && counter < pixelBufferSize)
            {
                // store pixel location using the relation, l = y*width + x
                targetBuff[counter] = (tp[i].y * display.width()) + tp[i].x;
                prev = (tp[i].y * 320 + tp[i].x);
                counter++;
            }                    
        }
        display.display();

        // draw the points on the screen
        display.setColor(TFT_GREEN);
        for (uint8_t i = 0; i < nums; ++i)
        {
            uint8_t s = tp[i].size + 4;
            switch (tp[i].id)
            {
                case 0:
                    display.fillCircle(tp[i].x, tp[i].y, s);
                    break;
                case 1:
                    display.drawLine(tp[i].x-s, tp[i].y-s, tp[i].x+s, tp[i].y+s);
                    display.drawLine(tp[i].x-s, tp[i].y+s, tp[i].x+s, tp[i].y-s);
                    break;
                default:
                    break;
            }
            display.display();
        }
        drawed = true;
    }

    // clear the screen and go to default state once the sample is drawn
    else if (drawed)
    {   
        // print the pixel locations which are stored in the buffer, user can copy from serial monitor and save in csv file
        for(uint8_t i = 0; i < pixelBufferSize; i++)
        {
           Serial.print(targetBuff[i]);
           Serial.print(",");
        }
        Serial.println();

        drawed = false;
        display.waitDisplay();
        display.clear();
        display.display();

        prev = 0;
        counter = 0;

        free(targetBuff); // free the memory
        targetBuff = (uint32_t*) calloc(pixelBufferSize, sizeof(uint32_t));     // reallocate memory for pixel buffer with 0s
      
        if (!targetBuff)
        {
            Serial.println("Failed to allocate memory");
        }  
    }
    vTaskDelay(1);    
}
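Rather than copying each sample by hand from the serial monitor, a small host-side script can append every printed row to a CSV file together with a label, giving a dataset Neuton can train on. The following is a minimal sketch and not part of the original project: the script name, its command-line arguments and the output file are assumptions, pyserial must be installed, and COM3 (the same port as the app below) should be replaced with your own port. Write the header row (touch0 … touch254 plus a target column) once, for example by uncommenting the header block in setup() above.

import csv
import sys

import serial

# Assumed usage: python log_samples.py <label> <output.csv>
label = sys.argv[1]
out_path = sys.argv[2]

PORT = 'COM3'          # same port as the app below; adjust for your machine
NUM_FEATURES = 255     # matches pixelBufferSize in the data-collection sketch

with serial.Serial(PORT, baudrate=115200, timeout=1) as port, \
        open(out_path, 'a', newline='') as f:
    writer = csv.writer(f)
    while True:
        raw = port.readline().decode(errors='ignore').strip()
        if not raw:
            continue
        # each sample is printed as 255 comma-separated pixel locations with a trailing comma
        values = [v for v in raw.split(',') if v]
        if len(values) != NUM_FEATURES:
            continue  # ignore startup messages and partial reads
        writer.writerow(values + [label])
        print(f'Logged one "{label}" sample ({len(values)} features)')

Run it once per gesture class and draw the same symbol repeatedly; all rows accumulate in the same file, with the gesture label as the final (target) column.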

Python App

Python
import os
import serial
import time

arduino = serial.Serial('COM3', baudrate=115200,bytesize=serial.EIGHTBITS,
                     parity=serial.PARITY_NONE,
                     stopbits=serial.STOPBITS_ONE,
                     timeout=1,
                     xonxoff=0,
                     rtscts=0
                     )
# Toggle DTR to reset Arduino
arduino.setDTR(False)
time.sleep(1)

# toss any data already received (reset_input_buffer() is the
# pyserial 3.x replacement for the deprecated flushInput())
arduino.reset_input_buffer()
arduino.setDTR(True)

with arduino:
    while True:
        line = arduino.readline()
        if line:
            text = line.decode()
            symb = text.strip()
            print(symb)

            # open spotify music app if drawn gesture is m
            if symb == 'm':
                print("Opening Spotify")
                os.system(r'start explorer shell:appsfolder\SpotifyAB.SpotifyMusic_zpdnekdrzrea0!Spotify')

            # if drawn gesture is c then open chrome
            if symb == 'c':
                print("Opening Chrome")
                os.system(r'start explorer shell:appsfolder\Chrome')

            # if drawn gesture is @ then open email
            if symb == '@':
                print("Opening Email")
                os.system(r'start explorer shell:appsfolder\microsoft.windowscommunicationsapps_8wekyb3d8bbwe!microsoft.windowslive.mail')

            # if drawn gesture is w then open whatsapp
            if symb == 'w':
                print("Opening Whatsapp")
                os.system(r'start explorer shell:appsfolder\5319275A.WhatsAppDesktop_cv1g1gvanyjgm!WhatsAppDesktop')

            # if drawn gesture is 0 then open Office
            if symb == '0':
                print("Opening Office")
                os.system(r'start explorer shell:appsfolder\Microsoft.MicrosoftOfficeHub_8wekyb3d8bbwe!Microsoft.MicrosoftOfficeHub')
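
The app assumes pyserial is installed (pip install pyserial) and that the Core2 shows up as COM3; on Linux or macOS the port will be something like /dev/ttyUSB0 instead. The single characters it matches ('m', 'c', '@', 'w', '0') are what the Neuton inference sketch running on the Core2 is expected to print over serial once it recognizes a gesture. The shell:AppsFolder identifiers are machine-specific; if an app does not launch, list the identifiers installed on your system (for example with the Get-StartApps PowerShell cmdlet) and substitute them.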

M5Stack Core2 Neuton TinyML model Code

Credits

Sumit Kumar

21 y/o | Computer Vision Engineer (R&D) @VisAI Labs | Image Processing @e-conSystems | Ex-Embedded AI Engineer @Neuton.ai
