Kutluhan Aktar
Published © CC BY

AI-based Aquatic Ultrasonic Imaging & Chemical Water Testing

Identify noxious air bubbles lurking in the substrate w/ ultrasonic scans and assess water pollution based on chemical tests simultaneously.

Expert · Full instructions provided · 3 days · 2,577 views

Things used in this project

Hardware components

Elecrow $1 PCB Prototype
×1
DFRobot UNIHIKER - IoT Python Programming Single Board Computer with Touchscreen
×1
USB Webcam (PK-910H)
×1
Arduino Nano ESP32
×1
DFRobot URM15 - 75KHZ Ultrasonic Sensor
×1
DFRobot Gravity: RS485-to-UART Signal Adapter Module
×1
DFRobot Serial 6-Axis Accelerometer
×1
Adafruit Waterproof DS18B20 Digital temperature sensor
×1
SSD1306 OLED Display (128x64)
×1
LattePanda 3 Delta 864
×1
KITRONIK Edge Connector for BBC micro:bit
×1
4.7K Resistor
×1
SparkFun Button (6x6)
×4
5 mm Common Anode RGB LED
×2
Anycubic Kobra 2 Max
×1
Power Jack
×1
External Battery
×1
USB Buck-Boost Converter Board
×1
Jumper wires (generic)
×1

Software apps and online services

Edge Impulse Studio
NVIDIA TAO RetinaNet
Arduino IDE
OpenCV
Telegram
Autodesk Fusion
Ultimaker Cura
KiCad
Thonny
XAMPP
Microsoft VS Code
MobaXterm

Hand tools and fabrication machines

Soldering iron (generic)
Hot glue gun (generic)

Story


Custom parts and enclosures

Edge Impulse Model with the Ridge Classifier (Arduino Library)

Edge Impulse NVIDIA TAO RetinaNet Model (Linux AARCH64 Application)

Gerber Files

Fabrication Files

ai_based_aquatic_ultrasonic_imaging_case.stl

ai_based_aquatic_ultrasonic_imaging_case_connection.stl

ai_based_aquatic_ultrasonic_imaging_pointer.stl

ai_based_aquatic_ultrasonic_imaging_pointer_cover.stl

ai_based_aquatic_ultrasonic_imaging_camera_holder.stl

Schematics

PCB_1

PCB_2

PCB_3

PCB_4

PCB_5

PCB_6

UNIHIKER

Code

AIoT_Aquatic_Ultrasonic_Imaging.ino

Arduino
         /////////////////////////////////////////////  
        //   AI-based Aquatic Ultrasonic Imaging   //
       //       & Chemical Water Testing          //
      //             ---------------             //
     //           (Arduino Nano ESP32)          //           
    //             by Kutluhan Aktar           // 
   //                                         //
  /////////////////////////////////////////////

//
// Identify noxious air bubbles lurking in the substrate w/ ultrasonic scans and assess water pollution based on chemical tests simultaneously.
//
// For more information:
// https://www.hackster.io/kutluhan-aktar
//
//
// Connections
// Arduino Nano ESP32 :
//                                URM15 - 75KHZ Ultrasonic Sensor via RS485-to-UART Signal Adapter Module
// D3      ------------------------ TX
// D2      ------------------------ RX
// 3.3V    ------------------------ +
// GND     ------------------------ -
//                                Serial 6-Axis Accelerometer
// 3.3V    ------------------------ VCC
// D5      ------------------------ RXD
// D4      ------------------------ TXD
// GND     ------------------------ GND
//                                DS18B20 Waterproof Temperature Sensor
// A1      ------------------------ Data
//                                SSD1306 OLED Display (128x64)
// A4      ------------------------ SDA
// A5      ------------------------ SCL
//                                Control Button (A)
// D6      ------------------------ +
//                                Control Button (B)
// D7      ------------------------ +
//                                Control Button (C)
// D8      ------------------------ +
//                                Control Button (D)
// D9      ------------------------ +
//                                5mm Common Anode RGB LED
// D10     ------------------------ R
// D11     ------------------------ G
// D12     ------------------------ B

// Include the required libraries:
#include <WiFi.h>
#include "DFRobot_RTU.h"
#include <DFRobot_WT61PC.h>
#include <OneWire.h>
#include <DallasTemperature.h>
#include <Adafruit_GFX.h>
#include <Adafruit_SSD1306.h>

// Add the icons to be shown on the SSD1306 OLED display.
#include "logo.h"

// Include the Edge Impulse neural network model converted to an Arduino library:
#include <Aquatic_Air_Bubble_Detection_inferencing.h>

// Define the required parameters to run an inference with the Edge Impulse neural network model.
#define sample_buffer_size 400

// Define the threshold value for the model outputs (predictions).
float threshold = 0.60;

// Define the air bubble class names:
String classes[] = {"bubble", "normal"};

char ssid[] = "<________>";      // your network SSID (name)
char pass[] = "<________>";      // your network password (use for WPA, or use as key for WEP)
int keyIndex = 0;                // your network key Index number (needed only for WEP)

// Define the server on LattePanda 3 Delta 864.
char server[] = "192.168.1.22";
// Define the web application path.
String application = "/Aquatic_Ultrasonic_Imaging/";

// Initialize the WiFiClient object.
WiFiClient client; /* WiFiSSLClient client; */

// Define the buffer (array) to save the ultrasonic scan variables as a 20 x 20 image (400 data points).
#define scan_buffer_size  400
float ultrasonic_scan[scan_buffer_size] = {0};

// Define the URM15 ultrasonic sensor device address and its register variables.
#define SLAVE_ADDR  ((uint16_t)0x0F)

typedef enum{ 
  ePid,
  eVid,
  eAddr,
  eComBaudrate,
  eComParityStop,
  eDistance,
  eInternalTempreture,
  eExternTempreture,
  eControl
}eRegIndex_t;

// Define the modbus object to utilize the RS485-to-UART signal transfer module with the ultrasonic sensor.
DFRobot_RTU modbus(/*s =*/&Serial1);

// Define the 6-axis accelerometer object.
DFRobot_WT61PC accelerometer(&Serial2);

// Define the DS18B20 waterproof temperature sensor settings:
#define ONE_WIRE_BUS A1
OneWire oneWire(ONE_WIRE_BUS);
DallasTemperature DS18B20(&oneWire);

// Define the SSD1306 screen settings:
#define SCREEN_WIDTH 128 // OLED display width, in pixels
#define SCREEN_HEIGHT 64 // OLED display height, in pixels
#define OLED_RESET    -1 // Reset pin # (or -1 if sharing Arduino reset pin)

Adafruit_SSD1306 display(SCREEN_WIDTH, SCREEN_HEIGHT, &Wire, OLED_RESET);

// Create a struct (_data) including all measurements generated by the 6-axis accelerometer:
struct _data {
  float acc_x;
  float acc_y;
  float acc_z;
  float gyro_x;
  float gyro_y;
  float gyro_z;
  float ang_x;
  float ang_y;
  float ang_z;
};

// Define the RGB pin settings.
#define red_pin   D10
#define green_pin D11
#define blue_pin  D12

// Define the control buttons.
#define control_button_A D6
#define control_button_B D7
#define control_button_C D8
#define control_button_D D9

// Define the data holders:
#define RX_1_PIN D2
#define TX_1_PIN D3
#define RX_2_PIN D4
#define TX_2_PIN D5
int predicted_class = -1;
int menu_option = 0, scanned_points = -1;
volatile boolean selected_interface[] = {false, false, false, false};
float water_temperature, distance;
struct _data _acc;

void setup(){
  Serial.begin(115200);

  pinMode(control_button_A, INPUT_PULLUP); pinMode(control_button_B, INPUT_PULLUP); pinMode(control_button_C, INPUT_PULLUP); pinMode(control_button_D, INPUT_PULLUP);
  pinMode(red_pin, OUTPUT); pinMode(green_pin, OUTPUT); pinMode(blue_pin, OUTPUT);
  adjustColor(0,0,0);
    
  // Define the first hardware serial port to communicate with the URM15 ultrasonic sensor via the RS485-to-UART signal adapter module.
  Serial1.begin(19200, SERIAL_8N1, RX_1_PIN, TX_1_PIN);

  // Define the second hardware serial port to communicate with the 6-axis accelerometer.
  Serial2.begin(9600, SERIAL_8N1, RX_2_PIN, TX_2_PIN);

  // Initialize the SSD1306 screen:
  display.begin(SSD1306_SWITCHCAPVCC, 0x3C);
  display.display();
  delay(1000);

  // Configure the URM15 ultrasonic sensor: select the external temperature compensation source and enable the temperature compensation function by writing the control register variable byte (LSB).
  /*
     bit0:
      0 - select onboard temperature
      1 - select external temperature
     bit1:
      0 - enable temperature compensation function
      1 - disable temperature compensation function
     bit2:
      0 - activate auto detection
      1 - activate passive detection
     bit3:
      1 - read distance every 65 ms (in passive detection mode) 
  */
  modbus.writeHoldingRegister(/*id =*/SLAVE_ADDR, /*reg =*/ eControl, /*val =*/0b00000001);
  delay(1000);

  // Configure the data output frequency of the 6-axis accelerometer.
  accelerometer.modifyFrequency(FREQUENCY_200HZ); /* FREQUENCY_0_1HZ, FREQUENCY_0_5HZ, FREQUENCY_1HZ, FREQUENCY_2HZ, FREQUENCY_5HZ, FREQUENCY_10HZ, FREQUENCY_20HZ, FREQUENCY_50HZ, FREQUENCY_100HZ, FREQUENCY_125HZ, FREQUENCY_200HZ */ 

  // Initialize the DS18B20 temperature sensor.
  DS18B20.begin();

  // Connect to WPA/WPA2 network. Change this line if using an open or WEP network.
  WiFi.mode(WIFI_STA);
  WiFi.begin(ssid, pass);
  // Attempt to connect to the given Wi-Fi network.
  while(WiFi.status() != WL_CONNECTED){
    // Wait for the network connection.
    delay(500);
    Serial.print(".");
  }
  // If connected to the network successfully:
  Serial.println("Connected to the Wi-Fi network successfully!");
}

void loop(){
  // Adjust the highlighted menu option by utilizing the control buttons (A and C).
  if(!digitalRead(control_button_A)){
    menu_option-=1;
    if(menu_option < 0) menu_option = 4;
    delay(500);
  }
  if(!digitalRead(control_button_C)){
    menu_option+=1;
    if(menu_option > 4) menu_option = 0;
    delay(500);
  }

  // Show the interface (home) screen.
  show_interface("home", menu_option);

  // If the control button B is pressed, navigate to the selected interface (menu) option.
  if(!digitalRead(control_button_B) && menu_option == 1){
    selected_interface[menu_option-1] = true;
    adjustColor(255,255,0);
    while(selected_interface[menu_option-1]){
      // Read multiple sensor data packets.
      read_ultrasonic_sensor(get_temperature());
      read_accelerometer();
      // Display the retrieved sensor information on the SSD1306 screen.
      show_interface("sensor", menu_option);
      // If the control button D is pressed, redirect the user to the home screen.
      if(!digitalRead(control_button_D)){
        selected_interface[menu_option-1] = false;
        adjustColor(0,0,0);
      }
    }
  }
  
  if(!digitalRead(control_button_B) && menu_option == 2){
    selected_interface[menu_option-1] = true;
    adjustColor(0,255,255);
    // Clear the data buffer.
    scanned_points = -1;
    while(selected_interface[menu_option-1]){
      // Read multiple sensor data packets.
      read_ultrasonic_sensor(get_temperature());
      read_accelerometer();
      // Initiate the ultrasonic image scanning procedure.
      ultrasonic_imaging();
      // Display the ultrasonic scanning progress on the SSD1306 screen.
      show_interface("scan", menu_option);
      // If the control button D is pressed, redirect the user to the home screen.
      if(!digitalRead(control_button_D)){
        selected_interface[menu_option-1] = false;
        adjustColor(0,0,0);
      }
    }
  }

  if(!digitalRead(control_button_B) && menu_option == 3){
    selected_interface[menu_option-1] = true;
    adjustColor(255,0,255);
    while(selected_interface[menu_option-1]){
      // Display the selectable labels (air bubble classes).
      show_interface("save", menu_option);
      // Depending on the passed air bubble class via the control buttons (A and C), transfer the collected ultrasonic scan data (buffer) to the web application via an HTTP POST request.
      if(!digitalRead(control_button_A)){
        if(make_a_post_request("?scan=OK&type=sample&class=normal")){
          // If successful:
          display.clearDisplay();
          display.drawBitmap((SCREEN_WIDTH-connected_width)/2, (SCREEN_HEIGHT-connected_height)/2, connected_bits, connected_width, connected_height, SSD1306_WHITE);
          display.display();
          adjustColor(0,255,0);
          delay(2000);
          adjustColor(255,0,255);
        }else{
          display.clearDisplay();
          display.drawBitmap((SCREEN_WIDTH-error_width)/2, (SCREEN_HEIGHT-error_height)/2, error_bits, error_width, error_height, SSD1306_WHITE);
          display.display();
          adjustColor(255,0,0);
          delay(2000);
          adjustColor(255,0,255);
        }
      }
      if(!digitalRead(control_button_C)){
        if(make_a_post_request("?scan=OK&type=sample&class=bubble")){
          // If successful:
          display.clearDisplay();
          display.drawBitmap((SCREEN_WIDTH-connected_width)/2, (SCREEN_HEIGHT-connected_height)/2, connected_bits, connected_width, connected_height, SSD1306_WHITE);
          display.display();
          adjustColor(0,255,0);
          delay(2000);
          adjustColor(255,0,255);
        }else{
          display.clearDisplay();
          display.drawBitmap((SCREEN_WIDTH-error_width)/2, (SCREEN_HEIGHT-error_height)/2, error_bits, error_width, error_height, SSD1306_WHITE);
          display.display();
          adjustColor(255,0,0);
          delay(2000);
          adjustColor(255,0,255);
        }
      }
      // If the control button D is pressed, redirect the user to the home screen.
      if(!digitalRead(control_button_D)){
        selected_interface[menu_option-1] = false;
        adjustColor(0,0,0);
      }
    }
  }

  if(!digitalRead(control_button_B) && menu_option == 4){
    selected_interface[menu_option-1] = true;
    adjustColor(255,255,255);
    while(selected_interface[menu_option-1]){
      // Display the running inference progress on the SSD1306 screen.
      show_interface("run", menu_option);
      // If the control button A is pressed, run the Edge Impulse neural network model to detect aquatic air bubbles by applying the ultrasonic scan data points collected via the URM15 ultrasonic sensor.
      if(!digitalRead(control_button_A)){
        // Run inference.
        run_inference_to_make_predictions(true);
        delay(2000);
      }
      // After running the neural network model successfully, if the control button C is pressed, transfer the applied data record (ultrasonic scan buffer) and the detected air bubble class to the web application via an HTTP POST request.
      if(!digitalRead(control_button_C) && predicted_class > -1){
        if(make_a_post_request("?scan=OK&type=detection&class=" + classes[predicted_class])){
          // If successful:
          display.clearDisplay();
          display.drawBitmap((SCREEN_WIDTH-connected_width)/2, (SCREEN_HEIGHT-connected_height)/2, connected_bits, connected_width, connected_height, SSD1306_WHITE);
          display.display();
          adjustColor(0,255,0);
          delay(2000);
          adjustColor(255,255,255);
        }else{
          display.clearDisplay();
          display.drawBitmap((SCREEN_WIDTH-error_width)/2, (SCREEN_HEIGHT-error_height)/2, error_bits, error_width, error_height, SSD1306_WHITE);
          display.display();
          adjustColor(255,0,0);
          delay(2000);
          adjustColor(255,255,255);
        }
      }
      // If the control button D is pressed, redirect the user to the home screen.
      if(!digitalRead(control_button_D)){
        selected_interface[menu_option-1] = false;
        adjustColor(0,0,0);
        // Clear the predicted class (label).
        predicted_class = -1;
      }
    }
  }
  
}

void run_inference_to_make_predictions(bool _r){
  // Summarize the Edge Impulse neural network model inference settings (from model_metadata.h):
  Serial.print("\nInference settings:\n");
  Serial.print("\tInterval: "); Serial.print((float)EI_CLASSIFIER_INTERVAL_MS); Serial.print(" ms.\n");
  Serial.printf("\tFrame size: %d\n", EI_CLASSIFIER_DSP_INPUT_FRAME_SIZE);
  Serial.printf("\tSample length: %d ms.\n", EI_CLASSIFIER_RAW_SAMPLE_COUNT / 16);
  Serial.printf("\tNo. of classes: %d\n", sizeof(ei_classifier_inferencing_categories) / sizeof(ei_classifier_inferencing_categories[0]));

  // If the URM15 ultrasonic sensor generates an ultrasonic scan buffer (20 x 20 = 400 points) successfully:
  if(ultrasonic_scan[scan_buffer_size-1] > 0){
    // Run inference:
    ei::signal_t signal;
    // Create a signal object from the resized (scaled) raw data buffer, i.e., the ultrasonic scan buffer.
    numpy::signal_from_buffer(ultrasonic_scan, EI_CLASSIFIER_DSP_INPUT_FRAME_SIZE, &signal);
    // Run the classifier:
    ei_impulse_result_t result = { 0 };
    EI_IMPULSE_ERROR _err = run_classifier(&signal, &result, false);
    if(_err != EI_IMPULSE_OK){
      Serial.printf("ERR: Failed to run classifier (%d)\n", _err);
      return;
    }

    // Print the inference timings on the serial monitor.
    Serial.printf("\nPredictions (DSP: %d ms., Classification: %d ms., Anomaly: %d ms.): \n",
        result.timing.dsp, result.timing.classification, result.timing.anomaly);

    // Obtain the prediction results for each label (class).
    for(size_t ix = 0; ix < EI_CLASSIFIER_LABEL_COUNT; ix++){
      // Print the prediction results on the serial monitor.
      Serial.printf("\t%s:\t%.5f\n", result.classification[ix].label, result.classification[ix].value);
      // Register the predicted label (class) if its confidence score exceeds the given threshold.
      if(result.classification[ix].value >= threshold) predicted_class = ix;
    }
    if(predicted_class > -1) Serial.printf("\nPredicted Class: %d [%s]\n", predicted_class, classes[predicted_class].c_str());

    // Detect anomalies, if any:
    #if EI_CLASSIFIER_HAS_ANOMALY == 1
      Serial.printf("Anomaly: %d \n", result.anomaly);
    #endif 

    // Clear the ultrasonic scan buffer if requested (when _r is false).
    if(!_r){ for(int i=0; i<scan_buffer_size; i++){ ultrasonic_scan[i] = 0; } }
    
  }else{
    Serial.println("\nUltrasonic scan data buffer => Empty!");
  }
}

boolean make_a_post_request(String request){
  // Connect to the web application named Aquatic_Ultrasonic_Imaging. Replace '80' with '443' if you are using an SSL connection.
  if (client.connect(server, 80)){
    // If successful:
    Serial.println("\nConnected to the web application successfully!\n");
    // Create the query string:
    String query = application + request;
    // Make an HTTP POST request:
    String head = "--UltrasonicScan\r\nContent-Disposition: form-data; name=\"ultrasonic_scan\"; filename=\"new_scan.txt\"\r\nContent-Type: text/plain\r\n\r\n";
    String tail = "\r\n--UltrasonicScan--\r\n";
    // Estimate the total message length of the multipart request body.
    uint32_t totalLen = head.length() + sizeof(ultrasonic_scan) + (scan_buffer_size*sizeof(char)) + tail.length();
    // Start the request:
    client.println("POST " + query + " HTTP/1.1");
    client.println("Host: 192.168.1.22");
    client.println("Content-Length: " + String(totalLen));
    client.println("Connection: Keep-Alive");
    client.println("Content-Type: multipart/form-data; boundary=UltrasonicScan");
    client.println();
    client.print(head);
    for(int i=0; i<scan_buffer_size; i++){ client.print(ultrasonic_scan[i]); client.print(",");}
    client.print(tail);
    // Wait for the ultrasonic scan (text) buffer (20x20) transfer to complete.
    delay(2000);
    // If successful:
    Serial.println("HTTP POST => Data transfer completed!\n");
    return true;
  }else{
    Serial.println("\nConnection failed to the web application!\n");
    delay(2000);
    return false;
  }
}

void ultrasonic_imaging(){
  // Detect underwater device movements by utilizing the axis measurements generated by the 6-axis accelerometer (acceleration and angular velocity).
  if(_acc.acc_x > 0 && _acc.gyro_x > 0 && _acc.acc_y > 0 && _acc.gyro_y > 0){
    // If the device is moving underwater inside an arbitrary square, collect the temperature-compensated distance measurements produced by the URM15 ultrasonic sensor
    // and save them as data points in the scan data buffer (20 x 20 = 400 points).
    if(scanned_points < 399){
      scanned_points+=1;
      ultrasonic_scan[scanned_points] = distance/100;
      delay(50);
    }else{
      adjustColor(0,255,0);
      Serial.println("Scan Completed!");
      delay(50);
    }
  }
}

void read_ultrasonic_sensor(float water_temp){
  // Configure the external temperature value by utilizing the evaluated water temperature to generate precise distance measurements.
  water_temp = water_temp*10;
  modbus.writeHoldingRegister(/*id =*/SLAVE_ADDR, /*reg =*/eExternTempreture, /*val =*/water_temp);
  delay(50);
  // Obtain the temperature-compensated distance measurement produced by the URM15 ultrasonic sensor.
  distance = modbus.readHoldingRegister(SLAVE_ADDR, eDistance);
  delay(50);
  // If the sensor is out of range, set the distance to -1.
  if(distance == 65535){
    distance = -1;
    Serial.println("Ultrasonic sensor is out of range!");
  }else{
    distance = distance/10;
  }
  delay(50);
}

void read_accelerometer(){
  // Obtain the X, Y, and Z-axis measurements generated by the 6-axis accelerometer: acceleration, angular velocity, and angle.
  if(accelerometer.available()){
    _acc.acc_x = accelerometer.Acc.X; _acc.acc_y = accelerometer.Acc.Y; _acc.acc_z = accelerometer.Acc.Z;
    _acc.gyro_x = accelerometer.Gyro.X; _acc.gyro_y = accelerometer.Gyro.Y; _acc.gyro_z = accelerometer.Gyro.Z;
    _acc.ang_x = accelerometer.Angle.X; _acc.ang_y = accelerometer.Angle.Y; _acc.ang_z = accelerometer.Angle.Z;
  }
}

float get_temperature(){
  // Obtain the temperature measurement in Celsius, estimated by the DS18B20 temperature sensor.
  DS18B20.requestTemperatures(); 
  float t = DS18B20.getTempCByIndex(0);
  delay(50);
  return t;
}

void show_interface(String com, int menu_option){
  // Get the assigned interface logo information.
  int l_w = interface_widths[menu_option];
  int l_h = interface_heights[menu_option];
  if(com == "home"){
    display.clearDisplay();
    display.drawBitmap(0, (SCREEN_HEIGHT-l_h)/2, interface_logos[menu_option], l_w, l_h, SSD1306_WHITE);   
    display.setTextSize(1); 
    (menu_option == 1) ? display.setTextColor(SSD1306_BLACK, SSD1306_WHITE) : display.setTextColor(SSD1306_WHITE);
    display.setCursor(l_w+5, 5); 
    display.println("1.Show Readings");
    (menu_option == 2) ? display.setTextColor(SSD1306_BLACK, SSD1306_WHITE) : display.setTextColor(SSD1306_WHITE);
    display.setCursor(l_w+5, 20);
    display.println("2.Ultrasonic+++");
    (menu_option == 3) ? display.setTextColor(SSD1306_BLACK, SSD1306_WHITE) : display.setTextColor(SSD1306_WHITE);
    display.setCursor(l_w+5, 35);
    display.println("3.Save Samples");
    (menu_option == 4) ? display.setTextColor(SSD1306_BLACK, SSD1306_WHITE) : display.setTextColor(SSD1306_WHITE);
    display.setCursor(l_w+5, 50);
    display.println("4.Run Inference");
    display.display();
    delay(500);
  }
  else if(com == "sensor"){
    display.clearDisplay();
    display.drawBitmap(SCREEN_WIDTH-l_w, SCREEN_HEIGHT-l_h, interface_logos[menu_option], l_w, l_h, SSD1306_WHITE);   
    display.setTextSize(1);
    display.setCursor(0, 0); 
    display.print("Distance: "); display.print(distance); display.println("cm");
    display.setCursor(0, 20); 
    display.print("X: "); display.print(_acc.acc_x); display.print(" / "); display.print(_acc.gyro_x); 
    display.setCursor(0, 30); 
    display.print("Y: "); display.print(_acc.acc_y); display.print(" / "); display.print(_acc.gyro_y); 
    display.setCursor(0, 40); 
    display.print("Z: "); display.print(_acc.acc_z); display.print(" / "); display.print(_acc.gyro_z);
    display.display();
  }
  else if(com == "scan"){
    display.clearDisplay();
    display.drawBitmap(SCREEN_WIDTH-l_w, SCREEN_HEIGHT-l_h, interface_logos[menu_option], l_w, l_h, SSD1306_WHITE); 
    display.setTextSize(2);
    display.setCursor(0, 0); 
    display.print(scanned_points+1); display.println(" / 400");
    display.setTextSize(1);
    display.setCursor(0, 25); 
    (scanned_points < 399) ? display.print("Scanning...") : display.print("Scan Completed!");
    display.display();
  }
  else if(com == "save"){
    display.clearDisplay();
    display.drawBitmap((SCREEN_WIDTH-l_w)/2, 0, interface_logos[menu_option], l_w, l_h, SSD1306_WHITE);   
    display.setTextSize(1);
    display.setCursor(0, l_h+10); 
    display.print("A) Class => normal");
    display.setCursor(0, l_h+25); 
    display.print("C) Class => bubble");
    display.display();
  }
  else if(com == "run"){
    display.clearDisplay();   
    display.setTextSize(1);
    display.setTextColor(SSD1306_WHITE);
    display.setCursor(0, l_h+5); 
    display.print("A) Run Inference");
    display.setCursor(0, l_h+20);
    // Show the latest model detection result and the assigned class icon if the model yields a label successfully.
    String r = (predicted_class > -1) ? classes[predicted_class] : "Pending"; 
    display.print("C) Send: "+ r);
    (predicted_class > -1) ? display.drawBitmap((SCREEN_WIDTH-class_widths[predicted_class])/2, 0, class_logos[predicted_class], class_widths[predicted_class], class_heights[predicted_class], SSD1306_WHITE) : display.drawBitmap((SCREEN_WIDTH-l_w)/2, 0, interface_logos[menu_option], l_w, l_h, SSD1306_WHITE);
    display.display();
  }
}

void adjustColor(int r, int g, int b){
  analogWrite(red_pin, (255-r));
  analogWrite(green_pin, (255-g));
  analogWrite(blue_pin, (255-b));
}

_class.py

Python
# AI-based Aquatic Ultrasonic Imaging & Chemical Water Testing
#
# UNIHIKER
#
# By Kutluhan Aktar
#
# Identify noxious air bubbles lurking in the substrate w/ ultrasonic scans
# and assess water pollution based on chemical tests simultaneously.
# 
#
# For more information:
# https://www.hackster.io/kutluhan-aktar


import cv2
import numpy
from edge_impulse_linux.image import ImageImpulseRunner
from unihiker import GUI
from pinpong.board import *
from pinpong.extension.unihiker import *
import os
import requests
import datetime
from time import sleep


class aquarium_func():
    def __init__(self, model_file):
        # Initialize the USB high-quality camera feed.
        self.camera = cv2.VideoCapture(0)
        sleep(2)
        # Define the required variables to establish the connection with the web application (Aquatic_Ultrasonic_Imaging).
        self.web_app = "http://192.168.1.22/Aquatic_Ultrasonic_Imaging/"
        # Define the required variables to configure camera settings.
        self.frame_size_m = (320,320)
        self.frame_size_s = (120,120)
        # Define the required configurations to run the Edge Impulse RetinaNet (NVIDIA TAO) object detection model.
        dir_path = os.path.dirname(os.path.realpath(__file__))
        self.model_file = os.path.join(dir_path, model_file)
        self.class_names = ["sterile", "dangerous", "polluted"]
        self.class_colors = ["green", "yellow", "red"]
        self.bb_colors = {"sterile": (0,255,0), "dangerous": (0,255,255), "polluted": (0,0,255)}
        self.selected_class = -1
        self.detected_class = "Pending"
        # Define the required variables to generate an ultrasonic (radar) image.
        self.u_im = {"w": 20, "h": 20, "offset": 20, "temp_path": "./assets/ultrasonic_temp.jpg"}
        # Define the required parameters to transfer information to the given Telegram bot (@aquatic_ultrasonic_bot).
        telegram_bot_token = "<____________>" # e.g., 123456:ABC-DEF1234ghIkl-zyx57W2v1u123ew11
        self.telegram_webhook = "https://api.telegram.org/bot{}".format(telegram_bot_token)
        self.latest_air_label = "..."
        # Initiate the user interface (GUI) on UNIHIKER.
        self.interface = GUI()
        # Initiate the built-in sensor features on UNIHIKER.
        Board().begin()
        # Define the RGB LED pins.
        self.rgb = {"r": Pin(Pin.P4, Pin.OUT), "g": Pin(Pin.P5, Pin.OUT), "b": Pin(Pin.P6, Pin.OUT)}

    def run_inference(self, notify="Telegram", bb_offset=40):
        # Run inference to detect water quality levels based on chemical water tests via object detection.
        with ImageImpulseRunner(self.model_file) as runner:
            try:
                resulting_image = ""
                # Print the information of the Edge Impulse model converted to a Linux (AARCH64) application (.eim).
                model_info = runner.init()
                print('\nLoaded runner for "' + model_info['project']['owner'] + ' / ' + model_info['project']['name'] + '"')
                labels = model_info['model_parameters']['labels']
                # Get the currently captured and modified image via the high-quality USB camera.
                test_img = self.modified_image
                # After obtaining the test frame, resize (if necessary) and generate features from the retrieved frame depending on the provided model so as to run an inference.
                features, cropped = runner.get_features_from_image(test_img)
                res = runner.classify(features)
                # Obtain the prediction (detection) results for each label (class).
                if "bounding_boxes" in res["result"].keys():
                    print('Found %d bounding boxes (%d ms.)' % (len(res["result"]["bounding_boxes"]), res['timing']['dsp'] + res['timing']['classification']))
                    # If the Edge Impulse model predicts a class successfully:
                    if(len(res["result"]["bounding_boxes"]) == 0):
                        self.detected_class = "empty"
                    else:
                        for bb in res["result"]["bounding_boxes"]:
                            # Get the latest detected labels:
                            self.detected_class = bb['label']
                            print('\t%s (%.2f): x=%d y=%d w=%d h=%d' % (bb['label'], bb['value'], bb['x'], bb['y'], bb['width'], bb['height']))
                            cv2.rectangle(cropped, (bb['x']-bb_offset, bb['y']-bb_offset), (bb['x']+bb['width']+bb_offset, bb['y']+bb['height']+bb_offset), self.bb_colors[self.detected_class], 2)
                # Save the generated model resulting image with the passed bounding boxes (if any) to the detections folder.
                if self.detected_class != "empty":
                    date = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
                    resulting_image = "/detections/detection_{}_{}.jpg".format(self.detected_class, date)
                    cv2.imwrite("."+resulting_image, cropped)
                # Notify the user of the model detection results on UNIHIKER.
                self.cam_info_text.config(text="Detection: " + self.detected_class)
                print("\n\nLatest Detected Label => " + self.detected_class)
                if(self.detected_class == "sterile"): self.adjust_color([0,1,0])
                if(self.detected_class == "dangerous"): self.adjust_color([1,1,0])
                if(self.detected_class == "polluted"): self.adjust_color([1,0,0])
                sleep(2)
                self.adjust_color([0,1,1])
                # If requested, also inform the user via Telegram by transferring the modified model resulting image and the latest detected water quality class.
                if(notify == "Telegram" and self.detected_class != "empty"):
                    self.telegram_send_data("water_test", "6465514194", resulting_image)       
            # Stop the running inference.    
            finally:
                if(runner):
                    runner.stop()

    def make_a_get_request(self, com):
        # Depending on the given command, make an HTTP GET request to communicate with the web application.
        if(com == "csv"):
            # If requested, generate a CSV file from the ultrasonic scan information (data records) sent by Nano ESP32.
            req = requests.get(self.web_app + "generate.php?create=csv")
            if(req.status_code == 200):
                if(req.text.find("Server => ") > -1):
                    self.ultra_info_text.config(text="CSV file generated successfully!")
                    self.adjust_color([0,1,1])
                print("\n"+req.text)
            else:
                print("Server => Connection Error: " + str(req.status_code))
        elif(com == "get_model_result"):
            # If requested, get the latest neural network model detection result.
            # Then, convert the retrieved resulting data record to an ultrasonic (radar) image.
            req = requests.get(self.web_app + "generate.php?model_result=OK")
            if(req.status_code == 200):
                data_packet = req.text.split("_")
                self.latest_air_label = data_packet[0]
                data_record = data_packet[1]
                # Generate ultrasonic image.
                self.adjust_color([1,1,0])
                self.generate_ultrasonic_image(data_record)
                # Display the latest generated ultrasonic image with the detected air bubble class (label) for further inspection.
                self.ultrasonic_img.config(image="scans/latest_ultrasonic_image.jpg")
                self.ultra_info_text.config(text="Detected Class: " + self.latest_air_label)
            else:
                print("Server => Connection Error: " + str(req.status_code))
    
    def generate_ultrasonic_image(self, data_record, scanned_image_path="./scans/latest_ultrasonic_image.jpg"):
        x = 0
        y = 0
        # Get template image.
        template = cv2.imread(self.u_im["temp_path"])
        # Obtain the individual data points by decoding the passed data record.
        data_points = data_record.split(",")
        for point in data_points:
            # Draw depth indicators on the image template according to the given data point.
            p = float(point)*100
            if(p >= 15 and p < 20): cv2.rectangle(template, (x,y), (x+self.u_im["w"],y+self.u_im["h"]), (255,255,255), -1)
            if(p >= 20 and p < 25): cv2.rectangle(template, (x,y), (x+self.u_im["w"],y+self.u_im["h"]), (255,255,0), -1)
            if(p >= 25 and p < 30): cv2.rectangle(template, (x,y), (x+self.u_im["w"],y+self.u_im["h"]), (255,0,0), -1)
            if(p >= 30 and p < 35): cv2.rectangle(template, (x,y), (x+self.u_im["w"],y+self.u_im["h"]), (0,255,255), -1)
            if(p >= 35): cv2.rectangle(template, (x,y), (x + self.u_im["w"], y + self.u_im["h"]), (0,255,0), -1)
            # Configure coordinates.
            x += self.u_im["offset"]
            if(x == 400):
                x = 0
                y += self.u_im["offset"]
            print(str(x) + ", " + str(y))
        # Save the generated ultrasonic image.
        cv2.imwrite(scanned_image_path, template)
        print("\nUltrasonic image generated and saved successfully!")

    def telegram_send_data(self, com, chat_id, file_path="/scans/latest_ultrasonic_image.jpg"):
        # Get the file directory.
        _dir = os.path.abspath(os.getcwd())
        if(com == "ultrasonic"):
            path = self.telegram_webhook + "/sendPhoto"
            image_path = _dir + file_path
            # Make an HTTP POST request to transfer the generated ultrasonic image to the given Telegram bot via the Telegram Bot API.
            req = requests.post(path, data={"chat_id": chat_id, "caption": " Ultrasonic Image Received!\n\n Detected Class: "+self.latest_air_label}, files={"photo": open(image_path, 'rb')})
            if(req.status_code == 200):
                self.adjust_color([0,1,0])
                self.ultra_info_text.config(text="Image transferred to the Telegram bot!")
                print("\nImage transferred to the Telegram bot!")
            else:
                print("Server => Connection Error: " + str(req.status_code))
        if(com == "water_test"):
            path = self.telegram_webhook + "/sendPhoto"
            image_path = _dir + file_path
            # Make an HTTP POST request to transfer the model resulting image modified with the passed bounding boxes to the given Telegram bot via the Telegram Bot API.
            req = requests.post(path, data={"chat_id": chat_id, "caption": " Inference running successfully!\n\n Detected Class: " + self.detected_class}, files={"photo": open(image_path, 'rb')})
            if(req.status_code == 200):
                self.adjust_color([0,1,0])
                self.cam_info_text.config(text="Image[{}] sent to Telegram!".format(self.detected_class))
                print("\nModel resulting image transferred to the Telegram bot!")
                sleep(2)
                self.adjust_color([0,1,1])
            else:
                print("Server => Connection Error: " + str(req.status_code))

    def display_camera_feed(self):
        # Display the real-time video stream generated by the USB camera.
        ret, img = self.camera.read()
        # Resize the captured frame depending on the given object detection model.
        self.latest_frame_m = cv2.resize(img, self.frame_size_m)
        # Resize the same frame to display it on the UNIHIKER screen (snapshot).
        self.latest_frame_s = cv2.resize(img, self.frame_size_s)
        # Stop the camera feed if requested.
        if cv2.waitKey(1) & 0xFF == ord('q'):
            self.camera.release()
            cv2.destroyAllWindows()
            print("\nCamera Feed Stopped!")
    
    def take_snapshot(self, filename="assets/snapshot.jpg"):
        # Show the latest camera frame (snapshot) on UNIHIKER to inform the user.
        cv2.imwrite("./"+filename, self.latest_frame_s)
        self.cam_snapshot_img.config(image=filename)
        # Store the latest modified image sample on the memory.
        self.modified_image = self.latest_frame_m
    
    def save_img_sample(self, given_class):
        if(given_class > -1):
            # Create the file name for the image sample.
            date = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
            filename = "IMG_{}_{}.jpg".format(self.class_names[given_class], date)
            # Save the modified image sample.
            cv2.imwrite("./samples/"+filename, self.modified_image)
            print("\nSample Saved Successfully: ./samples/" + filename)
            # Notify the user.
            self.cam_info_text.config(text="Saved: "+filename)
        else:
            self.cam_info_text.config(text="Please select a class.")
        
    def camera_feed(self):
        # Start the camera feed loop.
        while True:
            self.display_camera_feed()
    
    def create_user_interface(self, _x=120, _y=10, offset=15, origin="top_left"):
        # Design the user interface (GUI) via the built-in unihiker module.
        # Camera interface for AI-based chemical water quality test.
        self.cam_backg = self.interface.fill_rect(x=0, y=0, w=240, h=320, color="#9BB5CE")
        self.cam_snapshot_img = self.interface.draw_image(x=60, y=5, image="assets/cam_wait.jpg", origin=origin, onclick=lambda:self.interface_config("clear_class"))
        self.cam_section = self.interface.fill_round_rect(x=5, y=130, r=10, w=230, h=185, color="#215E7C")
        self.cam_run_button = self.interface.fill_round_rect(x=45, y=250, r=5, w=150, h=45, color="#FAE0D8", onclick=self.run_inference)
        self.cam_run_text = self.interface.draw_text(x=120, y=272, text="Run Inference", origin="center", color="#5C5B57", font_size=12, onclick=self.run_inference)
        self.cam_save_button = self.interface.fill_round_rect(x=45, y=195, r=5, w=150, h=45, color="#FAE0D8", onclick=lambda:self.save_img_sample(self.selected_class))
        self.cam_save_text = self.interface.draw_text(x=120, y=217, text="Capture Sample", origin="center", color="#5C5B57", font_size=12, onclick=lambda:self.save_img_sample(self.selected_class))
        self.cam_snap_button = self.interface.fill_round_rect(x=45, y=140, r=5, w=150, h=45, color="#FAE0D8", onclick=self.take_snapshot)
        self.cam_snap_text = self.interface.draw_text(x=120, y=162, text="Snapshot", origin="center", color="#5C5B57", font_size=12)
        self.cam_info_text = self.interface.draw_text(x=120, y=305, text="Pending...", origin="center", color="white", font_size=8)
        # Elements and coordinates (Camera).
        self.cam_int_vars = [self.cam_backg, self.cam_snapshot_img, self.cam_section, self.cam_run_button, self.cam_run_text, self.cam_save_button, self.cam_save_text, self.cam_snap_button, self.cam_snap_text, self.cam_info_text]
        self.cam_int_vals = [0, 60, 5, 45, 120, 45, 120, 45, 120, 120]
        # Ultrasonic sensor interface for AI-based ultrasonic imaging.
        self.ultra_backg = self.interface.fill_rect(x=0, y=0, w=240, h=320, color="#5C5B57")
        self.ultrasonic_img = self.interface.draw_image(x=20, y=0, image="assets/ultrasonic_temp.jpg", origin=origin, onclick=lambda:self.telegram_send_data("ultrasonic", "6465514194"))
        self.ultra_section = self.interface.fill_round_rect(x=5, y=205, r=10, w=230, h=110, color="#F9E5C9")
        self.ultra_ins_button = self.interface.fill_round_rect(x=45, y=260, r=5, w=150, h=35, color="#F5F5F0", onclick=lambda:self.make_a_get_request("get_model_result"))
        self.ultra_ins_text = self.interface.draw_text(x=120, y=277, text="Generate Image", origin="center", color="#5C5B57", font_size=12, onclick=lambda:self.make_a_get_request("get_model_result"))
        self.ultra_gen_button = self.interface.fill_round_rect(x=45, y=215, r=5, w=150, h=35, color="#F5F5F0", onclick=lambda:self.make_a_get_request("csv"))
        self.ultra_gen_text = self.interface.draw_text(x=120, y=232, text="Generate CSV", origin="center", color="#5C5B57", font_size=12, onclick=lambda:self.make_a_get_request("csv"))
        self.ultra_info_text = self.interface.draw_text(x=120, y=305, text="Pending...", origin="center", color="#5C5B57", font_size=8)
        # Elements and coordinates (Ultrasonic Sensor).
        self.ultra_int_vars = [self.ultra_backg, self.ultrasonic_img, self.ultra_section, self.ultra_ins_button, self.ultra_ins_text, self.ultra_gen_button, self.ultra_gen_text, self.ultra_info_text]
        self.ultra_int_vals = [0, 20, 5, 45, 120, 45, 120, 120]
        # Home screen.
        self.main_backg = self.interface.draw_image(x=0, y=0, image="assets/background.jpg", origin=origin, onclick=lambda:self.adjust_color([0,0,0]))
        self.main_ultra_button = self.interface.fill_round_rect(x=20, y=10, r=5, w=200, h=45, color="#5C5B57", onclick=lambda:self.interface_config("ultra"))
        self.main_ultra_text = self.interface.draw_text(x=120, y=32, text="Aquatic Ultrasonic Scan", origin="center", color="white", font_size=12, onclick=lambda:self.interface_config("ultra"))
        self.main_cam_button = self.interface.fill_round_rect(x=20, y=265, r=5, w=200, h=45, color="#9BB5CE", onclick=lambda:self.interface_config("cam"))
        self.main_cam_text = self.interface.draw_text(x=120, y=287, text="Water Quality Test", origin="center", color="white", font_size=12, onclick=lambda:self.interface_config("cam"))
        # Elements and coordinates (Home Screen).
        self.home_int_vars = [self.main_backg, self.main_ultra_button, self.main_ultra_text, self.main_cam_button, self.main_cam_text]
        self.home_int_vals = [0, 20, 120, 20, 120]
        
    def board_configuration(self):
        # Utilize the integrated sensors on UNIHIKER to provide a feature-rich user experience.
        while True:
            # If the control button A is pressed, return to the home screen.
            if button_a.is_pressed() == True:
                self.interface_config("home")
                sleep(1)
            # If the control button B is pressed, change the selected class.
            if button_b.is_pressed() == True:
                self.selected_class+=1
                if self.selected_class == 3:
                    self.selected_class = 0
                self.cam_save_button.config(color=self.class_colors[self.selected_class])
                if(self.selected_class == 0): self.adjust_color([0,1,0])
                if(self.selected_class == 1): self.adjust_color([1,1,0])
                if(self.selected_class == 2): self.adjust_color([1,0,0])
                sleep(1)
    
    def interface_config(self, con, _hide=350):
        if(con == "home"):
            for i in range(len(self.home_int_vals)):
                self.home_int_vars[i].config(x=self.home_int_vals[i])
            for i in range(len(self.cam_int_vals)):
                self.cam_int_vars[i].config(x=_hide)
            for i in range(len(self.ultra_int_vals)):
                self.ultra_int_vars[i].config(x=_hide)
            self.adjust_color([0,0,0])
        elif(con == "cam"):
            for i in range(len(self.home_int_vals)):
                self.home_int_vars[i].config(x=_hide)
            for i in range(len(self.cam_int_vals)):
                self.cam_int_vars[i].config(x=self.cam_int_vals[i])
            for i in range(len(self.ultra_int_vals)):
                self.ultra_int_vars[i].config(x=_hide)
            self.adjust_color([0,1,1])
        elif(con == "ultra"):
            for i in range(len(self.home_int_vals)):
                self.home_int_vars[i].config(x=_hide)
            for i in range(len(self.cam_int_vals)):
                self.cam_int_vars[i].config(x=_hide)
            for i in range(len(self.ultra_int_vals)):
                self.ultra_int_vars[i].config(x=self.ultra_int_vals[i])
            self.adjust_color([1,0,1])
        elif(con == "clear_class"):
            self.selected_class = -1
            self.cam_save_button.config(color="#FAE0D8")
            self.cam_info_text.config(text="Pending...")
            self.adjust_color([0,0,0])

    def adjust_color(self, color):
        self.rgb["r"].write_digital(1-color[0])
        self.rgb["g"].write_digital(1-color[1])
        self.rgb["b"].write_digital(1-color[2])

main.py

Python
# AI-based Aquatic Ultrasonic Imaging & Chemical Water Testing
#
# UNIHIKER
#
# By Kutluhan Aktar
#
# Identify noxious air bubbles lurking in the substrate w/ ultrasonic scans
# and assess water pollution based on chemical tests simultaneously.
# 
#
# For more information:
# https://www.hackster.io/kutluhan-aktar


from _class import aquarium_func
from threading import Thread


# Define the aquarium object.
aquarium = aquarium_func("model/ai-based-aquatic-chemical-water-quality-testing-linux-aarch64.eim")

# Define and initialize threads.
Thread(target=aquarium.camera_feed).start()
Thread(target=aquarium.board_configuration).start()

# Show the user interface (GUI) designed with the built-in UNIHIKER modules.
aquarium.create_user_interface()

generate.php

PHP
<?php

// Obtain the data items for each ultrasonic scan stored in the sample folder as text files.
function read_scans(){
	$information = [];
	// Get all text file paths under the sample folder.
	$files = glob("./sample/*.txt");
	// Read each text file to obtain the ultrasonic scan information (data items).
	foreach($files as $scan){
		$line = [];
		// Derive the provided air bubble label from the given text file name.
		$label = explode("_", $scan)[1];
		array_push($line, $label);
		// Read the ultrasonic scan information.
		$record = fopen($scan, "r"); 
		$data_items = fread($record, filesize($scan));
		// Remove the redundant comma from the data record (scan).
		$data_items = substr($data_items, 0, -1);
		// Append the retrieved data items.
		$data_items = explode(",", $data_items);
		$line = array_merge($line, $data_items);
        array_push($information, $line);
        // Close the text file.
		fclose($record);
	}
	// Return the fetched data items.
	return $information;
}

// Generate a CSV file from the data records (ultrasonic scan information sent by Nano ESP32) stored in the sample folder.
function create_CSV(){
	// Obtain the generated data items array from ultrasonic scans (data records).
	$information = read_scans();
	// Create the scan_data_items.csv file.
	$filename = "scan_data_items.csv";
	$fp = fopen($filename, 'w');
	// Create and add the header to the CSV file.
	$header = [];
	array_push($header, "air_bubble_label");
	for($i=0;$i<400;$i++){ array_push($header, "p_".strval($i)); }
	fputcsv($fp, $header);
	// Append the retrieved data items as rows for each ultrasonic scan to the CSV file.
	foreach($information as $row){
		fputcsv($fp, $row);
	}
	// Close the CSV file.
	fclose($fp);
}

// Obtain the latest data record (ultrasonic scan data points) with the neural network model detection result stored in the detection folder.
function get_latest_detection($folder){
	$scan = scandir($folder, 1);
	// Label (model result).
	$model_result = explode("_", $scan[0])[1];
	// Data record.
	$file = $folder.$scan[0];
	$record = fopen($file, "r");
	$data_items = fread($record, filesize($file));
	// Remove the redundant comma from the data record (scan).
	$data_items = substr($data_items, 0, -1);
	// Append the model result to the data record.
	$data_packet = $model_result."_".$data_items;
	// Pass the generated data packet.
	echo $data_packet;
    // Close the text file.
    fclose($record);
}

// If requested, create a CSV file from the stored aquatic ultrasonic scan samples.
if(isset($_GET["create"]) && $_GET["create"] == "csv"){
	create_CSV();
	echo "Server => CSV file created successfully!";
}

// If requested, pass the latest data record with the neural network model detection result.
if(isset($_GET["model_result"]) && $_GET["model_result"] == "OK"){
	get_latest_detection("./detection/");
}

?>
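
To verify generate.php without the UNIHIKER in the loop, you can query its endpoints from any workstation on the same network. The snippet below is a minimal sketch (not part of the project files), assuming the XAMPP server hosts the application at http://192.168.1.22/Aquatic_Ultrasonic_Imaging/ as configured in _class.py.

# Hypothetical quick test for the generate.php endpoints.
import requests

BASE_URL = "http://192.168.1.22/Aquatic_Ultrasonic_Imaging/"

# Ask the server to build scan_data_items.csv from the stored scan samples.
csv_response = requests.get(BASE_URL + "generate.php", params={"create": "csv"})
print(csv_response.status_code, csv_response.text)

# Fetch the latest detection record, formatted as "<label>_<p_0>,<p_1>,...,<p_399>".
result_response = requests.get(BASE_URL + "generate.php", params={"model_result": "OK"})
if result_response.status_code == 200:
    label, _, data_points = result_response.text.partition("_")
    print("Detected class:", label)
    print("Number of data points:", len(data_points.split(",")))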

index.php

PHP
<?php

# Get the current date and time.
$date = date("Y_m_d_H_i_s");

# Define the text file name of the received ultrasonic scan data.
$txt_file = "%s_%s__".$date;
$save_folder = "";

// If Arduino Nano ESP32 transfers the ultrasonic scan data with the selected sample type or the detected class (model results), modify the text file name accordingly.
if(isset($_GET["scan"]) && isset($_GET["type"]) && isset($_GET["class"])){
	$txt_file = sprintf($txt_file, $_GET["type"], $_GET["class"]);
	$save_folder = $_GET["type"];
}

// If Arduino Nano ESP32 transmits an ultrasonic scan sample or detection after running the neural network model, save the received information as a TXT file according to the provided variables (sample or detection).
if(!empty($_FILES["ultrasonic_scan"]['name'])){
	// Text File:
	$received_scan_properties = array(
	    "name" => $_FILES["ultrasonic_scan"]["name"],
	    "tmp_name" => $_FILES["ultrasonic_scan"]["tmp_name"],
		"size" => $_FILES["ultrasonic_scan"]["size"],
		"extension" => pathinfo($_FILES["ultrasonic_scan"]["name"], PATHINFO_EXTENSION)
	);
	
    // Check whether the uploaded file's extension is in the allowed file formats.
	$allowed_formats = array('jpg', 'png', 'bmp', 'txt');
	if(!in_array($received_scan_properties["extension"], $allowed_formats)){
		echo 'FILE => File Format Not Allowed!';
	}else{
		// Check whether the uploaded file size exceeds the 5 MB data limit.
		if($received_scan_properties["size"] > 5000000){
			echo "FILE => File size cannot exceed 5MB!";
		}else{
			// Save the uploaded file (TXT).
			move_uploaded_file($received_scan_properties["tmp_name"], "./".$save_folder."/".$txt_file.".".$received_scan_properties["extension"]);
			echo "FILE => Saved Successfully!";
		}
	}
}

?>
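
The upload endpoint in index.php can be exercised the same way, without waiting for the Nano ESP32. The following sketch, under the same base-URL assumption, posts a dummy 400-point scan as the ultrasonic_scan multipart field, mirroring the make_a_post_request() routine in the Arduino sketch.

# Hypothetical upload test for index.php.
import requests

BASE_URL = "http://192.168.1.22/Aquatic_Ultrasonic_Imaging/"

# Build a dummy 20 x 20 scan buffer (400 points) as comma-separated text,
# matching the trailing-comma format produced by the Arduino sketch.
dummy_scan = "".join("0.25," for _ in range(400))

response = requests.post(
    BASE_URL + "index.php",
    params={"scan": "OK", "type": "sample", "class": "normal"},
    files={"ultrasonic_scan": ("new_scan.txt", dummy_scan, "text/plain")},
)
# Should print "FILE => Saved Successfully!" if the sample folder exists on the server.
print(response.status_code, response.text)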

logo.h

C/C++
#define connected_width 40
#define connected_height 40
PROGMEM static const unsigned char connected_bits[] = {
0x00, 0x00, 0xFF, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xF0, 0x00, 0x00, 0x3F, 0xFF, 0xFC, 0x00, 0x00,
0xFF, 0xFF, 0xFF, 0x00, 0x01, 0xF3, 0xFF, 0x8F, 0x80, 0x03, 0xC7, 0xB9, 0xC3, 0xC0, 0x07, 0x8F,
0x38, 0xE1, 0xE0, 0x0F, 0x0E, 0x38, 0xF0, 0xF0, 0x1E, 0x1C, 0x38, 0x78, 0x78, 0x3F, 0xFF, 0xFF,
0xFF, 0xFC, 0x3F, 0xFF, 0xFF, 0xFF, 0xFC, 0x7F, 0xFF, 0xFF, 0xFF, 0xFE, 0x70, 0x38, 0x38, 0x1C,
0x0E, 0xE0, 0x70, 0x38, 0x0C, 0x07, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0xCE, 0x63, 0x18, 0xC4, 0x63, 0xCE, 0x61, 0xBD, 0x86, 0x73, 0x6B,
0x61, 0xBD, 0x86, 0xD6, 0x7B, 0xC1, 0xE7, 0x83, 0x9E, 0x79, 0xC0, 0xE7, 0x03, 0x9E, 0x31, 0xC0,
0xC7, 0x03, 0x8C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0xE0, 0x70, 0x38, 0x0E, 0x07, 0x70, 0x38, 0x38, 0x1C, 0x0E, 0x7F, 0xFF, 0xFF, 0xFF,
0xFE, 0x3F, 0xFF, 0xFF, 0xFF, 0xFC, 0x3F, 0xFF, 0xFF, 0xFF, 0xFC, 0x1E, 0x1C, 0x38, 0x78, 0x38,
0x0F, 0x0E, 0x38, 0xF0, 0xF0, 0x07, 0x8F, 0x38, 0xE1, 0xE0, 0x03, 0xC7, 0xB9, 0xC3, 0xC0, 0x01,
0xF3, 0xFF, 0x8F, 0x80, 0x00, 0xFF, 0xFF, 0xBF, 0x00, 0x00, 0x3F, 0xFF, 0xFC, 0x00, 0x00, 0x0F,
0xFF, 0xF0, 0x00, 0x00, 0x00, 0xFF, 0x00, 0x00, 
};

#define error_width 40
#define error_height 33
PROGMEM static const unsigned char error_bits[] = {
0x00, 0x00, 0x3C, 0x00, 0x00, 0x00, 0x00, 0x7E, 0x00, 0x00, 0x00, 0x00, 0xFF, 0x00, 0x00, 0x00,
0x00, 0xFF, 0x80, 0x00, 0x00, 0x01, 0xFF, 0x80, 0x00, 0x00, 0x01, 0xFF, 0xC0, 0x00, 0x00, 0x03,
0xFF, 0xC0, 0x00, 0x00, 0x07, 0xE7, 0xE0, 0x00, 0x00, 0x07, 0xC3, 0xF0, 0x00, 0x00, 0x0F, 0xC3,
0xF0, 0x00, 0x00, 0x0F, 0xC3, 0xF8, 0x00, 0x00, 0x1F, 0xC3, 0xF8, 0x00, 0x00, 0x3F, 0xC3, 0xFC,
0x00, 0x00, 0x3F, 0xC3, 0xFE, 0x00, 0x00, 0x7F, 0xC3, 0xFE, 0x00, 0x00, 0xFF, 0xC3, 0xFF, 0x00,
0x00, 0xFF, 0xC3, 0xFF, 0x80, 0x01, 0xFF, 0xC3, 0xFF, 0x80, 0x01, 0xFF, 0xC3, 0xFF, 0xC0, 0x03,
0xFF, 0xC3, 0xFF, 0xC0, 0x07, 0xFF, 0xC3, 0xFF, 0xE0, 0x07, 0xFF, 0xFF, 0xFF, 0xF0, 0x0F, 0xFF,
0xFF, 0xFF, 0xF0, 0x1F, 0xFF, 0xFF, 0xFF, 0xF8, 0x1F, 0xFF, 0xC3, 0xFF, 0xF8, 0x3F, 0xFF, 0xC3,
0xFF, 0xFC, 0x3F, 0xFF, 0xC3, 0xFF, 0xFE, 0x7F, 0xFF, 0xC3, 0xFF, 0xFE, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0x3F, 0xFF, 0xFF, 0xFF, 0xFE, 
};

#define home_width 32
#define home_height 23
PROGMEM static const unsigned char home_bits[] = {
0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0xE0, 0x00, 0x00, 0x0F, 0xC0, 0x00, 0x00, 0x19, 0xE0, 0x00,
0x00, 0x3E, 0x30, 0x00, 0x01, 0xFF, 0xDC, 0x1C, 0x03, 0xFF, 0xF0, 0x78, 0x07, 0xFF, 0xF8, 0xF0,
0x0E, 0xFF, 0xFF, 0xF0, 0x0F, 0xFF, 0xFF, 0xF0, 0x1F, 0xFF, 0xFF, 0xF0, 0x07, 0xFF, 0xF8, 0xF0,
0x03, 0xFF, 0xE4, 0x78, 0x00, 0xFF, 0x86, 0x38, 0x00, 0x00, 0x60, 0x0C, 0x00, 0x01, 0xF0, 0x06,
0x00, 0x00, 0xF8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
};

#define data_width 32
#define data_height 32
PROGMEM static const unsigned char data_bits[] = {
0x00, 0x00, 0x80, 0x00, 0x00, 0x1F, 0xFC, 0x00, 0x00, 0x7F, 0xFF, 0x00, 0x01, 0xFF, 0xFF, 0xC0,
0x03, 0xFF, 0xFF, 0xE0, 0x07, 0xFF, 0xFF, 0xF0, 0x0F, 0xFF, 0xFF, 0xB8, 0x1F, 0xFF, 0xFF, 0x38,
0x1F, 0xFF, 0xFF, 0x3C, 0x3F, 0xFF, 0xFE, 0xFC, 0x3F, 0xFF, 0xFD, 0xFE, 0x7F, 0xFF, 0xF9, 0x7E,
0x7F, 0xF9, 0xF2, 0x7E, 0x7F, 0xF0, 0xE6, 0x7F, 0x7F, 0xE7, 0x0A, 0x7F, 0x7F, 0xCD, 0x92, 0x7F,
0x7F, 0x98, 0xF2, 0x7F, 0x7E, 0x69, 0xF2, 0x7F, 0x7C, 0xC9, 0x92, 0x7F, 0x7F, 0xC9, 0x92, 0x7E,
0x7F, 0x49, 0x92, 0x7E, 0x3E, 0x49, 0x92, 0x7E, 0x3E, 0x49, 0x92, 0x7E, 0x1F, 0xFF, 0xFF, 0xFC,
0x18, 0x00, 0x00, 0x1C, 0x0C, 0x00, 0x00, 0x38, 0x07, 0xFF, 0xFF, 0xF0, 0x03, 0xFF, 0xFF, 0xE0,
0x01, 0xFF, 0xFF, 0xC0, 0x00, 0xFF, 0xFF, 0x80, 0x00, 0x3F, 0xFE, 0x00, 0x00, 0x07, 0xE0, 0x00
};

#define sensor_width 32
#define sensor_height 29
PROGMEM static const unsigned char sensor_bits[] = {
0x00, 0x00, 0x00, 0x70, 0xE0, 0x00, 0x02, 0x70, 0xF0, 0x00, 0x07, 0x38, 0xF0, 0x00, 0x07, 0x38,
0xF0, 0x00, 0x63, 0x9C, 0xF0, 0x00, 0x71, 0x9C, 0xF0, 0x02, 0x39, 0xCE, 0xF0, 0x07, 0x18, 0xCE,
0xF0, 0x03, 0x9C, 0xEE, 0x00, 0x71, 0xCC, 0xE6, 0x30, 0x79, 0xCE, 0xE6, 0x3C, 0x3C, 0xCE, 0x67,
0x3C, 0x1C, 0xCE, 0x67, 0x3E, 0x0C, 0xEE, 0x67, 0x3E, 0x0C, 0xEE, 0x67, 0x3C, 0x1C, 0xCE, 0x67,
0x3C, 0x1C, 0xCE, 0x67, 0x30, 0x79, 0xCE, 0xE6, 0x00, 0x71, 0xCC, 0xE6, 0xF0, 0x03, 0x9C, 0xE6,
0xF0, 0x07, 0x1C, 0xCE, 0xF0, 0x02, 0x39, 0xCE, 0xF0, 0x00, 0x71, 0x9C, 0xF0, 0x00, 0x73, 0x9C,
0xF0, 0x00, 0x07, 0x18, 0xF0, 0x00, 0x07, 0x38, 0xE0, 0x00, 0x06, 0x70, 0x00, 0x00, 0x00, 0x70,
0x00, 0x00, 0x00, 0x60, 
};

#define save_width 32
#define save_height 32
PROGMEM static const unsigned char save_bits[] = {
0x3F, 0xFF, 0xFF, 0xC0, 0x7E, 0x00, 0x00, 0xF0, 0xFC, 0x00, 0x00, 0x78, 0xFC, 0x00, 0x3C, 0x7C,
0xFC, 0x00, 0x3C, 0x7E, 0xFC, 0x00, 0x3C, 0x7E, 0xFC, 0x00, 0x3C, 0x7F, 0xFC, 0x00, 0x3C, 0x7F,
0xFC, 0x00, 0x3C, 0x7F, 0xFC, 0x00, 0x3C, 0x7F, 0xFC, 0x00, 0x3C, 0x7F, 0xFC, 0x00, 0x00, 0x7F,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xF0, 0x00, 0x00, 0x0F, 0xF0, 0x00, 0x00, 0x0F, 0xF0, 0x00, 0x00, 0x0F, 0xF0, 0x00, 0x00, 0x0F,
0xF0, 0x00, 0x00, 0x0F, 0xF0, 0x00, 0x00, 0x0F, 0xF0, 0x00, 0x00, 0x0F, 0xF0, 0x00, 0x00, 0x0F,
0xF0, 0x00, 0x00, 0x0F, 0xF0, 0x00, 0x00, 0x0F, 0xF0, 0x00, 0x00, 0x0F, 0xF0, 0x00, 0x00, 0x0F,
0xF0, 0x00, 0x00, 0x0F, 0xF0, 0x00, 0x00, 0x0F, 0x70, 0x00, 0x00, 0x0E, 0x3F, 0xFF, 0xFF, 0xFC
};

#define run_width 32
#define run_height 34
PROGMEM static const unsigned char run_bits[] = {
0x00, 0x00, 0x01, 0x80, 0x00, 0x0F, 0x82, 0xC0, 0x00, 0x08, 0x86, 0x40, 0x00, 0x08, 0x82, 0x40,
0x00, 0x18, 0x81, 0x80, 0x00, 0x70, 0x81, 0x00, 0x0C, 0xC0, 0x81, 0x00, 0x13, 0x00, 0x81, 0x00,
0x30, 0x00, 0x81, 0x0E, 0x20, 0x03, 0x81, 0x12, 0x60, 0x0F, 0xFF, 0x12, 0x40, 0x18, 0xFE, 0x3E,
0xC0, 0x20, 0x80, 0x64, 0x30, 0x60, 0x81, 0x80, 0x10, 0x40, 0xFE, 0x00, 0x10, 0xC0, 0x80, 0x1E,
0x10, 0xC0, 0xFF, 0xF2, 0x10, 0xC0, 0xFF, 0xF2, 0x10, 0xC0, 0x80, 0x1E, 0x10, 0x40, 0xFE, 0x00,
0x30, 0x60, 0xFF, 0x80, 0x40, 0x20, 0x80, 0xE0, 0x40, 0x10, 0xFE, 0x3E, 0x40, 0x0F, 0xFF, 0x12,
0x20, 0x03, 0x81, 0x12, 0x30, 0x00, 0x81, 0x1E, 0x13, 0x00, 0x81, 0x00, 0x1C, 0xC0, 0x81, 0x00,
0x00, 0x70, 0x81, 0x00, 0x00, 0x18, 0x81, 0x80, 0x00, 0x08, 0x82, 0x40, 0x00, 0x08, 0x86, 0x40,
0x00, 0x0F, 0x82, 0x40, 0x00, 0x00, 0x01, 0x80, 
};

// Define the assigned interface logo information as arrays.
PROGMEM static const unsigned char *interface_logos[] = {home_bits, data_bits, sensor_bits, save_bits, run_bits};
int interface_widths[] = {home_width, data_width, sensor_width, save_width, run_width};
int interface_heights[] = {home_height, data_height, sensor_height, save_height, run_height};

#define bubble_width 32
#define bubble_height 32
PROGMEM static const unsigned char bubble_bits[] = {
0x00, 0x00, 0x00, 0x70, 0x00, 0x00, 0x01, 0xFC, 0x00, 0x00, 0x01, 0xCE, 0x00, 0x00, 0x03, 0xC6,
0x00, 0x00, 0x03, 0xF2, 0x00, 0x00, 0x03, 0xF3, 0x0F, 0x80, 0x03, 0xFB, 0x1F, 0xE0, 0x03, 0xFE,
0x3F, 0xF0, 0x03, 0xFE, 0x79, 0xF0, 0x01, 0xFE, 0x71, 0xE0, 0x01, 0xFC, 0x67, 0xC7, 0xC0, 0x70,
0xE7, 0x9F, 0xF0, 0x00, 0xEF, 0x3F, 0xF8, 0x00, 0xFE, 0x78, 0xFC, 0x00, 0xFE, 0xF0, 0xFE, 0x00,
0x7C, 0xE7, 0xFE, 0x00, 0x7C, 0xCF, 0xFF, 0x00, 0x3D, 0xCF, 0xFF, 0x00, 0x39, 0xDF, 0xFF, 0x00,
0x09, 0x9F, 0xFF, 0x80, 0x01, 0x9F, 0xFF, 0x80, 0x01, 0xFF, 0xFF, 0x80, 0x01, 0xFF, 0xFF, 0x00,
0x01, 0xFF, 0xFF, 0x00, 0x00, 0xFF, 0xFF, 0x00, 0x00, 0xFF, 0xFE, 0x00, 0x00, 0x7F, 0xFE, 0x00,
0x00, 0x7F, 0xFC, 0x00, 0x00, 0x3F, 0xF8, 0x00, 0x00, 0x0F, 0xF0, 0x00, 0x00, 0x01, 0x80, 0x00
};

#define normal_width 32
#define normal_height 32
PROGMEM static const unsigned char normal_bits[] = {
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, 0xFE, 0x3F, 0xC0, 0x1F, 0xFE, 0x7F, 0xF0, 0x3F, 0xFC,
0x3F, 0xF8, 0x3F, 0xF8, 0x1F, 0xF8, 0x7F, 0xF0, 0x0F, 0xFC, 0x7F, 0xF0, 0x0F, 0xFC, 0x7F, 0xE0,
0x07, 0xFC, 0x7F, 0xC0, 0x03, 0xFE, 0x7F, 0x80, 0x00, 0xF3, 0xDF, 0x00, 0x00, 0x01, 0x80, 0x00,
0x00, 0x01, 0xFC, 0x00, 0x00, 0x01, 0xC7, 0x00, 0x00, 0x00, 0x01, 0x80, 0x00, 0x00, 0x00, 0x80,
0x00, 0x00, 0x01, 0x80, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x00, 0xFF, 0xF8, 0x00,
0x03, 0xFF, 0xF8, 0x00, 0x07, 0xFF, 0xFC, 0x00, 0x0C, 0x1F, 0xFE, 0x00, 0x01, 0x87, 0xFF, 0x00,
0x0F, 0xF3, 0xFF, 0x00, 0x0F, 0xF9, 0xFE, 0x00, 0x0F, 0xFE, 0x00, 0x00, 0x0F, 0xFF, 0x86, 0x00,
0x07, 0xFF, 0xFC, 0x00, 0x03, 0xFF, 0xF8, 0x00, 0x00, 0x7F, 0xE0, 0x00, 0x00, 0x00, 0x00, 0x00
};

// Define the assigned air bubble class icon information as arrays.
PROGMEM static const unsigned char *class_logos[] = {bubble_bits, normal_bits};
int class_widths[] = {bubble_width, normal_width};
int class_heights[] = {bubble_height, normal_height};
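
The monochrome icons above follow the horizontal, MSB-first, byte-per-8-pixels layout that Adafruit_GFX's drawBitmap() expects. If you want to design extra icons for the menu, a small conversion script along these lines can produce the byte arrays; it is only a sketch, assuming Pillow is installed and the icon is drawn with black pixels on a white background.

# Hypothetical helper that converts a monochrome icon into the
# MSB-first byte array format used by logo.h (Adafruit_GFX drawBitmap).
from PIL import Image

def image_to_progmem_array(path, name):
    img = Image.open(path).convert("1")   # 1-bit image: black (0) or white (255)
    w, h = img.size
    row_bytes = (w + 7) // 8              # each row is padded to a full byte
    data = bytearray(row_bytes * h)
    for y in range(h):
        for x in range(w):
            if img.getpixel((x, y)) == 0:                 # black pixel -> bit set (drawn)
                data[y * row_bytes + x // 8] |= 0x80 >> (x % 8)
    # Print the C definitions in the same style as logo.h.
    print("#define {}_width {}".format(name, w))
    print("#define {}_height {}".format(name, h))
    print("PROGMEM static const unsigned char {}_bits[] = {{".format(name))
    hex_bytes = ["0x{:02X}".format(b) for b in data]
    for i in range(0, len(hex_bytes), 16):
        print(", ".join(hex_bytes[i:i + 16]) + ",")
    print("};")

# Example usage: image_to_progmem_array("new_icon.png", "new_icon")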

Credits

Kutluhan Aktar

AI & Full-Stack Developer | @EdgeImpulse | @Particle | Maker | Independent Researcher