Cindy
Created September 8, 2021

Running Posture Analysis

Things used in this project

Story

Schematics

Connection KV260

Code

rungesture.ipynb

Python
rungesture with SSD
{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "e81456c5-ee48-4060-a5ff-317ee6211ac7",
   "metadata": {},
   "source": [
    "# Run Gesture with PYNY-DPY\n"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "8e2c42cc-39bd-4fe5-86e9-ecd8af865e5c",
   "metadata": {},
   "source": [
    "### 1. Prepare the overlay, download the overlay onto the board.And Import libs."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 53,
   "id": "8fbf247d-e976-4361-8519-1548c236fdae",
   "metadata": {},
   "outputs": [],
   "source": [
    "from pynq_dpu import DpuOverlay\n",
    "#overlay = DpuOverlay(\"dpu.bit\"); #overlaySSD = DpuOverlay(\"dpu.bit\");overlaySP = DpuOverlay(\"dpu.bit\")\n",
    "import os\n",
    "import time\n",
    "import numpy as np\n",
    "import cv2\n",
    "import matplotlib.pyplot as plt\n",
    "\n",
    "from pynq.lib.video import *\n",
    "from PIL import Image\n",
    "%matplotlib inline\n",
    "#help(overlay) #help (overlaySP)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 54,
   "id": "44a01791-6d2a-4048-a008-1e2b1541d7c7",
   "metadata": {},
   "outputs": [],
   "source": [
    "#!pip install tflite_runtime-2.1.0.post1-cp38-cp38-linux_aarch64.whl\n",
    "#import tflite_runtime.interpreter as tflite\n",
    "#interpreter = tflite.Interpreter(model_path='resources/movenet.tflite')"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "fa2236b8-21d3-4b90-b5a7-a907758e5b06",
   "metadata": {},
   "source": [
    "### 2. Prepare the environment\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 55,
   "id": "6aacf9c3-2695-4ab2-a1be-d5fbbadc6907",
   "metadata": {},
   "outputs": [],
   "source": [
    "overlay = DpuOverlay(\"dpu.bit\")\n",
    "overlay.load_model(\"resources/ssd_pedestrian_pruned_0_97.xmodel\")\n",
    "#overlay1.load_model(\"resources/ssd_pedestrian_pruned_0_97.xmodel\")\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 78,
   "id": "2f306ef2-d454-4510-902d-b421f733e72f",
   "metadata": {},
   "outputs": [],
   "source": [
    "_R_MEAN = 123.68\n",
    "_G_MEAN = 116.78\n",
    "_B_MEAN = 103.94\n",
    "\n",
    "MEANS = [_B_MEAN,_G_MEAN,_R_MEAN]\n",
    "\n",
    "def resize_shortest_edge(image, size):\n",
    "    H, W = image.shape[:2]\n",
    "    if H >= W:\n",
    "        nW = size\n",
    "        nH = int(float(H)/W * size)\n",
    "    else:\n",
    "        nH = size\n",
    "        nW = int(float(W)/H * size)\n",
    "    return cv2.resize(image,(nW,nH))\n",
    "\n",
    "def mean_image_subtraction(image, means):\n",
    "    B, G, R = cv2.split(image)\n",
    "    B = B - means[0]\n",
    "    G = G - means[1]\n",
    "    R = R - means[2]\n",
    "    image = cv2.merge([R, G, B])\n",
    "    return image\n",
    "\n",
    "def BGR2RGB(image):\n",
    "    B, G, R = cv2.split(image)\n",
    "    image = cv2.merge([R, G, B])\n",
    "    return image\n",
    "\n",
    "def central_crop(image, crop_height, crop_width):\n",
    "    image_height = image.shape[0]\n",
    "    image_width = image.shape[1]\n",
    "    offset_height = (image_height - crop_height) // 2\n",
    "    offset_width = (image_width - crop_width) // 2\n",
    "    return image[offset_height:offset_height + crop_height, offset_width:\n",
    "                 offset_width + crop_width, :]\n",
    "\n",
    "def normalize(image):\n",
    "    image=image/256.0\n",
    "    image=image-0.5\n",
    "    image=image*2\n",
    "    return image\n",
    "\n",
    "def preprocess_fn(image, crop_height = 360, crop_width = 640):\n",
    "    image = resize_shortest_edge(image, 256)\n",
    "    image = mean_image_subtraction(image, MEANS)\n",
    "    image = central_crop(image, crop_height, crop_width)\n",
    "    return image\n",
    "\n",
    "def preprocess_ssd(image, crop_height = 640, crop_width = 360):\n",
    "    image = cv2.resize(image, (crop_height,crop_width))\n",
    "    #image = mean_image_subtraction(image, MEANS)\n",
    "    #image = central_crop(image, crop_height, crop_width)\n",
    "    return image"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 79,
   "id": "d3dd6e3b-87f2-47a2-a854-b9f53e411752",
   "metadata": {},
   "outputs": [],
   "source": [
    "image_folder = 'resources'\n",
    "original_images = [i for i in os.listdir(image_folder) if i.endswith(\"jpg\")];\n",
    "total_images = len(original_images)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 80,
   "id": "b5010e25-18c9-4aa7-a6ec-f1708a9c82e7",
   "metadata": {},
   "outputs": [],
   "source": [
    "#label_path = os.path.join(image_folder, \"words.txt\");print (label_path)\n",
    "#original_image = Image.open(\"resources/rungesture.jpg\");plt.imshow(original_image)\n",
    "#original_image = Image.open(\"resources/rungesture.jpg\")  #plt.imshow(original_image)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 81,
   "id": "3c016ec7-99b8-4810-951d-12c82985c9e3",
   "metadata": {},
   "outputs": [],
   "source": [
    "def calculate_softmax(data):\n",
    "    result = np.exp(data)\n",
    "    return result\n",
    "\n",
    "def predict_label(softmax):\n",
    "    label_path = os.path.join(image_folder, \"words.txt\")\n",
    "    #with open(\"resources/words.txt\", \"r\") as f:\n",
    "    with open(label_path, \"r\") as f:\n",
    "        lines = f.readlines()\n",
    "    return lines[np.argmax(softmax)-1]"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "02fc3dc9-d0c4-4851-8ce0-c6c01639c3a1",
   "metadata": {},
   "source": [
    "### 3. Prepare VART\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 82,
   "id": "29287a79-afd4-40cd-8bc1-56e6fd306a21",
   "metadata": {},
   "outputs": [],
   "source": [
    "dpu = overlay.runner\n",
    "\n",
    "inputTensors = dpu.get_input_tensors()\n",
    "outputTensors = dpu.get_output_tensors()\n",
    "\n",
    "shapeIn = tuple(inputTensors[0].dims)\n",
    "shapeOut = tuple(outputTensors[0].dims)\n",
    "outputSize = int(outputTensors[0].get_data_size() / shapeIn[0])\n",
    "\n",
    "softmax = np.empty(outputSize)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 83,
   "id": "bf641c1e-86e4-43ee-88ec-a4e0d5417b98",
   "metadata": {},
   "outputs": [],
   "source": [
    "output_data = [np.empty(shapeOut, dtype=np.float32, order=\"C\")]\n",
    "input_data = [np.empty(shapeIn, dtype=np.float32, order=\"C\")]\n",
    "image = input_data[0]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 84,
   "id": "0304b5a5-e62c-4d97-90fd-db09a7e41980",
   "metadata": {},
   "outputs": [],
   "source": [
    "def run(image_index, display=False):\n",
    "    #preprocessed = preprocess_fn(cv2.imread(os.path.join(image_folder, original_images[image_index])))\n",
    "    preprocessed = preprocess_ssd(cv2.imread(os.path.join(image_folder, original_images[image_index])))\n",
    "    image[0,...] = preprocessed.reshape(shapeIn[1:])\n",
    "    job_id = dpu.execute_async(input_data, output_data)\n",
    "    dpu.wait(job_id)\n",
    "    #temp = [j.reshape(1, outputSize) for j in output_data]\n",
    "    #softmax = calculate_softmax(temp[0][0])\n",
    "    if display:\n",
    "        display_image = cv2.imread(os.path.join(\n",
    "            image_folder, original_images[image_index]))\n",
    "        _, ax = plt.subplots(1)\n",
    "        _ = ax.imshow(cv2.cvtColor(display_image, cv2.COLOR_BGR2RGB))\n",
    "        print(\"Classification: {}\".format(predict_label(softmax)))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 85,
   "id": "8ec86faa",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "0"
      ]
     },
     "execution_count": 85,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "img= cv2.imread('resources/gym.jpg')\n",
    "ipt_im=cv2.resize(img,(360,640))\n",
    "#ipt_im=np.array(ipt_im,dtype='int32')\n",
    "#ipt_im=np.expand_dims(ipt_im,axis=0)\n",
    "image[0,...] = ipt_im.reshape(shapeIn[1:])\n",
    "job_id = dpu.execute_async(input_data, output_data)\n",
    "dpu.wait(job_id)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 64,
   "id": "0e37dde7-c781-4bed-bcde-d17b8cfcfb5c",
   "metadata": {
    "scrolled": false
   },
   "outputs": [],
   "source": [
    "run(0, display=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 86,
   "id": "ebbd05fc",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(1, 45, 80, 52)\n"
     ]
    }
   ],
   "source": [
    "print(output_data[0].shape) #opt_img=cv2.rectangle(ipt_im, (1,45), (81, 97), (255, 0, 0), thickness=2) #plt.imshow(opt_img)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 65,
   "id": "643ef9a5-66fc-4a4d-ad7a-44dec9dedb22",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Performance: 15.437943523453374 FPS\n"
     ]
    }
   ],
   "source": [
    "time1 = time.time()\n",
    "[run(i) for i in range(total_images)]\n",
    "time2 = time.time()\n",
    "fps = total_images/(time2-time1)\n",
    "print(\"Performance: {} FPS\".format(fps))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 52,
   "id": "ff9a4b7e-7329-4b16-a5e9-110e8b61d724",
   "metadata": {},
   "outputs": [],
   "source": [
    "del overlay\n",
    "del dpu"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 87,
   "id": "063fabf4-ef1a-4f75-b6c5-47885b835230",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "End of SSD Codes and quit now.\n"
     ]
    }
   ],
   "source": [
    "print (\"End of SSD Codes and quit now.\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 88,
   "id": "17d8c20d",
   "metadata": {},
   "outputs": [
    {
     "ename": "AssertionError",
     "evalue": "",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mAssertionError\u001b[0m                            Traceback (most recent call last)",
      "\u001b[0;32m<ipython-input-88-ef398dc9ba1d>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m      1\u001b[0m \u001b[0moverlay\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mDpuOverlay\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"dpu.bit\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0moverlay\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mload_model\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"resources/spnet.xmodel\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m      3\u001b[0m \u001b[0;31m#overlay1.load_model(\"resources/ssd_pedestrian_pruned_0_97.xmodel\")\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/share/pynq-venv/lib/python3.8/site-packages/pynq_dpu/dpu.py\u001b[0m in \u001b[0;36mload_model\u001b[0;34m(self, model)\u001b[0m\n\u001b[1;32m    169\u001b[0m             \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mgraph\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mxir\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mGraph\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdeserialize\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mabs_model\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    170\u001b[0m             \u001b[0msubgraphs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mget_child_subgraph_dpu\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mgraph\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 171\u001b[0;31m             \u001b[0;32massert\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msubgraphs\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    172\u001b[0m             \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrunner\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mvart\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mRunner\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcreate_runner\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msubgraphs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m\"run\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;31mAssertionError\u001b[0m: "
     ]
    }
   ],
   "source": [
    "overlay = DpuOverlay(\"dpu.bit\")\n",
    "overlay.load_model(\"resources/spnet.xmodel\")\n",
    "#overlay1.load_model(\"resources/ssd_pedestrian_pruned_0_97.xmodel\")"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "e04f3603-711a-4c06-89ce-1fa3a867b37e",
   "metadata": {},
   "source": [
    "## End of Code"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "9a6aa288",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.10"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
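
Notes on the code

The last code cell fails with an AssertionError because pynq_dpu's load_model() asserts that the xmodel contains exactly one DPU subgraph, and resources/spnet.xmodel apparently does not. A minimal sketch for checking an xmodel before loading it, using the same xir calls that appear in the traceback (the "device" attribute convention follows the Vitis AI examples and is an assumption here):

import xir

# Count the DPU subgraphs in an xmodel before handing it to DpuOverlay.load_model(),
# which requires exactly one such subgraph.
graph = xir.Graph.deserialize("resources/spnet.xmodel")
children = graph.get_root_subgraph().toposort_child_subgraph()
dpu_subgraphs = [s for s in children
                 if s.has_attr("device") and s.get_attr("device").upper() == "DPU"]
print("DPU subgraphs found:", len(dpu_subgraphs))

The notebook imports pynq.lib.video but only runs the DPU on JPEG files from resources/. As a minimal sketch (not part of the notebook), the same preprocess_ssd() helper and DPU buffers could be fed from an OpenCV capture; the camera index and frame count below are assumptions:

import cv2

# Assumes overlay, dpu, input_data, output_data, image, shapeIn and preprocess_ssd()
# are still defined as in the notebook cells above; camera index 0 is hypothetical.
cap = cv2.VideoCapture(0)
try:
    for _ in range(100):
        ok, frame = cap.read()
        if not ok:
            break
        ipt_im = preprocess_ssd(frame)                # resize to 640x360 as in the notebook
        image[0, ...] = ipt_im.reshape(shapeIn[1:])   # copy into the DPU input buffer
        job_id = dpu.execute_async(input_data, output_data)
        dpu.wait(job_id)
        # output_data[0] holds the raw SSD tensor (1, 45, 80, 52); decoding it into boxes
        # needs the model's prior-box configuration, which is not shown in the notebook.
finally:
    cap.release()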

Credits

Cindy

5 projects • 3 followers
Hello

Comments