Shubham
Created May 7, 2020 © GPL3+

WeCare

Get control of your anxiety and depression with our solution. Helping menopausal women.

Intermediate · Full instructions provided · 14 hours · 67
WeCare

Things used in this project

Hardware components

MATRIX Creator
MATRIX Labs MATRIX Creator
Thanks to the MATRIX and Hackster communities for the free hardware. A fully-featured development board, including sensors, wireless communications, and an FPGA.
×1
Raspberry Pi 3 Model B
Raspberry Pi 3 Model B
×1
SG90 Micro-servo motor
SG90 Micro-servo motor
×2

Software apps and online services

Raspbian
Raspberry Pi Raspbian
Rhasspy

Hand tools and fabrication machines

10 Pc. Jumper Wire Kit, 5 cm Long
10 Pc. Jumper Wire Kit, 5 cm Long

Story

Read more

Schematics

Matrix Creator pin layout

Raspberry pi 3B+ pin layout with matrix creator

Matrix Creator

Code

Python_giphy_client_integration.py

Python
Integrate giphy to show GIFs in the application
import time
import giphy_client
from giphy_client.rest import ApiException
#import webbrowser
import json
#import os
import random
import subprocess #To check whether we get the desired GIF or not.

# Debug-only helper: the final application shows the page inside the PyQt
# window instead of launching an external browser.
def show_gif(data, duration=30):
    """Open *data* (a GIF URL) in chromium-browser for *duration* seconds.

    Args:
        data: URL to open.
        duration: seconds to keep the browser open (default 30, matching
            the original hard-coded value).

    The browser process is always killed, even if the sleep is interrupted.
    """
    p = subprocess.Popen(["chromium-browser", data])  # open the browser
    try:
        time.sleep(duration)
    finally:
        p.kill()
    
# emotion stores the sentence to be searched and trend stores whether to show a trending video or not.
def _extract_first_url(api_response):
    """Pull the URL token out of a stringified Giphy response.

    NOTE(review): this relies on the URL being the 10th whitespace-separated
    token of ``str(api_response.data)`` and on stripping 3 leading / 4
    trailing characters of the listed token -- fragile, but preserved exactly
    from the original inline code. Prefer the typed response attributes
    (e.g. ``data[0].embed_url``) when refactoring further.
    """
    tokens = str(api_response.data).split()
    tokens = tokens[9:10]
    tokens = str(tokens)
    return tokens[3:-4]

def giphy_integration(emotion, trend):
    """Fetch a GIF URL from the Giphy API for the detected *emotion*.

    Args:
        emotion: search phrase (the sentence/mood to look up).
        trend: 0 -> pick one of five endpoints at random for variety;
               1 -> return a currently-trending GIF instead.

    Returns:
        The GIF URL as a string, or None when the API call raises
        ``ApiException`` (printed) or no branch matches.
    """
    # create an instance of the API class
    api_instance = giphy_client.DefaultApi()
    api_key = 'r43Tm0MNfDDSz1Rqzi2cBS7ry4lxS8Ep' # str | Giphy API Key.
    q = emotion # str | Search query term or phrase.
    limit = 90 # int | The maximum number of records to return. (optional) (default to 25)
    offset = 0 # int | An optional results offset. Defaults to 0. (optional) (default to 0)
    rating = 'g' # str | Filters results by specified rating. (optional)
    lang = 'en' # str | 2-letter ISO 639-1 language code for regional content. (optional)
    fmt = 'json' # str | Expected response format. (optional) (default to json)
    ran = random.choice([1, 2, 3, 4, 5])  # randomly choose one of five lookup strategies
    print(ran)
    category = 'motivation'

    try:
        if trend == 0:
            if ran == 1:
                # Search endpoint: best keyword match.
                api_response = api_instance.gifs_search_get(
                    api_key, q, limit=limit, offset=offset,
                    rating=rating, lang=lang, fmt=fmt)
                url = _extract_first_url(api_response)
                print(url)
                return url
            elif ran == 2:
                # Translate endpoint: converts the phrase to a single GIF.
                api_response = api_instance.gifs_translate_get(api_key, q)
                url = api_response.data.embed_url
                print(url)
                return url
            elif ran == 3:
                # Category/tag endpoint within the 'motivation' category.
                api_response = api_instance.gifs_categories_category_tag_get(
                    api_key, category, tag=emotion, limit=limit, offset=offset)
                url = _extract_first_url(api_response)
                print(url)
                return url
            elif ran == 4:
                # Random endpoint: a random GIF tagged with the emotion.
                api_response = api_instance.gifs_random_get(
                    api_key, tag=emotion, rating=rating, fmt=fmt)
                url = api_response.data.image_url
                print(url)
                return url
            elif ran == 5:
                # Translate endpoint again (second chance for variety).
                api_response = api_instance.gifs_translate_get(api_key, emotion)
                url = api_response.data.embed_url
                print(url)
                return url
        elif trend == 1:
            # Trending endpoint: ignore the emotion, show what's popular.
            api_response = api_instance.gifs_trending_get(
                api_key, limit=limit, rating=rating, fmt=fmt)
            url = _extract_first_url(api_response)
            print(url)
            return url

    except ApiException as e:
        print("Exception when calling DefaultApi->gifs_search_get: %s\n" % e)

Control.py

Python
Code the servos and various patterns for the menopausal symptoms
from matrix_lite import gpio
from matrix_lite import led
from time import sleep
from math import pi, sin
import random

# Configure pins 3 and 4 as PWM outputs to drive the two servos.
for _pin in (3, 4):
    gpio.setFunction(_pin, 'PWM')
for _pin in (3, 4):
    gpio.setMode(_pin, 'output')

# One entry per LED on the everloop ring, all initially off.
everloop = ['black'] * led.length

# Phase step for the sine-wave patterns; depends on how many LEDs the ring has.
ledAdjust = 0.51 if len(everloop) == 35 else 1.01  # MATRIX Creator vs MATRIX Voice



def initial_position():
    """Return both servos (pins 3 and 4) to their 0-degree rest position."""
    for pin in (3, 4):
        gpio.setServoAngle({
            "pin": pin,
            "angle": 0,
            # min_pulse_ms: minimum pulse width for the PWM wave, in ms
            "min_pulse_ms": 0.8,
        })
    
def change_position():
    """Swing both servos out to 150 degrees, hold, then return to rest."""
    initial_position()
    sleep(1)
    for pin in (3, 4):
        gpio.setServoAngle({
            "pin": pin,
            "angle": 150,
            # min_pulse_ms: minimum pulse width for the PWM wave, in ms
            "min_pulse_ms": 0.8,
        })
    sleep(2)
    initial_position()
    

def sleep_pattern():
    """Pulse the everloop in dim red — a slow, calming 'breathing' pattern.

    Runs 3999 frames at ~35 ms each (a few minutes), then switches the ring off.
    """
    frequency1 = 0.00375  # very low frequency -> slow breathing effect
    print(frequency1)
    counter = 0.0
    tick = len(everloop) - 1
    for _frame in range(1, 4000):
        # Compute the red intensity for every LED along the ring.
        for idx in range(len(everloop)):
            red = round(max(0, (sin(frequency1 * counter + (pi / 180 * 240)) * 155 + 100) / 2))
            counter += ledAdjust
            everloop[idx] = {'r': red}

        # Reveal the pattern gradually, one more LED per frame.
        if tick != 0:
            for idx in reversed(range(tick)):
                everloop[idx] = {}
            tick -= 1

        led.set(everloop)
        sleep(.035)
    led.set('black')
    
def classic_pattern():
    """Cycle the everloop through a dim rainbow for 3999 frames, then go dark."""
    frequency1 = 0.9947961070906801  # fast oscillation -> quick colour cycling
    print(frequency1)
    counter = 0.0
    tick = len(everloop) - 1
    inten = 10  # brightness divisor; larger = dimmer

    for _frame in range(1, 4000):
        # R/G/B are three sine waves 120 degrees apart -> rainbow sweep.
        for idx in range(len(everloop)):
            red = round(max(0, (sin(frequency1 * counter + (pi / 180 * 240)) * 155 + 100) / inten))
            green = round(max(0, (sin(frequency1 * counter + (pi / 180 * 120)) * 155 + 100) / inten))
            blue = round(max(0, (sin(frequency1 * counter) * 155 + 100) / inten))
            counter += ledAdjust
            everloop[idx] = {'r': red, 'g': green, 'b': blue}

        # Reveal the rainbow gradually, one more LED per frame.
        if tick != 0:
            for idx in reversed(range(tick)):
                everloop[idx] = {}
            tick -= 1

        led.set(everloop)
        sleep(.035)
    led.set('black')

play_video_in_pyqt.py

Python
To play video in a PyQt application; call the late() function with the video file name.
import platform
#import os
import sys
from PyQt5 import QtWidgets, QtGui, QtCore
import vlc
from time import sleep

process = []

class Player(QtWidgets.QMainWindow):
    """VLC video player embedded in a Qt main window.

    Loads *data* (a file name under the project folder), starts playback
    immediately, and appends the clip duration in seconds to the
    module-level ``process`` list so the caller knows how long it runs.
    """

    def __init__(self, data, master=None):
        QtWidgets.QMainWindow.__init__(self, master)

        # Create a basic vlc instance and an empty media player.
        self.instance = vlc.Instance()
        self.media = None
        self.mediaplayer = self.instance.media_player_new()

        self.widget = QtWidgets.QWidget(self)
        self.setCentralWidget(self.widget)

        # In this widget, the video will be drawn (platform-specific container).
        if platform.system() == "Darwin": # for MacOS
            self.videoframe = QtWidgets.QMacCocoaViewContainer(0)
        else:
            self.videoframe = QtWidgets.QFrame()

        self.palette = self.videoframe.palette()
        self.palette.setColor(QtGui.QPalette.Window, QtGui.QColor(150, 60, 25))
        self.videoframe.setPalette(self.palette)
        self.videoframe.setAutoFillBackground(True)

        self.vboxlayout = QtWidgets.QVBoxLayout()
        self.vboxlayout.addWidget(self.videoframe)
        self.widget.setLayout(self.vboxlayout)

        # Fix: the original built a (path, 'All Files (*)') tuple -- leftover
        # from a QFileDialog.getOpenFileName call -- and indexed [0].
        # Build the media path directly instead.
        filename = '/home/pi/Desktop/Hacking Menopause/' + data

        self.media = self.instance.media_new(filename)
        self.mediaplayer.set_media(self.media)
        self.media.parse()

        # Set the title of the window.
        self.setWindowTitle('For You')

        # The media player has to be 'connected' to the QFrame (otherwise the
        # video would be displayed in its own window). This is platform
        # specific, so we must give the ID of the QFrame (or similar object)
        # to vlc. Different platforms have different functions for this.
        if platform.system() == "Linux": # for Linux using the X Server
            self.mediaplayer.set_xwindow(int(self.videoframe.winId()))
        elif platform.system() == "Windows": # for Windows
            self.mediaplayer.set_hwnd(int(self.videoframe.winId()))
        elif platform.system() == "Darwin": # for MacOS
            self.mediaplayer.set_nsobject(int(self.videoframe.winId()))

        self.mediaplayer.play()
        sleep(1.5)  # startup time: let VLC load so get_length() is valid
        duration = self.mediaplayer.get_length() / 1000  # ms -> seconds
        print(duration)
        process.append(duration)
        
    
        

def late(da):
    """Build the Qt application, show the video player maximized, and run it.

    Args:
        da: video file name passed through to Player.
    """
    qt_app = QtWidgets.QApplication(sys.argv)
    video_player = Player(da)
    video_player.showMaximized()
    qt_app.exec_()

app.py

Python
Main file that talks with the websocket and connects with Rhasspy.
import websocket
import requests
import json
from matrix_lite import led
import gpio_control as gpi
import vlc

def say(text):
    """Speak *text* through the local Rhasspy text-to-speech endpoint.

    Fix: pass the payload explicitly as ``data=`` and add a timeout so a
    hung TTS service cannot block the websocket callback thread forever.
    """
    url = "http://localhost:12101/api/text-to-speech"
    requests.post(url, data=text, timeout=10)

# Rhasspy delivers recognized intents to this websocket callback.
def on_message(ws, message):
    """Decode an intent event from Rhasspy and log it."""
    data = json.loads(message)
    print("**Captured New Intent**")
    print(data)

    # Anyone is free to add handling for their own trained intents,
    # following this pattern (I trained very few, so it is omitted here):
    # if ("GetSleep" == data["intent"]["name"]):
    #     led.set(data["slots"]["color"])
    #     say("Device changed to: " + data["slots"]["color"])

def on_error(ws, error):
    """Websocket error callback: just log the error."""
    print(error)

def on_close(ws):
    """Websocket close callback: announce the disconnect."""
    print("\n**Disconnected**\n")

def on_open(ws):
    """Websocket open callback: announce the connection."""
    print("\n**Connected**\n")

# Entry point: connect to Rhasspy's intent event stream and block forever.
if __name__ == "__main__":
    client = websocket.WebSocketApp(
        "ws://localhost:12101/api/events/intent",
        on_message=on_message,
        on_error=on_error,
        on_close=on_close,
    )
    client.on_open = on_open
    client.run_forever()

play_music.py

Python
Simply use the python-vlc.
import vlc
def play_music(file):
    """Play an audio file with a throwaway VLC media player."""
    player = vlc.MediaPlayer(file)
    player.play()

view_giphy_in_pyqt.py

Python
import sys
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
from PyQt5.QtWebKit import *
from PyQt5.QtWebKitWidgets import *
from PyQt5.QtWidgets import QApplication, QWidget, QMainWindow

def view_giphy(file):
    """Show *file* (a GIF/embed URL) maximized in a Qt WebKit view."""
    qt_app = QApplication(sys.argv)
    browser = QWebView()
    browser.setUrl(QUrl(file))
    browser.showMaximized()
    qt_app.exec()

Credits

Shubham

Shubham

6 projects • 11 followers
Turned 20 and can't stop myself from learning and using AI. Learning to tackle global problems.

Comments