├── .gitignore
├── CV.py
├── CV_realtime.py
├── EMG.py
├── EMG_Model.py
├── GP.py
├── GP.ui
├── README.md
├── open_myo.py
├── poweroff.py
├── screenshots
│   ├── 1.PNG
│   ├── 2.PNG
│   ├── 3.PNG
│   ├── 4.PNG
│   ├── 5.PNG
│   ├── 6.PNG
│   ├── finger_spread.png
│   ├── palmar_neutral.png
│   ├── palmar_pronated.png
│   ├── pinch.png
│   ├── rest.png
│   ├── th.png
│   ├── tripod.png
│   ├── wrist_extension.png
│   ├── wrist_ulnar_deviation.png
│   └── x.png
└── tools
    ├── 0.csv
    ├── 1.csv
    ├── 2.csv
    ├── 3.csv
    ├── GP_WOC_95.h5
    ├── GP_Weights.h5
    ├── class 1
    │   ├── 222_r320.png
    │   ├── 50_r110.png
    │   ├── 728_r320.png
    │   └── 98_r255.png
    ├── class 2
    │   ├── 206_r225.png
    │   ├── 277_r355.png
    │   ├── 630_r175.png
    │   └── 640_r320.png
    ├── class 3
    │   ├── 100_r125.png
    │   ├── 104_r325.png
    │   ├── 510_r50.png
    │   └── 511_r125.png
    ├── class 4
    │   ├── 212_r115.png
    │   ├── 474_r120.png
    │   ├── 710_r175.png
    │   └── 89_r225.png
    └── hannon.pickle

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
*.pyc

# PyInstaller
#  Usually these files are written by a python script from a template
#  before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# pyenv
.python-version

# celery beat schedule file
celerybeat-schedule

# SageMath parsed files
*.sage.py

# dotenv
.env

# virtualenv
.venv
venv/
ENV/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/

# Pickle files
# *.pkl

--------------------------------------------------------------------------------
/CV.py:
--------------------------------------------------------------------------------
import h5py
import numpy as np
import random
import time
from scipy import misc
try:
    import queue            # Python 3
except ImportError:
    import Queue as queue   # Python 2
import threading
from keras.layers import Input, Add, Dense, Activation, Dropout, BatchNormalization, Flatten, Conv2D, MaxPooling2D
from keras.models import Model  # , load_model
from keras.initializers import glorot_uniform
from keras import backend as K
import tensorflow as tf

class CV():
    def __init__(self, queue_size=8):
        self.q = queue.Queue()
        self.stage = 0
        self.corrections = 0
        self.all_grasps = [1, 2, 3, 4]
        self.Choose_grasp = list(self.all_grasps)
        self.graph = tf.get_default_graph()
    def rgb2gray(self, rgb_image):
        # Luminance (ITU-R BT.601) conversion: collapse RGB to a single gray channel.
        return np.dot(rgb_image, [0.299, 0.587, 0.114])

    def real_preprocess(self, img):
        # gray level
        img_gray = self.rgb2gray(img)
        # resize the image to 48x36:
        img_resize = misc.imresize(img_gray, (48, 36))
        # normalization (zero mean, unit variance):
        img_norm = (img_resize - img_resize.mean()) / img_resize.std()
        return img_norm

    def Nazarpour_model(self, input_shape, num_of_layers=2):
        x_input = Input(input_shape)
        x = Conv2D(5, (5, 5), strides=(1, 1), padding='valid')(x_input)
        x = BatchNormalization(axis=3)(x)
        x = Activation('relu')(x)
        x = Dropout(0.2)(x)
        if num_of_layers == 2:
            x = Conv2D(25, (5, 5), strides=(1, 1), padding='valid')(x)
            x = BatchNormalization(axis=3)(x)
            x = Activation('relu')(x)
            x = MaxPooling2D((2, 2), strides=(2, 2))(x)
            x = Dropout(0.2)(x)
        x = Flatten()(x)
        x = Dense(4, activation='softmax', kernel_initializer=glorot_uniform(seed=0))(x)
        model = Model(inputs=x_input, outputs=x)
        return model

    def grasp_type(self, weights_path, image_path):
        """
        weights_path : path of the trained model weights, e.g. 'GP_Weights.h5'.
        image_path   : path of the image to classify (the uploaded image in the
                       offline case).
        """
        n_row = 48
        n_col = 36
        nc = 1
        model = self.Nazarpour_model((n_row, n_col, nc), num_of_layers=2)
        model.compile('adam', loss='categorical_crossentropy', metrics=['accuracy'])
        model.load_weights(weights_path)

        i = misc.imread(image_path)
        img_after_preprocess = self.real_preprocess(i)
        x = np.expand_dims(img_after_preprocess, axis=0)
        x = x.reshape((1, n_row, n_col, nc))
        out = model.predict(x)
        grasp = np.argmax(out) + 1
        if grasp == 1:
            print("Grasp_Type : Pinch , Class = 1 \n")
        if grasp == 2:
            print("Grasp_Type : Palmar Wrist Neutral , Class = 2 \n")
        if grasp == 3:
            print("Grasp_Type : Tripod , Class = 3 \n")
        if grasp == 4:
            print("Grasp_Type : Palmar Wrist Pronated , Class = 4 \n")
        return grasp
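    # A quick, hedged sanity check of the preprocessing shapes (not part of the
    # pipeline; the numbers follow real_preprocess/grasp_type above): an HxWx3
    # RGB image collapses to HxW gray, is resized to 48x36, then reshaped to
    # the (1, 48, 36, 1) tensor the CNN expects.
    #
    #   cv = CV()
    #   img = np.random.rand(480, 360, 3)     # stand-in for misc.imread(...)
    #   x = cv.real_preprocess(img)           # -> shape (48, 36)
    #   x = x.reshape((1, 48, 36, 1))         # -> NHWC batch for model.predict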
    def Main_algorithm(self, path1, path2=None):

        self.path1 = path1  # path of the trained CV model weights (.h5)
        if path2:
            self.model_name = path2  # path of the picture to test
        else:
            self.model_name = 'tools/class 1/50_r110.png'

        while not self.q.empty():
            EMG_class_received = self.q.get()
            if EMG_class_received == 1 and self.stage == 0:
                print("EMG_class {0}, Stage {1} : \n".format(EMG_class_received, self.stage))
                self.System_power(1)  # Start system

            elif EMG_class_received == 1:
                print("EMG_class {0}, Stage {1} : \n".format(EMG_class_received, self.stage))
                self.Confirmation()

            elif EMG_class_received == 2:
                print("EMG_class {0}, Stage {1} : \n".format(EMG_class_received, self.stage))
                self.Cancellation()

            elif EMG_class_received == 0:
                print("EMG_class {0}, Stage {1} : \n".format(EMG_class_received, self.stage))
                self.System_power(0)  # Turn system off

    def System_power(self, Turn_on):
        # Reset values:
        self.stage = 0
        # corrections = 0
        self.Choose_grasp = list(self.all_grasps)

        if not Turn_on:
            self.corrections = 0
            # Turn off
            print("Turning off ... back to rest state. \n\n\n")
        else:
            # Start/restart
            with self.graph.as_default():
                self.grasp = self.grasp_type(self.path1, self.model_name)
            print('Preshaping grasp type {}\n\n'.format(self.grasp))
            self.stage = 1

    def Confirmation(self):
        print(" Confirmed! \n")
        if self.stage < 2:
            self.stage += 1
            self.corrections = 0
            self.Choose_grasp = list(self.all_grasps)
            print("Grasping ... grasp type {} \n\n".format(self.grasp))
            # Do the action
        else:
            print('Releasing ... \n')
            self.System_power(0)

    def Cancellation(self):
        if self.stage > 0:
            print(" Cancelled! \n")
            self.stage -= 1
            # corrections += 1
            if self.stage == 0 and self.corrections > 3:
                print("Exceeded maximum iterations: \n Choosing from remaining grasps")
                if self.Choose_grasp:
                    if self.grasp in self.Choose_grasp:
                        self.Choose_grasp.remove(self.grasp)
                    if not self.Choose_grasp:  # Check if list is empty after removing an element.
                        self.Choose_grasp = list(self.all_grasps)
                        self.corrections = 0
                self.grasp = random.SystemRandom().choice(self.Choose_grasp)
                print('Preshaping grasp type {}\n\n'.format(self.grasp))
                self.stage = 1
            else:
                # Redo previous action:
                if self.stage == 0:
                    self.System_power(1)
                    self.corrections += 1
                    print("Restarting ... \n")
                elif self.stage == 1:
                    print('Preshaping grasp type {}\n\n'.format(self.grasp))
                elif self.stage == 2:
                    print("Grasping ... grasp type {} \n\n".format(self.grasp))
                print("Correction no. {}".format(self.corrections + 1))
        else:
            print('No previous stage, restarting ... \n')
            self.System_power(1)
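# A minimal offline sketch of the control loop (hedged; it mirrors the
# commented threading demo below). The EMG classifier is simulated by pushing
# decoded classes into cv.q, then Main_algorithm walks the state machine.
# Both paths are files that ship in tools/.
#
#   cv = CV()
#   for emg_class in [1, 1, 2, 1]:   # start -> confirm (grasp) -> cancel -> confirm
#       cv.q.put(emg_class)
#   cv.Main_algorithm('tools/GP_Weights.h5', 'tools/class 1/50_r110.png')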
#q = queue.Queue()

"""
Stages meanings:
  0: System off
  1: Taking photos, deciding grasp type, preshaping.
  2: Grasping
  3: Releasing
"""

#cv = CV()

# t1 = threading.Thread(target = EMG_Listener, name ='thread1')
# t2 = threading.Thread(target = Main_algorithm, name ='thread2')

# t1.daemon = True
# t2.daemon = True

# t1.start()
# t2.start()

# t1.join()
#grasp = cv.grasp_type( 'tools/GP_Weights.h5', 'tools/class 1/50_r110.png' )
#print ('Grasp type no.{0} \n'.format( grasp ))

--------------------------------------------------------------------------------
/CV_realtime.py:
--------------------------------------------------------------------------------
#==============================================================================
# REAL LIFE CLASSIFICATION
#==============================================================================
import tensorflow as tf
from keras.applications import imagenet_utils
#from keras.applications import VGG16
from keras.applications import ResNet50
import cv2, threading
import numpy as np
import time
import random
try:
    import queue            # Python 3
except ImportError:
    import Queue as queue   # Python 2

# ImageNet labels grouped by the grasp class they map to:
l1 = ["wooden_spoon", "fountain_pen", "revolver", "kite", "necklace", "ballpoint"]
l2 = ["beer_glass", "hourglass", "cup", "measuring_cup", "water_bottle", "coffee_mug", "coffeepot", "pill_bottle", "pop_bottle", "wine_bottle", "beer_bottle", "hair_spray"]
l3 = ["cassette", "cellular_telephone", "wallet", "iPod", "notebook", "bottlecap", "remote_control", "rubber_eraser", "digital_watch"]
l4 = ["pencil_box", "plate", "toilet_tissue", "baseball", "croquet_ball", "golf_ball", "ping-pong_ball", "tennis_ball", "cheeseburger", "vase", "pitcher", "balloon"]

class MyThread(threading.Thread):
    def __init__(self):
        threading.Thread.__init__(self)
        self.label = ''
        self.frame_to_predict = None
        self.classification = True
        self.score = .0
        ###
        self.q = queue.Queue()
        self.stage = 0
        self.corrections = 0
        self.all_grasps = [1, 2, 3, 4]
        self.Choose_grasp = list(self.all_grasps)
        self.grasp1 = None
        self.grasp_number = 0
        self.grasp_name = "None"
        self.final = None
        self.flag1 = None
        ###
        print('Loading network...')
        # self.model = VGG16(weights='imagenet')
        self.model = ResNet50(weights='imagenet')  # load the network once
        self.graph = tf.get_default_graph()
        print('Network loaded successfully!')
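    # Hedged illustration of the label -> grasp mapping applied in grasp_type()
    # below: decode_predictions returns ImageNet labels, and membership in
    # l1..l4 picks the grasp class; anything else maps to 0/"None".
    #
    #   "kite"       in l1  -> grasp 1, "Pinch"
    #   "coffee_mug" in l2  -> grasp 2, "Palmar Wrist Neutral"
    #   "iPod"       in l3  -> grasp 3, "Tripod"
    #   "golf_ball"  in l4  -> grasp 4, "Palmar Wrist Pronated"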
    def run(self):
        with self.graph.as_default():
            while self.classification is True:
                if self.frame_to_predict is not None:
                    self.frame_to_predict = cv2.cvtColor(self.frame_to_predict, cv2.COLOR_BGR2RGB).astype(np.float32)
                    self.frame_to_predict = self.frame_to_predict.reshape((1, ) + self.frame_to_predict.shape)
                    self.frame_to_predict = imagenet_utils.preprocess_input(self.frame_to_predict)
                    predictions = self.model.predict(self.frame_to_predict)
                    (self.imageID, self.label, self.score) = imagenet_utils.decode_predictions(predictions)[0][0]
                    self.grasp_type()
                    #print ((self.label, self.score))
                if self.classification == False:
                    break

    def run_camera(self):
        # Initialize OpenCV video capture
        self.video_capture = cv2.VideoCapture(0)  # Set to 1 for front camera
        self.video_capture.set(4, 800)   # Width
        self.video_capture.set(5, 600)   # Height

        # Start the video capture loop
        while True:
            # Get the original frame from video capture
            ret, original_frame = self.video_capture.read()
            # Resize the frame to fit the imageNet default input size
            self.frame_to_predict = cv2.resize(original_frame, (224, 224))

            # Add text label and network score to the video capture
            cv2.putText(original_frame, "Label: %s | Score: %.2f" % (self.label, self.score),
                        (10, 30), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (0, 255, 0), 2)
            # Display the video
            cv2.imshow("Classification", original_frame)

            # Hit q or esc key to exit
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break

    def close(self):
        self.classification = False
        self.video_capture.release()
        cv2.destroyAllWindows()

    def grasp_type(self):
        if self.label in l1:
            self.grasp_number = 1
            self.grasp_name = "Pinch"
            #print( "Grasp_Type : Pinch \n " )
        elif self.label in l2:
            self.grasp_number = 2
            self.grasp_name = "Palmar Wrist Neutral"
            #print( "Grasp_Type : Palmar Wrist Neutral \n " )
        elif self.label in l3:
            self.grasp_number = 3
            self.grasp_name = "Tripod"
            #print( "Grasp_Type : Tripod \n " )
        elif self.label in l4:
            self.grasp_number = 4
            self.grasp_name = "Palmar Wrist Pronated"
            #print( "Grasp_Type : Palmar Wrist Pronated \n " )
        else:
            #print ("Not Defined Grasp")
            self.grasp_number = 0
            self.grasp_name = "None"
        return self.grasp_number, self.grasp_name

    def Main_algorithm(self):
        while not self.q.empty():
            EMG_class_received = self.q.get()
            if EMG_class_received == 1:
                print("EMG_class {0} : ".format(EMG_class_received))
                self.System_power(1)  # Start system

            elif EMG_class_received == 2 and self.flag1 == 1:
                print("EMG_class {0} : ".format(EMG_class_received))
                self.Confirmation()

            elif EMG_class_received == 3 and self.flag1 == 1:
                print("EMG_class {0} : ".format(EMG_class_received))
                self.Cancellation()

            elif EMG_class_received == 0:
                print("EMG_class {0}: ".format(EMG_class_received))
                self.System_power(0)  # Turn system off

    def System_power(self, Turn_on):
        # Reset values:
        #self.stage = 0
        # corrections = 0
        #self.Choose_grasp = list( self.all_grasps )

        if not Turn_on:
            #self.corrections = 0
            # Turn off
            print ("Turning off ... 
back to rest state.") 149 | else: 150 | # Start/restart 151 | self.grasp1,_ = self.grasp_type( ) 152 | 153 | print(('grasp type {} \n').format( self.grasp1 )) 154 | self.flag1=1 155 | #self.stage = 1 156 | 157 | def Confirmation(self): 158 | 159 | 160 | print(" Confirmed!") 161 | #if self.stage < 2: 162 | #self.stage += 1 163 | #self.corrections = 0 164 | #self.Choose_grasp = list( self.all_grasps ) 165 | self.final =self.grasp1 166 | print(("Grasping ... grasp type{} \n").format( self.grasp1 )) 167 | self.flag1=None 168 | # Do the action 169 | #else: 170 | #print ('Releasing ... \n') 171 | #self.System_power( 0 ) 172 | 173 | 174 | def Cancellation(self): 175 | 176 | 177 | 178 | #if self.stage > 0: 179 | print(" Cancelled! \n") 180 | self.flag1 =None 181 | 182 | # Start a keras thread which will classify the frame returned by openCV 183 | #keras_thread = MyThread() 184 | #keras_thread.start() 185 | #keras_thread.run_camera() 186 | #time.sleep(2) 187 | #keras_thread.close() 188 | 189 | 190 | 191 | 192 | -------------------------------------------------------------------------------- /EMG.py: -------------------------------------------------------------------------------- 1 | 2 | # for memory error >> try to change the float 64 to float 32 3 | import numpy as np 4 | #import scipy.io as sio 5 | import pandas as pd 6 | #import matplotlib.pyplot as plt 7 | from scipy.signal import butter,lfilter,filtfilt 8 | #from sklearn.neighbors import KNeighborsClassifier 9 | #from sklearn import svm 10 | #from scipy import stats 11 | #from sklearn.linear_model import SGDClassifier 12 | from sklearn.externals import joblib 13 | import sys 14 | #import test 15 | import time 16 | #from test import MyoRaw 17 | import open_myo as myo 18 | import threading 19 | #import GP 20 | class RealTime(): 21 | 22 | def __init__(self): 23 | #super(RealTime, self).__init__() 24 | #self.setupUi(self) 25 | 26 | self.EMG = np.empty( [0, 8] ) 27 | self.predictions_array = [] 28 | self.p=np.empty([0,8]) 29 | self.emg_total = np.empty( [0, 8] ) 30 | self.iteration = 0 31 | self.Flag_Graph0=None 32 | self.Flag_Graph=None 33 | self.Flag_Predict =None 34 | self.prediction = None 35 | self.stop_request =True 36 | 37 | #self.set_GP_instance(GP) 38 | 39 | def set_GP_instance(self,GP): 40 | self.GP=GP 41 | 42 | 43 | #search on Hampel filter to remove spikes. 
and make a notch filter at 50 Hz
    def filteration(self, data, sample_rate=2000.0, cut_off=20.0, order=5, ftype='highpass'):
        # 5th-order Butterworth high-pass at cut_off Hz (normalised by Nyquist).
        nyq = .5 * sample_rate
        b, a = butter(order, cut_off / nyq, btype=ftype)
        d = lfilter(b, a, data, axis=0)
        return pd.DataFrame(d)

    def MES_analysis_window(self, df, width, tau, win_num):
        # Sliding analysis window: window win_num covers [win_num*tau, win_num*tau + width).
        start = win_num * tau
        end = start + width
        df_2 = df.iloc[start:end]
        return end, df_2

    def features_extraction(self, df, th=0):
        # F1 : mean absolute value (MAV)
        MAV = abs(df.mean(axis=0))
        MAV = list(MAV)
        WL = []
        SSC = []
        ZC = []
        for col, series in df.iteritems():
            # F2 : wave length (WL)
            s = abs(np.array(series.iloc[:-1]) - np.array(series.iloc[1:]))
            WL_result = np.sum(s)
            WL.append(WL_result)

            # F3 : zero crossing (ZC)
            _1starray = np.array(series.iloc[:-1])
            _2ndarray = np.array(series.iloc[1:])
            ZC.append(((_1starray * _2ndarray < 0) & (abs(_1starray - _2ndarray) >= th)).sum())

            # F4 : slope sign change (SSC)
            _1st = np.array(series.iloc[:-2])
            _2nd = np.array(series.iloc[1:-1])
            _3rd = np.array(series.iloc[2:])
            SSC.append(((((_2nd - _1st) * (_2nd - _3rd)) > 0) & (
                ((abs(_2nd - _1st)) >= th) | ((abs(_2nd - _3rd)) >= th))).sum())

        features_array = np.array([MAV, WL, ZC, SSC]).T
        return features_array

    def get_predictors(self, emg, width=512, tau=128):
        x = []
        end = 0
        win_num = 0
        while (len(emg) - end) >= width:
            end, window_df = self.MES_analysis_window(emg, width, tau, win_num)
            win_num = win_num + 1
            ff = self.features_extraction(window_df)
            x.append(ff)

        predictors_array = np.array(x)
        nsamples, nx, ny = predictors_array.shape
        predictors_array_2d = predictors_array.reshape((nsamples, nx * ny))
        return np.nan_to_num(predictors_array_2d)

    """
    def predict(self,emg,tau=128):
        #emg = np.random.rand(512,8)
        #global b,emg_total,iteration
        self.emg_total = np.append(self.emg_total, self.EMG, axis=0)
        print (self.emg_total.shape)
        if self.emg_total.shape[0] == 512:
            data = pd.DataFrame(self.emg_total)
            filtered_emg = self.filteration(data, sample_rate=200)
            predictors_test = self.get_predictors(filtered_emg)
            self.emg_total = self.emg_total[128:]
            filename = 'EMG_hanna_model2.pickle'
            pickled_clf = joblib.load(filename)
            self.EMG = np.empty([0,8])
            return pickled_clf.predict(predictors_test)
        self.EMG = np.empty([0,8])
        return 0
    """

    def predict(self, path):
        if self.emg_total.shape[0] >= 512:
            self.Flag_Predict = 1
            self.emg_total = np.append(self.emg_total, self.EMG[:128], axis=0)
            self.EMG = self.EMG[128:]
            data = pd.DataFrame(self.emg_total)
            filtered_emg = self.filteration(data, sample_rate=200)
            predictors_test = self.get_predictors(filtered_emg)
            self.emg_total = self.emg_total[128:]
            pickled_clf = joblib.load(path)
            return pickled_clf.predict(predictors_test)
        else:
            n = self.EMG.shape[0]
            self.emg_total = np.append(self.emg_total, self.EMG[:n], axis=0)
            self.EMG = self.EMG[n:]
            return np.array([])
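    # Hedged sketch for the notch-filter TODO in the comment above: a 50 Hz IIR
    # notch via scipy.signal.iirnotch (assumes a SciPy version that ships it).
    # w0 is the mains frequency normalised by Nyquist; Q sets the notch width.
    #
    #   from scipy.signal import iirnotch
    #   def notch_50hz(data, sample_rate=200.0, Q=30.0):
    #       b, a = iirnotch(50.0 / (0.5 * sample_rate), Q)
    #       return pd.DataFrame(lfilter(b, a, data, axis=0))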
    def start_MYO(self):
        myo_mac_addr = myo.get_myo()
        print("MAC address: %s" % myo_mac_addr)

        self.myo_device = myo.Device()
        self.myo_device.services.sleep_mode(1)  # never sleep
        self.myo_device.services.set_leds([128, 128, 255], [128, 128, 255])  # purple logo and bar LEDs
        self.myo_device.services.vibrate(1)  # short vibration
        fw = self.myo_device.services.firmware()
        print("Firmware version: %d.%d.%d.%d \n" % (fw[0], fw[1], fw[2], fw[3]))

        batt = self.myo_device.services.battery()
        print("Battery level: %d" % batt)

        # myo_device.services.emg_filt_notifications()
        self.myo_device.services.emg_raw_notifications()
        # myo_device.services.imu_notifications()
        # myo_device.services.classifier_notifications()
        # myo_device.services.battery_notifications()
        self.myo_device.services.set_mode(myo.EmgMode.RAW, myo.ImuMode.OFF, myo.ClassifierMode.OFF)
        self.myo_device.add_emg_event_handler(self.process_emg)
        # myo_device.add_emg_event_handler(led_emg)
        # myo_device.add_imu_event_handler(process_imu)
        # myo_device.add_sync_event_handler(process_sync)
        # myo_device.add_classifier_event_hanlder(process_classifier)

    def final(self, emg):
        print(":D")
        print(emg.shape)
        # print emg[:,0]  ## if you want a single channel
        self.EMG = np.empty([0, 8])

    def process_emg(self, emg):
        # Note on notification payloads: filtered EMG arrives as one 8-element
        # tuple per notification, while RAW EMG arrives as a list of two
        # 8-element tuples (two consecutive samples) per notification.
        ## for RAW_EMG
        self.EMG = np.append(self.EMG, emg, axis=0)
        #print (self.EMG.shape[0])
        #if self.Flag_Predict == True and self.EMG.shape[0] == 128:
            #self.predictions_array.append(self.predict( self.EMG ))
            #self.p = np.append(self.p, self.predict(self.EMG), axis=0)
            #self.prediction = self.predict( self.EMG )
        #elif self.Flag_Graph == True and self.EMG.shape[0] == 1000:
            #self.EMG = np.empty([0,8])

        ## For Filtered_EMG
        # b = np.append(b, [[emg[0],emg[1],emg[2],emg[3],emg[4],emg[5],emg[6],emg[7]]], 0)
        # if b.shape[0] == 512:
        #     final(b)

    def process_imu(self, quat, acc, gyro):
        print(quat)

    def process_sync(self, arm, x_direction):
        print(arm, x_direction)

    def process_classifier(self, pose):
        print(pose)

    def process_battery(self, batt):
        print("Battery level: %d" % batt)

    def led_emg(self, emg):
        # Light the logo red while channel 0 is strongly active.
        if emg[0] > 80:
            self.myo_device.services.set_leds([255, 0, 0], [128, 128, 255])
        else:
            self.myo_device.services.set_leds([128, 128, 255], [128, 128, 255])

    def ReadEMG(self):
        while self.stop_request:
            time.sleep(0.09)
            if self.myo_device.services.waitForNotifications(1):
                continue
            print("Waiting...")

"""
Real = RealTime()
Real.start_MYO()
Real.stop_request = True
threading.Thread( target=Real.ReadEMG ).start()
#time.sleep(3)
Real.stop_request = False
print ("Hi")

while True:
    if Real.myo_device.services.waitForNotifications(1):
        print (Real.EMG.shape[0])
        continue
    print("Waiting...")
"""
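# Window arithmetic (hedged; it just follows predict/get_predictors above): at
# the Myo's 200 Hz raw-EMG rate, a 512-sample analysis window spans 2.56 s and
# the 128-sample stride yields a new prediction every 0.64 s. Each window gives
# 4 features x 8 channels = 32 predictors:
#
#   emg = np.random.rand(512, 8)                 # one full window, 8 channels
#   rt = RealTime()
#   X = rt.get_predictors(pd.DataFrame(emg))     # -> shape (1, 32)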
-------------------------------------------------------------------------------- /EMG_Model.py: -------------------------------------------------------------------------------- 1 | from sklearn.externals import joblib 2 | import numpy as np 3 | import pandas as pd 4 | from scipy.signal import butter,lfilter,filtfilt 5 | from scipy import stats 6 | from sklearn import svm 7 | class EMG_Model(): 8 | 9 | def filteration (self,data,sample_rate=2000.0,cut_off=20.0,order=5,ftype='highpass'): 10 | nyq = 0.5 * sample_rate 11 | b,a=butter(order,cut_off/nyq,btype=ftype) 12 | d= lfilter(b,a,data,axis=0) 13 | return pd.DataFrame(d) 14 | 15 | 16 | def mean_std_normalization (self,df): 17 | m = df.mean(axis=0) 18 | s =df.std(axis=0) 19 | normalized_df =df/m 20 | return m,s,normalized_df 21 | 22 | 23 | 24 | def MES_analysis_window (self,df,width,tau,win_num): 25 | df_2=pd.DataFrame() 26 | start= win_num*tau 27 | end= start+width 28 | df_2=df.iloc[start:end] 29 | return end,df_2 30 | 31 | 32 | 33 | def prepare_df(self,rep,normalized_emg): 34 | df=normalized_emg.loc[rep] 35 | df=df.reset_index() 36 | LL=df['label'] 37 | df=df.drop(['rep','label'],1) 38 | 39 | return df,LL 40 | 41 | def features_extraction (self,df,th=0): 42 | #F1 : mean absolute value (MAV) 43 | MAV=abs(df.mean(axis=0)) 44 | 45 | MAV=list(MAV) 46 | WL = [] 47 | SSC= [] 48 | ZC = [] 49 | for col,series in df.iteritems(): 50 | #F2 : wave length (WL) 51 | s=abs(np.array(series.iloc[:-1])- np.array(series.iloc[1:])) 52 | WL_result=np.sum(s) 53 | WL.append( WL_result) 54 | 55 | #F3 : zero crossing(ZC) 56 | _1starray=np.array(series.iloc[:-1]) 57 | _2ndarray=np.array(series.iloc[1:]) 58 | ZC.append(((_1starray*_2ndarray<0) & (abs(_1starray - _2ndarray)>=th) ).sum()) 59 | 60 | 61 | #F4 : slope sign change(SSC) 62 | _1st=np.array(series.iloc[:-2]) 63 | _2nd=np.array(series.iloc[1:-1]) 64 | _3rd=np.array(series.iloc[2:]) 65 | SSC.append(((((_2nd - _1st)*(_2nd - _3rd))>0) &(((abs(_2nd - _1st))>=th) | ((abs(_2nd - _3rd))>=th))).sum()) 66 | 67 | features_array=np.array([MAV,WL,ZC,SSC]).T 68 | return features_array 69 | 70 | def get_predictors_and_outcomes(self,intended_movement_labels,rep,emg,label_series,width=512,tau=128): 71 | 72 | x=[];y=[]; 73 | end=0; win_num=0; 74 | while((len(emg)-end) >= width): 75 | end,window_df=self.MES_analysis_window(emg,width,tau,win_num) 76 | win_num=win_num + 1 77 | 78 | ff=self.features_extraction(window_df) 79 | x.append(ff) 80 | 81 | expected_labels=label_series.iloc[win_num*tau: ((win_num*tau)+width)] 82 | mode,count=stats.mode(expected_labels) 83 | y.append(mode) 84 | 85 | predictors_array=np.array(x) 86 | outcomes_array=np.array(y) 87 | 88 | nsamples, nx, ny = predictors_array.shape 89 | predictors_array_2d = predictors_array.reshape((nsamples,nx*ny)) 90 | 91 | return np.nan_to_num(predictors_array_2d),np.nan_to_num(outcomes_array) 92 | 93 | 94 | def get_predictors(self,emg,width=512,tau=128): 95 | 96 | x=[]; 97 | end=0; win_num=0; 98 | while((len(emg)-end) >= width): 99 | end,window_df=self.MES_analysis_window(emg,width,tau,win_num) 100 | win_num=win_num + 1 101 | 102 | ff=self.features_extraction(window_df) 103 | x.append(ff) 104 | predictors_array=np.array(x) 105 | 106 | nsamples, nx, ny = predictors_array.shape 107 | predictors_array_2d = predictors_array.reshape((nsamples,nx*ny)) 108 | 109 | return np.nan_to_num(predictors_array_2d) 110 | 111 | def prepare_data(self,intended_movement_labels=[0,1,2,3],rows=8000): 112 | emg_set = {} 113 | 114 | e1 = pd.read_csv( self.path1, header=None ) 115 | e2 = 
pd.read_csv( self.path2, header=None ) 116 | e3 = pd.read_csv( self.path3, header=None ) 117 | e4 = pd.read_csv( self.path4, header=None ) 118 | rows = min( e1.shape[0], e2[1].shape[0], e3[2].shape[0], e4[3].shape[0] ) 119 | e1 = pd.read_csv( self.path1, nrows=rows, header=None ) 120 | e2 = pd.read_csv( self.path2, nrows=rows, header=None ) 121 | e3 = pd.read_csv( self.path3, nrows=rows, header=None ) 122 | e4 = pd.read_csv( self.path4, nrows=rows, header=None ) 123 | e = [e1, e2, e3, e4] 124 | 125 | rep = [] 126 | reps =rows // 6 if rows % 6 == 0 else (rows //6)+1 127 | 128 | for i in range(1,7): 129 | for j in range(0,reps): 130 | rep.append(i) 131 | 132 | rep = rep[:rows] 133 | 134 | 135 | for i in intended_movement_labels: 136 | #emg_set[i] = pd.read_csv('models/' +str(i)+".csv" ,nrows =rows,header=None) 137 | emg_set[i] =e[i] 138 | emg_set[i]['label'] = i 139 | emg_set[i].columns = [1,2,3,4,5,6,7,8,'label'] 140 | emg_set[i]['rep'] = rep 141 | 142 | data = pd.DataFrame() 143 | 144 | for i in intended_movement_labels: 145 | data = pd.concat([data,emg_set[i]]) 146 | 147 | data = data.drop_duplicates().reset_index(drop=True) 148 | dataLabel=data['label'] 149 | dataRep=data['rep'] 150 | data=data.drop(['label','rep'],1) 151 | 152 | normalized_emg=self.filteration (data,sample_rate=200) 153 | 154 | normalized_emg['label'] = dataLabel 155 | 156 | normalized_emg['rep'] = dataRep 157 | normalized_emg=normalized_emg.set_index('rep') 158 | rep_train=[1,3,6,4] 159 | normalized_emg_train,LL_train=self.prepare_df(rep_train,normalized_emg) 160 | predictors_train,outcomes_train=self.get_predictors_and_outcomes(intended_movement_labels,rep_train,normalized_emg_train,LL_train) 161 | 162 | #prepare test part 163 | rep_test=[2,5] 164 | normalized_emg_test,LL_test=self.prepare_df(rep_test,normalized_emg) 165 | 166 | #normalized_emg_test 167 | predictors_test,outcomes_test=self.get_predictors_and_outcomes(intended_movement_labels,rep_test,normalized_emg_test,LL_test) 168 | 169 | predictors_test = self.get_predictors(normalized_emg_test) 170 | return predictors_train,outcomes_train,predictors_test,outcomes_test 171 | 172 | def svm_model(self,predictors_train,outcomes_train): 173 | 174 | model=svm.LinearSVC(dual=False) # at C= 0.05:0.09 gives little increase in accuracy, around 0.4% 175 | model.fit(predictors_train,outcomes_train) 176 | return model 177 | 178 | def accuracy(self,model): 179 | return model.score(self.predictors_test,self.outcomes_test)*100 180 | 181 | def save_model(self,model,filename): 182 | joblib.dump(model, filename) 183 | 184 | 185 | 186 | def all_steps(self,path1,path2,path3,path4,file_name,movements=[0,1,2,3]): 187 | self.path1=path1 188 | self.path2=path2 189 | self.path3=path3 190 | self.path4=path4 191 | predictors_train,outcomes_train,self.predictors_test,self.outcomes_test = self.prepare_data(movements) 192 | model = self.svm_model(predictors_train,outcomes_train) 193 | 194 | #if you wanna accuracy 195 | print (self.accuracy(model)) 196 | 197 | #save pickle 198 | self.save_model(model,file_name) 199 | 200 | if __name__ == '__main__': 201 | e=EMG_Model() 202 | e.all_steps(path1="0.csv",path2="1.csv",path3="2.csv",path4="3.csv",file_name="Hannon.pickle") 203 | 204 | -------------------------------------------------------------------------------- /GP.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import matplotlib.pyplot as plt 3 | from PyQt4.uic import loadUiType 4 | from PyQt4 import QtCore, QtGui 5 | from 
PyQt4.QtCore import QObject,pyqtSignal 6 | from PyQt4.QtGui import * 7 | import serial # import Serial Library 8 | #from drawnow import * 9 | import pyqtgraph as pg 10 | import pyqtgraph 11 | import random 12 | import sys, time 13 | import EMG 14 | import poweroff 15 | import threading 16 | from bluepy import btle 17 | import CV 18 | import EMG_Model 19 | import CV_realtime 20 | #import collections 21 | import Queue as queue ##If python 2 22 | #import queue ##If python 3 23 | import pandas as pd 24 | import cv2 25 | Ui_MainWindow, QMainWindow = loadUiType('GP.ui') 26 | 27 | class XStream(QObject): 28 | _stdout = None 29 | _stderr = None 30 | 31 | messageWritten = pyqtSignal(str) 32 | 33 | def flush( self ): 34 | pass 35 | 36 | def fileno( self ): 37 | return -1 38 | 39 | def write( self, msg ): 40 | if ( not self.signalsBlocked() ): 41 | self.messageWritten.emit(unicode(msg)) 42 | 43 | @staticmethod 44 | def stdout(): 45 | if ( not XStream._stdout ): 46 | XStream._stdout = XStream() 47 | sys.stdout = XStream._stdout 48 | return XStream._stdout 49 | 50 | @staticmethod 51 | def stderr(): 52 | if ( not XStream._stderr ): 53 | XStream._stderr = XStream() 54 | sys.stderr = XStream._stderr 55 | return XStream._stderr 56 | class OwnImageWidget( QtGui.QWidget ): 57 | def __init__(self, parent=None): 58 | super( OwnImageWidget, self ).__init__( parent ) 59 | self.image = None 60 | 61 | def setImage(self, image): 62 | self.image = image 63 | sz = image.size() 64 | self.setMinimumSize( sz ) 65 | self.update() 66 | 67 | def paintEvent(self, event): 68 | qp = QtGui.QPainter() 69 | qp.begin( self ) 70 | if self.image: 71 | qp.drawImage( QtCore.QPoint( 0, 0 ), self.image ) 72 | qp.end() 73 | 74 | class LoadImageThread( QtCore.QThread ): 75 | def __init__(self, file, w, h): 76 | QtCore.QThread.__init__( self ) 77 | self.file = file 78 | self.w = w 79 | self.h = h 80 | def __del__(self): 81 | self.wait() 82 | def run(self): 83 | self.emit( QtCore.SIGNAL( 'showImage(QString, int, int)' ), self.file, self.w, self.h ) 84 | class LoadImageThread2( QtCore.QThread ): 85 | def __init__(self, file, w, h): 86 | QtCore.QThread.__init__( self ) 87 | self.file = file 88 | self.w = w 89 | self.h = h 90 | def __del__(self): 91 | self.wait() 92 | def run(self): 93 | self.emit( QtCore.SIGNAL( 'showImage2(QString, int, int)' ), self.file, self.w, self.h ) 94 | class Main(QMainWindow, Ui_MainWindow): 95 | 96 | 97 | def __init__(self, parent=None): 98 | #pyqtgraph.setConfigOption('background', 'w') # before loading widget 99 | super(Main, self).__init__() 100 | self.setupUi(self) 101 | self.Real = EMG.RealTime() 102 | self.Power=poweroff.poweroff() 103 | self.EMG_Modeling = EMG_Model.EMG_Model() 104 | self.cv = CV.CV() 105 | #self.Real.set_GP_instance(self) 106 | 107 | ##TextBrowser 108 | XStream.stdout().messageWritten.connect( self.textBrowser.insertPlainText ) 109 | XStream.stdout().messageWritten.connect( self.textBrowser.ensureCursorVisible ) 110 | XStream.stderr().messageWritten.connect( self.textBrowser.insertPlainText ) 111 | XStream.stderr().messageWritten.connect( self.textBrowser.ensureCursorVisible ) 112 | 113 | #self.emgplot = pg.PlotWidget( name='EMGplot' ) 114 | self.emgplot.setRange( QtCore.QRectF( -50, -200, 1000, 1400 ) ) 115 | self.emgplot.disableAutoRange() 116 | self.emgplot.setTitle( "EMG" ) 117 | 118 | self.emgcurve = [] 119 | for i in range( 8 ): 120 | c = self.emgplot.plot( pen=(i, 10) ) 121 | c.setPos( 0, i * 150 ) 122 | self.emgcurve.append( c ) 123 | 124 | self.emgcurve0 = 
[self.EMG1,self.EMG2,self.EMG3,self.EMG4,self.EMG5\ 125 | ,self.EMG6,self.EMG7,self.EMG8] 126 | for i in range (8): 127 | self.emgcurve0[i].plotItem.showGrid(True, True, 0.7) 128 | #self.emgcurve0[i].plotItem.setRange(yRange=[0, 1]) 129 | 130 | 131 | self.pushButton.clicked.connect(self.Real.start_MYO) 132 | self.pushButton_2.clicked.connect( self.start_thread2 ) # Start Predict 133 | self.pushButton_3.clicked.connect( self.stop_thread2 ) # Stop Predict 134 | self.pushButton_4.clicked.connect( self.disconnect_MYO) 135 | self.pushButton_5.clicked.connect(self.Power.power_off) 136 | self.pushButton_6.clicked.connect( self.clear_textBrowser ) 137 | self.pushButton_7.clicked.connect( self.start_thread1 )# start Graph1 138 | self.pushButton_8.clicked.connect( self.stop_thread1 ) 139 | self.pushButton_9.clicked.connect( self.file_save_csv ) 140 | self.pushButton_11.clicked.connect( self.start_thread0 ) 141 | self.pushButton_12.clicked.connect( self.stop_thread0 ) 142 | self.pushButton_10.clicked.connect( self.saveEMGModel ) 143 | self.pushButton_10.setStyleSheet( "background-color: red" ) 144 | self.pushButton_13.clicked.connect( self.browseCSVEMGModel1 ) 145 | self.pushButton_14.clicked.connect( self.browseCSVEMGModel2 ) 146 | self.pushButton_15.clicked.connect( self.browseCSVEMGModel3 ) 147 | self.pushButton_16.clicked.connect( self.browseCSVEMGModel4 ) 148 | self.pushButton_21.clicked.connect( self.joinCSV1 ) 149 | self.pushButton_22.clicked.connect( self.joinCSV2 ) 150 | self.pushButton_23.clicked.connect( self.saveJoinCSV ) 151 | self.pushButton_17.clicked.connect( self.browsePickleEMGModel1 ) 152 | self.pushButton_18.clicked.connect( self.browsePickleEMGModel2 ) 153 | self.pushButton_19.clicked.connect( self.browseCVModel ) 154 | self.pushButton_20.clicked.connect( self.start_thread4 ) 155 | self.pushButton_20.setStyleSheet( "background-color: green" ) 156 | self.pushButton_24.clicked.connect( self.stop_thread4 ) 157 | self.pushButton_24.setStyleSheet( "background-color: red" ) 158 | self.pushButton_25.clicked.connect( QtCore.QCoreApplication.instance().quit ) 159 | self.path1 = self.path2 = self.path3 = self.path4 = self.path5 = self.path6 = self.path7 = self.path8 = None 160 | 161 | ## To change text Color to Red Color 162 | palette = QtGui.QPalette() 163 | palette.setColor(QtGui.QPalette.Foreground, QtCore.Qt.red) 164 | self.label.setPalette(palette) 165 | 166 | ############################################################## 167 | self.CV_realtime = CV_realtime.MyThread() 168 | self.CV_realtimeFlag = None 169 | self.CV_realtimeFlag2 = 0 170 | self.capture_thread = None 171 | self.q = queue.Queue() 172 | ############################################################## 173 | self.startButton.clicked.connect( self.start_camera ) 174 | self.pushButton_26.clicked.connect( self.close_camera ) 175 | self.pushButton_27.clicked.connect( self.start_cvRealtime ) 176 | self.pushButton_28.clicked.connect( self.stop_cvRealtime ) 177 | self.pushButton_28.setEnabled( False ) 178 | self.window_width = self.ImgWidget.frameSize().width() 179 | self.window_height = self.ImgWidget.frameSize().height() 180 | self.ImgWidget = OwnImageWidget( self.ImgWidget ) 181 | self.timer = QtCore.QTimer( self ) 182 | self.timer.timeout.connect( self.update_frame ) 183 | self.timer.start( 1 ) 184 | #### 185 | self.pushButton_29.clicked.connect( self.browsePickleEMGModel3 ) 186 | self.pushButton_30.clicked.connect( self.start_thread5 ) 187 | self.pushButton_31.clicked.connect( self.stop_thread5 ) 188 | 
        self.pushButton_30.setStyleSheet( "background-color: green" )
        self.pushButton_31.setStyleSheet( "background-color: red" )
        ###################################################################################################################
        self.thread1 = None
        self.thread2 = None
        self.event_stop_thread0 = threading.Event()
        self.event_stop_thread1 = threading.Event()
        self.event_stop_thread2 = threading.Event()
        self.event_stop_thread3 = threading.Event()
        self.event_stop_thread4 = threading.Event()
        self.event_stop_thread5 = threading.Event()

    def start_camera(self):
        self.ImgWidget.setHidden( False )
        self.running = True
        self.capture_thread = threading.Thread( target=self.grab, args=(0, self.q, 1920, 1080, 30) )
        self.capture_thread.daemon = True
        self.capture_thread.start()
        self.startButton.setEnabled( False )
        self.pushButton_26.setEnabled( True )
        self.startButton.setText( 'Camera is live' )

    def grab(self, cam, queue, width, height, fps):
        self.capture = cv2.VideoCapture( cam )
        self.capture.set( cv2.CAP_PROP_FRAME_WIDTH, width )
        self.capture.set( cv2.CAP_PROP_FRAME_HEIGHT, height )
        self.capture.set( cv2.CAP_PROP_FPS, fps )

        while self.running:
            frame = {}
            # Get the original frame from video capture
            retval, original_frame = self.capture.read()
            # Resize the frame to fit the imageNet default input size
            if self.CV_realtimeFlag is not None:
                self.CV_realtime.frame_to_predict = cv2.resize( original_frame, (224, 224) )
            if self.checkBox.isChecked():
                # Add text label and network score to the video capture
                cv2.putText( original_frame, "Label: %s | Score: %.2f" % (self.CV_realtime.label, self.CV_realtime.score), (15, 60), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (0, 255, 0), 2 )
                cv2.putText( original_frame, "Name: %s | Class: %d " % (self.CV_realtime.grasp_name, self.CV_realtime.grasp_number), (0, 25), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 0, 0), 2 )
            self.capture.grab()
            # retval, img = capture.retrieve( 0 )
            frame["img"] = original_frame

            if queue.qsize() < 10:
                queue.put( frame )
            else:
                print( queue.qsize() )

    def start_cvRealtime(self):
        # self.CV_realtime = CV_realtime.MyThread()
        self.CV_realtimeFlag = 1
        if self.CV_realtimeFlag2 == 0:
            self.CV_realtimeFlag2 = 1
            self.CV_realtime.daemon = True
            self.CV_realtime.start()

        self.pushButton_27.setEnabled( False )
        self.pushButton_28.setEnabled( True )

    def stop_cvRealtime(self):
        self.CV_realtime.classification = False  # stops the loop in MyThread.run
        self.pushButton_27.setEnabled( True )
        self.pushButton_28.setEnabled( False )
        self.CV_realtime.frame_to_predict = None
        self.CV_realtimeFlag = None
        # self.CV_realtime = CV_realtime.MyThread()
        # self.capture.release()
        # cv2.destroyAllWindows()

    def update_frame(self):
        if not self.q.empty():
            # self.startButton.setText( 'Camera is live' )
            frame = self.q.get()
            img = frame["img"]

            img_height, img_width, img_colors = img.shape
            scale_w = float( self.window_width ) / float( img_width )
            scale_h = float( self.window_height ) / float( img_height )
            scale = min( [scale_w, scale_h] )

            if scale == 0:
                scale = 1

            img = cv2.resize( img, None, fx=scale, 
fy=scale, interpolation=cv2.INTER_CUBIC ) 275 | img = cv2.cvtColor( img, cv2.COLOR_BGR2RGB ) 276 | height, width, bpc = img.shape 277 | bpl = bpc * width 278 | image = QtGui.QImage( img.data, width, height, bpl, QtGui.QImage.Format_RGB888 ) 279 | self.ImgWidget.setImage( image ) 280 | 281 | def close_camera(self, event): 282 | # global running 283 | self.ImgWidget.setHidden( True ) 284 | self.running = False 285 | self.startButton.setText( 'Start Video' ) 286 | self.startButton.setEnabled( True ) 287 | self.pushButton_26.setEnabled( False ) 288 | self.CV_realtime.frame_to_predict = None 289 | # cv2.destroyAllWindows() 290 | def ReadEMG(self): 291 | while (True): 292 | #time.sleep(0.05) 293 | if self.Real.myo_device.services.waitForNotifications( 1 ): 294 | continue 295 | break 296 | def start_thread0(self):## Graph0 297 | self.Real.EMG = np.empty( [0, 8] ) 298 | self.event_stop_thread0.clear() 299 | #threading.Thread( target=lambda: self.ReadEMG() ).start() 300 | #self.flag_thread0 = True 301 | self.thread0 = threading.Thread(target = self.loop0) 302 | self.thread0.daemon = True 303 | self.thread0.start() 304 | 305 | def start_thread1(self):##Graph1 306 | self.Real.EMG = np.empty( [0, 8] ) 307 | self.Real.emg_total = np.empty( [0, 8] ) 308 | #self.flag_thread1 = True 309 | self.event_stop_thread1.clear() 310 | self.thread1 = threading.Thread(target = self.loop1) 311 | self.thread1.daemon = True 312 | self.thread1.start() 313 | def start_thread2(self):##Predict 314 | self.Real.EMG = np.empty( [0, 8] ) 315 | self.Real.emg_total = np.empty( [0, 8] ) 316 | self.cv.q.queue.clear() 317 | #threading.Thread( target=self.ReadEMG() ).start() 318 | #self.flag_thread2 = True 319 | self.event_stop_thread2.clear() 320 | self.thread2 = threading.Thread(target = self.loop2) 321 | self.thread2.start() 322 | 323 | def start_thread4(self): ##System 324 | self.Real.EMG = np.empty( [0, 8] ) 325 | self.Real.emg_total = np.empty( [0, 8] ) 326 | self.cv.q.queue.clear() 327 | #self.flag_thread4 = True 328 | self.event_stop_thread4.clear() 329 | self.thread4 = threading.Thread( target=self.loop4 ) 330 | self.thread4.start() 331 | def start_thread5(self): ## Online_System 332 | self.Real.EMG = np.empty( [0, 8] ) 333 | self.Real.emg_total = np.empty( [0, 8] ) 334 | self.CV_realtime.q.queue.clear() 335 | self.CV_realtime.stage = 0 336 | self.CV_realtime.corrections = 0 337 | self.CV_realtime.grasp1 = None 338 | self.CV_realtimeFlag = 1 339 | if (self.CV_realtimeFlag2 == 0): 340 | self.CV_realtimeFlag2 = 1 341 | self.CV_realtime.daemon = True 342 | self.CV_realtime.start() 343 | elif (self.CV_realtimeFlag2 == 1): 344 | pass 345 | #self.flag_thread4 = True 346 | self.event_stop_thread5.clear() 347 | self.thread5 = threading.Thread( target=self.loop5 ) 348 | self.thread5.start() 349 | 350 | def loop0(self): 351 | while not self.event_stop_thread0.is_set(): 352 | time.sleep( 0.5) 353 | self.update_Graph0() 354 | if self.Real.myo_device.services.waitForNotifications( 1 ): 355 | continue 356 | 357 | def loop1(self):#Graph1 358 | while not self.event_stop_thread1.is_set(): 359 | self.update_Graph1() 360 | if self.Real.myo_device.services.waitForNotifications( 1 ): 361 | continue 362 | time.sleep( 0.5 ) 363 | 364 | 365 | def loop2(self):## Predict 366 | while not self.event_stop_thread2.is_set(): 367 | if self.Real.myo_device.services.waitForNotifications( 1 ): 368 | #continue 369 | c = self.Real.predict( path=self.path7 ) 370 | if c.size ==1: 371 | self.cv.q.put( int( c ) ) 372 | print (self.cv.q.queue) 373 | 
self.someFunctionCalledFromAnotherThread2( int( c ) ) 374 | #time.sleep( 0.01 ) 375 | 376 | def loop4(self): ##System 377 | while not self.event_stop_thread4.is_set(): 378 | if self.Real.myo_device.services.waitForNotifications( 1 ): 379 | c = self.Real.predict( path=self.path8 ) 380 | if c.size ==1 : 381 | self.cv.q.put( int( c ) ) 382 | print (self.cv.q.queue) 383 | self.cv.Main_algorithm( path1=self.path9 ) 384 | #time.sleep( 0.01 ) 385 | 386 | def loop5(self): ##Online_System 387 | while not self.event_stop_thread5.is_set(): 388 | if self.Real.myo_device.services.waitForNotifications( 1 ): 389 | c = self.Real.predict( path=self.path10 ) 390 | if c.size == 1: 391 | self.CV_realtime.q.put( int( c ) ) 392 | print( self.cv.q.queue ) 393 | self.CV_realtime.Main_algorithm() 394 | if self.CV_realtime.final is not None: 395 | self.someFunctionCalledFromAnotherThread( self.CV_realtime.final ) 396 | # time.sleep( 0.01 ) 397 | def someFunctionCalledFromAnotherThread(self,grasp): 398 | if grasp == 1: 399 | thread = LoadImageThread( file="screenshots/pinch.png", w=204, h=165 ) 400 | self.connect( thread, QtCore.SIGNAL( "showImage(QString, int, int)" ), self.showImage ) 401 | thread.start() 402 | elif grasp == 2: 403 | thread = LoadImageThread( file="screenshots/palmar_neutral.png", w=238, h=158 ) 404 | self.connect( thread, QtCore.SIGNAL( "showImage(QString, int, int)" ), self.showImage ) 405 | thread.start() 406 | elif grasp == 3: 407 | thread = LoadImageThread( file="screenshots/tripod.png", w=242, h=162 ) 408 | self.connect( thread, QtCore.SIGNAL( "showImage(QString, int, int)" ), self.showImage ) 409 | thread.start() 410 | elif grasp == 4: 411 | thread = LoadImageThread( file="screenshots/palmar_pronated.png", w=219, h=165 ) 412 | self.connect( thread, QtCore.SIGNAL( "showImage(QString, int, int)" ), self.showImage) 413 | thread.start() 414 | 415 | def showImage(self, filename, w, h): 416 | pixmap = QtGui.QPixmap( filename ).scaled( w, h ) 417 | self.label_13.setPixmap( pixmap ) 418 | self.label_13.repaint() 419 | def someFunctionCalledFromAnotherThread2(self,EMG_class): 420 | if EMG_class == 1: 421 | thread = LoadImageThread2( file="screenshots/finger_spread.png", w=278, h=299 ) 422 | self.connect( thread, QtCore.SIGNAL( "showImage2(QString, int, int)" ), self.showImage2 ) 423 | thread.start() 424 | elif EMG_class == 2: 425 | thread = LoadImageThread2( file="screenshots/wrist_extension.png", w=348, h=302 ) 426 | self.connect( thread, QtCore.SIGNAL( "showImage2(QString, int, int)" ), self.showImage2 ) 427 | thread.start() 428 | elif EMG_class == 3: 429 | thread = LoadImageThread2( file="screenshots/wrist_ulnar_deviation.png", w=283, h=254) 430 | self.connect( thread, QtCore.SIGNAL( "showImage2(QString, int, int)" ), self.showImage2 ) 431 | thread.start() 432 | elif EMG_class == 0: 433 | thread = LoadImageThread2( file="screenshots/rest.png", w=353, h=254 ) 434 | self.connect( thread, QtCore.SIGNAL( "showImage2(QString, int, int)" ), self.showImage2) 435 | thread.start() 436 | 437 | def showImage2(self, filename, w, h): 438 | pixmap = QtGui.QPixmap( filename ).scaled( w, h ) 439 | self.label_15.setPixmap( pixmap ) 440 | self.label_15.repaint() 441 | 442 | def stop_thread0(self): 443 | self.event_stop_thread0.set() 444 | self.thread0.join() 445 | self.thread0 = None 446 | self.Real.EMG = np.empty( [0, 8] ) 447 | 448 | def stop_thread1(self): 449 | self.event_stop_thread1.set() 450 | self.thread1.join() 451 | self.thread1 = None 452 | self.Real.EMG = np.empty( [0, 8] ) 453 | 454 | 455 | def 
stop_thread2(self): 456 | self.event_stop_thread2.set() 457 | self.thread2.join() 458 | self.thread2 = None 459 | self.Real.EMG = np.empty( [0, 8] ) 460 | #self.Real.Flag_Graph= False 461 | def stop_thread3(self): 462 | self.event_stop_thread3.set() 463 | self.thread3.join() 464 | self.thread3 = None 465 | self.Real.EMG = np.empty( [0, 8] ) 466 | #self.Real.Flag_Graph0= False 467 | def stop_thread4(self): ## System 468 | self.event_stop_thread4.set() 469 | self.thread4.join() 470 | self.thread4 = None 471 | self.Real.EMG = np.empty( [0, 8] ) 472 | self.Real.emg_total = np.empty( [0, 8] ) 473 | self.cv.q.queue.clear() 474 | self.c = np.array( [] ) 475 | print( ("Thread Of System Closed ") ) 476 | 477 | def stop_thread5(self): ##Online_System 478 | self.event_stop_thread5.set() 479 | self.thread5.join() 480 | self.thread5 = None 481 | self.Real.emg_total = np.empty( [0, 8] ) 482 | self.Real.EMG = np.empty( [0, 8] ) 483 | self.CV_realtime.q.queue.clear() 484 | print( ("Thread Of Online System is Closed ") ) 485 | 486 | def clear_textBrowser(self): 487 | self.textBrowser.clear() 488 | 489 | def disconnect_MYO(self): 490 | print ("attempting to Disconnect") 491 | self.Real.myo_device.services.vibrate( 1 ) # short vibration 492 | #btle.Peripheral.disconnect() 493 | self.Real.myo_device.services.disconnect_MYO() 494 | print ("Successfully Disconnected") 495 | 496 | 497 | def update_Graph0(self): 498 | 499 | for i in range( 8 ): 500 | self.emgcurve0[i].plot(pen=(i, 10)).setData( self.Real.EMG[:,i] ) 501 | 502 | #self.EMG1.plot(self.Real.b[:,0], pen=pen1,clear=True) 503 | #self.EMG2.plot(self.Real.b[:,1], pen=pen2, clear=True) 504 | #app.processEvents() 505 | if self.Real.EMG.shape[0] >=150 : 506 | self.Real.EMG = np.delete(self.Real.EMG,slice(0,20), axis=0) 507 | 508 | 509 | 510 | def update_Graph1(self): 511 | 512 | for i in range( 8 ): 513 | self.emgcurve[i].setData( self.Real.EMG[:,i] ) 514 | app.processEvents() 515 | 516 | if self.Real.EMG.shape[0] % 5 ==0 : 517 | self.Real.EMG = np.delete(self.Real.EMG,[0], axis=0) 518 | 519 | def file_save_csv(self): 520 | 521 | self.path = QtGui.QFileDialog.getSaveFileName( self, 'Save Point', "", '*.csv' ) 522 | print (" Path = %s" % self.path) 523 | self.records = int( self.lineEdit.text() ) 524 | self.Real.EMG = np.empty( [0, 8] ) 525 | self.Real.emg_total = np.empty( [0, 8] ) 526 | self.event_stop_thread3 = threading.Event() 527 | self.event_stop_thread3.clear() 528 | self.thread3 = threading.Thread( target=self.save_loop) 529 | self.thread3.start() 530 | 531 | def save_loop(self): 532 | while self.Real.EMG.shape[0] < self.records: 533 | print (self.Real.EMG.shape[0]) 534 | if self.Real.myo_device.services.waitForNotifications( 1 ): 535 | continue 536 | 537 | np.savetxt( str( self.path ) + ".csv", self.Real.EMG, delimiter=",", fmt='%10.5f' ) 538 | self.Real.EMG = np.empty( [0, 8] ) 539 | print ("saved Sucessfully at %s" % self.path) 540 | self.thread3 = None 541 | 542 | def browseCSVEMGModel1(self): 543 | 544 | filepath = QtGui.QFileDialog.getOpenFileName( self, 'Single File', "", '*.csv' ) 545 | self.lineEdit_2.setText( filepath ) 546 | self.path1 = str( filepath ) 547 | print (" Path = %s" % self.path1) 548 | # self.records = int( self.lineEdit.text() ) 549 | 550 | def browseCSVEMGModel2(self): 551 | 552 | filepath = QtGui.QFileDialog.getOpenFileName( self, 'Single File', "", '*.csv' ) 553 | self.lineEdit_6.setText( filepath ) 554 | self.path2 = str( filepath ) 555 | print (" Path = %s" % self.path2) 556 | 557 | def browseCSVEMGModel3(self): 558 | 
        filepath = QtGui.QFileDialog.getOpenFileName( self, 'Single File', "", '*.csv' )
        self.lineEdit_7.setText( filepath )
        self.path3 = str( filepath )
        print (" Path = %s" % self.path3)

    def browseCSVEMGModel4(self):
        filepath = QtGui.QFileDialog.getOpenFileName( self, 'Single File', "", '*.csv' )
        self.lineEdit_8.setText( filepath )
        self.path4 = str( filepath )
        print (" Path = %s" % self.path4)

    def saveEMGModel(self):
        if self.path1 is not None and self.path2 is not None and self.path3 is not None and self.path4 is not None:
            filepath = QtGui.QFileDialog.getSaveFileName( self, 'Save Point', "", '*.pickle' )
            filepath = filepath + ".pickle"
            print (" path is = %s" % str( filepath ))
            self.EMG_Modeling.all_steps( path1=self.path1, path2=self.path2, path3=self.path3, path4=self.path4,
                                         file_name=str( filepath ) )
            print (" Saved successfully at = %s" % filepath)

    def joinCSV1(self):
        self.path5 = QtGui.QFileDialog.getOpenFileName( self, 'Single File', "", '*.csv' )
        self.lineEdit_9.setText( self.path5 )
        print (" Path = %s" % self.path5)

    def joinCSV2(self):
        self.path6 = QtGui.QFileDialog.getOpenFileName( self, 'Single File', "", '*.csv' )
        self.lineEdit_10.setText( self.path6 )
        print (" Path = %s" % self.path6)

    def saveJoinCSV(self):
        if self.path5 is not None and self.path6 is not None:
            filepath = QtGui.QFileDialog.getSaveFileName( self, 'Save Point', "", '*.csv' )
            a = pd.read_csv( str( self.path5 ), header=None, index_col=False )
            b = pd.read_csv( str( self.path6 ), header=None, index_col=False )
            c = pd.concat( [a, b] )
            c.to_csv( str( filepath ) + ".csv", index=False, header=None )
            print (" Saved successfully at = %s" % filepath)

    def browsePickleEMGModel1(self):
        filepath = QtGui.QFileDialog.getOpenFileName( self, 'Single File', "", '*.pickle' )
        self.lineEdit_3.setText( filepath )
        self.path7 = str( filepath )
        print (" Path = %s" % self.path7)

    def browsePickleEMGModel2(self):
        filepath = QtGui.QFileDialog.getOpenFileName( self, 'Single File', "", '*.pickle' )
        self.lineEdit_4.setText( filepath )
        self.path8 = str( filepath )
        print (" Path = %s" % self.path8)

    def browseCVModel(self):
        filepath = QtGui.QFileDialog.getOpenFileName( self, 'Single File', "", '*.h5' )
        self.lineEdit_5.setText( filepath )
        self.path9 = str( filepath )
        print (" Path = %s" % self.path9)

    def browsePickleEMGModel3(self):
        filepath = QtGui.QFileDialog.getOpenFileName( self, 'Single File', "", '*.pickle' )
        self.lineEdit_11.setText( filepath )
        self.path10 = str( filepath )
        print (" Path = %s" % self.path10)


if __name__ == '__main__':
    import sys
    from PyQt4 import QtGui
    import numpy as np

    app = QtGui.QApplication( sys.argv )
    main = Main()
    main.setWindowIcon( QtGui.QIcon( 'screenshots/x.png' ) )
    main.show()
    sys.exit( app.exec_() )
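A minimal headless sketch of the train/predict round trip the GUI drives
(hedged: it assumes the four bundled tools/*.csv recordings and reuses the
modules above; "hannon.pickle" is the same output name EMG_Model's __main__
uses):

from sklearn.externals import joblib
import pandas as pd
import EMG_Model

# Train an SVM from the four bundled recordings and pickle it (the same call
# the "Save Model (.pickle)" button makes):
trainer = EMG_Model.EMG_Model()
trainer.all_steps(path1="tools/0.csv", path2="tools/1.csv", path3="tools/2.csv",
                  path4="tools/3.csv", file_name="hannon.pickle")

# Classify one 512-sample window offline, bypassing the Myo stream:
clf = joblib.load("hannon.pickle")
window = pd.read_csv("tools/0.csv", nrows=512, header=None)
features = trainer.get_predictors(trainer.filteration(window, sample_rate=200))
print(clf.predict(features))   # -> one class label for the window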
--------------------------------------------------------------------------------
/GP.ui:
--------------------------------------------------------------------------------
<!-- GP.ui (Qt Designer XML). The markup was stripped in extraction; only the
     widgets' text and geometry values survive. Recoverable structure: a
     1279x669 QMainWindow titled "MYO EMG" (icon :/photos/thalmic.ico) holding
     a tab widget with six tabs - Home ("Related To EMG") with eight small EMG
     channel plots, EMG_Plot, EMG_Training (save .csv, join .csv files, save
     .pickle model), EMG_Testing (browse model, start/stop prediction), System
     (browse EMG/CV models, run/stop system), and Online System (video widget,
     camera and prediction controls, "Show Recognition" checkbox) - plus a
     console side panel with Connect MYO, Disconnect MYO, Power OFF MYO and
     Clear Console buttons. -->
922 | 923 | 924 | 925 | Close 926 | 927 | 928 | 929 | 930 | 931 | 932 | 933 | 934 | 935 | 936 | 937 | 938 | 939 | 0 940 | 0 941 | 1279 942 | 21 943 | 944 | 945 | 946 | 947 | File 948 | 949 | 950 | 951 | 952 | 953 | 954 | 955 | 956 | Help 957 | 958 | 959 | 960 | 961 | 962 | 963 | 964 | 965 | 966 | Lolo 967 | 968 | 969 | 970 | 971 | Zozo 972 | 973 | 974 | 975 | 976 | Close 977 | 978 | 979 | 980 | 981 | About 982 | 983 | 984 | 985 | 986 | 987 | PlotWidget 988 | QGraphicsView 989 |
pyqtgraph
990 |
991 |
992 | 993 | 994 |
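GP.py, the entry point named in the README, presumably loads this form, but it is not preserved in this dump. As a minimal sketch of how a PyQt4 application typically consumes a .ui file like this one, assuming GP.ui sits beside the script and pyqtgraph is installed: the form can be compiled ahead of time with the pyqt4-dev-tools package the README installs,

```cmd
$ pyuic4 GP.ui -o GP_ui.py
```

or loaded at runtime. The `MainApp` class name and the `__main__` harness below are illustrative placeholders, not code from this repository:

```python
# Minimal sketch: load GP.ui at runtime with PyQt4 (hypothetical harness).
import sys
from PyQt4 import QtGui, uic

class MainApp(QtGui.QMainWindow):
    def __init__(self):
        super(MainApp, self).__init__()
        # Builds the widget tree declared in GP.ui onto this window; the
        # promoted pyqtgraph PlotWidget is resolved automatically as long
        # as the pyqtgraph module is importable.
        uic.loadUi("GP.ui", self)

if __name__ == "__main__":
    app = QtGui.QApplication(sys.argv)
    window = MainApp()
    window.show()
    sys.exit(app.exec_())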
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # This Project Runs on a Raspberry Pi Using the MYO Armband with Open-Myo
2 | A Python module to get data from a Myo armband using a generic Bluetooth LE interface.
3 |
4 | ## Installation and usage of [Open-MYO](https://github.com/Alvipe/Open-Myo)
5 |
6 | This module works with generic Bluetooth LE antennas (e.g. CSR V4.0, Cambridge Silicon Radio, or the Bluetooth interface integrated in the Raspberry Pi 3/Raspberry Pi Zero W). It does not work with the Bluetooth antenna included with the Myo armband, because that antenna uses a proprietary protocol from Bluegiga.
7 |
8 | This module requires the [bluepy](https://github.com/IanHarvey/bluepy) Python module. To install it, run:
9 |
10 | ``$ sudo pip install bluepy``
11 |
12 | All code using the bluepy module must run with root permissions. To run the example code, execute:
13 |
14 | ``$ sudo python GP.py``
15 |
16 | The Open Myo module **only works on Linux**, as the bluepy module is only available for Linux.
17 |
18 | ## Installing PyQt4
19 | ```cmd
20 | $ sudo apt update
21 | $ sudo apt install pyqt4-dev-tools
22 | # to install PyQt4 for Python 3: $ sudo apt-get install python3-pyqt4
23 | ```
24 | ### YouTube Video
25 | [Video](https://youtu.be/xmgbvkJEDqI)
26 |
27 | ### Screenshots
28 | EMG Plot Graph 1:
29 | ![EMG Plot Graph 1](https://github.com/hananabilabd/EMG-Classification-Visualization-using-MYO-ArmBand-Raspberry-Pi/blob/master/screenshots/1.PNG)
30 |
31 | EMG Plot Graph 2:
32 | ![](https://github.com/hananabilabd/EMG-Classification-Visualization-using-MYO-ArmBand-Raspberry-Pi/blob/master/screenshots/2.PNG)
33 |
34 | EMG Training:
35 | ![alt text](https://github.com/hananabilabd/EMG-Classification-Visualization-using-MYO-ArmBand-Raspberry-Pi/blob/master/screenshots/3.PNG)
36 |
37 | EMG Testing:
38 | ![alt text](https://github.com/hananabilabd/EMG-Classification-Visualization-using-MYO-ArmBand-Raspberry-Pi/blob/master/screenshots/4.PNG)
39 |
40 | Computer Vision & EMG system test (semi-real-time):
41 | ![alt text](https://github.com/hananabilabd/EMG-Classification-Visualization-using-MYO-ArmBand-Raspberry-Pi/blob/master/screenshots/5.PNG)
42 |
43 | Computer Vision & EMG system test (real-time):
44 | ![alt text](https://github.com/hananabilabd/EMG-Classification-Visualization-using-MYO-ArmBand-Raspberry-Pi/blob/master/screenshots/6.PNG)
45 |
--------------------------------------------------------------------------------
/open_myo.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import struct
3 | import time
4 | from enum import Enum
5 | from bluepy import btle
6 |
7 | class Services(btle.Peripheral):
8 |     # Bluepy's Peripheral class encapsulates a connection to a Bluetooth LE peripheral
9 |     def __init__(self, mac):
10 |         btle.Peripheral.__init__(self, mac)
11 |         time.sleep(0.5)
12 |
13 |     # Get the firmware version
14 |     def firmware(self):
15 |         hex_fw = self.readCharacteristic(ReadHandle.FIRMWARE)
16 |         fw = struct.unpack('<4h', hex_fw)
17 |         return fw
18 |
19 |     def disconnect_MYO(self):
20 |         self.disconnect()
21 |
22 |     # Get the battery level
23 |     def battery(self):
24 |         hex_batt = self.readCharacteristic(ReadHandle.BATTERY)
25 |         batt = ord(hex_batt)
26 |         return batt
27 |
28 |     # Change the color of the logo and bar LEDs
29 |     def set_leds(self, logo, line):
30 |         self.writeCharacteristic(WriteHandle.COMMAND, struct.pack('<8B', 6, 6, *(logo + line)))  # command 6, 6-byte payload: logo RGB triple, then line RGB triple
31 |
32 |     def vibrate(self, length):
33 |         if length in range(1, 4):
34 |             self.writeCharacteristic(WriteHandle.COMMAND, struct.pack('<3B', 3, 1, length))  # command 3: vibrate; 1..3 = short/medium/long
35 |
36 |     def sleep_mode(self, mode):
37 |         self.writeCharacteristic(WriteHandle.COMMAND, struct.pack('<3B', 9, 1, mode))  # command 9: 0 = normal sleep, 1 = never sleep
38 |
39 |     def power_off(self):
40 |         self.writeCharacteristic(WriteHandle.COMMAND, b'\x04\x00')  # command 4: deep sleep (power off)
41 |
42 |     # Subscribe to battery notifications
43 |     def battery_notifications(self):
44 |         self.writeCharacteristic(WriteHandle.BATTERY, b'\x01\x10')
45 |
46 |     # Subscribe to raw EMG notifications (writing 0x0001 to a CCC descriptor enables them)
47 |     def emg_raw_notifications(self):
48 |         self.writeCharacteristic(WriteHandle.EMG0, b'\x01\x00')
49 |         self.writeCharacteristic(WriteHandle.EMG1, b'\x01\x00')
50 |         self.writeCharacteristic(WriteHandle.EMG2, b'\x01\x00')
51 |         self.writeCharacteristic(WriteHandle.EMG3, b'\x01\x00')
52 |
53 |     # Subscribe to filtered EMG notifications
54 |     def emg_filt_notifications(self):
55 |         self.writeCharacteristic(WriteHandle.EMG_FILT, b'\x01\x00')
56 |
57 |     # Subscribe to IMU notifications
58 |     def imu_notifications(self):
59 |         self.writeCharacteristic(WriteHandle.IMU, b'\x01\x00')
60 |
61 |     # Subscribe to classifier notifications (indications, hence 0x0002)
62 |     def classifier_notifications(self):
63 |         self.writeCharacteristic(WriteHandle.CLASSIFIER, b'\x02\x00')
64 |
65 |     def set_mode(self, emg_mode, imu_mode, classifier_mode):
66 |         command_string = struct.pack('<5B', 1, 3, emg_mode, imu_mode, classifier_mode)  # command 1, 3-byte payload
67 |         self.writeCharacteristic(WriteHandle.COMMAND, command_string)
68 |
69 | class Device(btle.DefaultDelegate):
70 |     # bluepy delegate that receives Bluetooth messages asynchronously,
71 |     # such as notifications, indications, and advertising data
72 |     def __init__(self, mac=None):
73 |         btle.DefaultDelegate.__init__(self)
74 |         self.services = Services(mac=get_myo(mac))
75 |         self.services.setDelegate(self)
76 |
77 |         self.emg_event_handlers = []
78 |         self.imu_event_handlers = []
79 |         self.sync_event_handlers = []
80 |         self.classifier_event_handlers = []
81 |         self.battery_event_handlers = []
82 |
83 |     def handleNotification(self, cHandle, data):
84 |         # Notification handles of the 4 EMG data characteristics (raw)
85 |         if cHandle == ReadHandle.EMG0 or cHandle == ReadHandle.EMG1 or cHandle == ReadHandle.EMG2 or cHandle == ReadHandle.EMG3:
86 |             '''According to http://developerblog.myo.com/myocraft-emg-in-the-bluetooth-protocol/
87 |             each characteristic sends two sequential readings in each update,
88 |             so the received payload is split into two samples. According to the
89 |             Myo BLE specification, the data type of the EMG samples is int8_t.
90 |             '''
91 |             emg_raw = []
92 |             emg1 = struct.unpack('<8b', data[:8])   # first 8-channel sample
93 |             emg2 = struct.unpack('<8b', data[8:])   # second 8-channel sample
94 |             emg_raw.append(emg1)
95 |             emg_raw.append(emg2)
96 |             self.on_emg(emg_raw)
97 |         # Notification handle of the EMG data characteristic (filtered)
98 |         elif cHandle == ReadHandle.EMG_FILT:
99 |             emg_filt = struct.unpack('<8H', data[:16])
100 |             self.on_emg(emg_filt)
101 |         # Notification handle of the IMU data characteristic
102 |         elif cHandle == ReadHandle.IMU:
103 |             values = struct.unpack('<10h', data)
104 |             quat = [x/16384.0 for x in values[:4]]  # unit quaternion
105 |             acc = [x/2048.0 for x in values[4:7]]   # in g
106 |             gyro = [x/16.0 for x in values[7:10]]   # in deg/s
107 |             self.on_imu(quat, acc, gyro)
108 |         # Notification handle of the classifier data characteristic
109 |         elif cHandle == ReadHandle.CLASSIFIER:
110 |             event_type, value, x_direction, _, _, _ = struct.unpack('<6B', data)
111 |             if event_type == ClassifierEventType.ARM_SYNCED:  # on arm
112 |                 self.on_sync(Arm(value), XDirection(x_direction))
113 |             elif event_type == ClassifierEventType.ARM_UNSYNCED:  # removed from arm
114 |                 self.on_sync(Arm.UNKNOWN, XDirection.UNKNOWN)
115 |             elif event_type == ClassifierEventType.POSE:  # pose
116 |                 self.on_classifier(Pose(value))
117 |             elif event_type == ClassifierEventType.SYNC_FAILED:
118 |                 print("Sync failed, please perform sync gesture.")
119 |         # Notification handle of the battery data characteristic
120 |         elif cHandle == ReadHandle.BATTERY:
121 |             batt = ord(data)
122 |             self.on_battery(batt)
123 |         else:
124 |             print('Data with unknown attr: %02X' % cHandle)
125 |
126 |
127 |     def add_emg_event_handler(self, event_handler):
128 |         self.emg_event_handlers.append(event_handler)
129 |
130 |     def add_imu_event_handler(self, event_handler):
131 |         self.imu_event_handlers.append(event_handler)
132 |
133 |     def add_sync_event_handler(self, event_handler):
134 |         self.sync_event_handlers.append(event_handler)
135 |
136 |     def add_classifier_event_hanlder(self, event_handler):
137 |         self.classifier_event_handlers.append(event_handler)
138 |
139 |     def add_battery_event_handler(self, event_handler):
140 |         self.battery_event_handlers.append(event_handler)
141 |
142 |     def on_emg(self, emg):
143 |         for event_handler in self.emg_event_handlers:
144 |             event_handler(emg)
145 |
146 |     def on_imu(self, quat, acc, gyro):
147 |         for event_handler in self.imu_event_handlers:
148 |             event_handler(quat, acc, gyro)
149 |
150 |     def on_sync(self, arm, x_direction):
151 |         for event_handler in self.sync_event_handlers:
152 |             event_handler(arm, x_direction)
153 |
154 |     def on_classifier(self, pose):
155 |         for event_handler in self.classifier_event_handlers:
156 |             event_handler(pose)
157 |
158 |     def on_battery(self, batt):
159 |         for event_handler in self.battery_event_handlers:
160 |             event_handler(batt)
161 |
162 | def get_myo(mac=None):
163 |     if mac is not None:
164 |         while True:
165 |             for i in btle.Scanner(0).scan(1):
166 |                 if i.addr == mac:
167 |                     return str(mac).upper()
168 |
169 |     while True:
170 |         for i in btle.Scanner(0).scan(1):
171 |             for j in i.getScanData():
172 |                 if j[0] == 6 and j[2] == '4248124a7f2c4847b9de04a9010006d5':  # AD type 6 (128-bit service UUIDs) carrying the Myo control service UUID, little-endian
173 |                     return str(i.addr).upper()
174 |
175 | class ReadHandle:
176 |     BATTERY = 0x11
177 |     FIRMWARE = 0x17
178 |     EMG0 = 0x2b
179 |     EMG1 = 0x2e
180 |     EMG2 = 0x31
181 |     EMG3 = 0x34
182 |     EMG_FILT = 0x27
183 |     IMU = 0x1c
184 |     CLASSIFIER = 0x23
185 |
186 | class WriteHandle:
187 |     COMMAND = 0x19
188 |     BATTERY = 0x12
189 |     EMG0 = 0x2c
190 |     EMG1 = 0x2f
191 |     EMG2 = 0x32
192 |     EMG3 = 0x35
193 |     EMG_FILT = 0x28
194 |     IMU = 0x1d
195 |     CLASSIFIER = 0x24
196 |
197 | class EmgMode:
198 |     OFF = 0x00
199 |     FILT = 0x01        # filtered/rectified stream on the EMG_FILT characteristic
200 |     RAW = 0x02         # raw EMG, power-line noise filtered
201 |     RAW_UNFILT = 0x03  # raw EMG, unfiltered
202 |
203 | class ImuMode:
204 |     OFF = 0x00
205 |     DATA = 0x01
206 |     EVENTS = 0x02
207 |     ALL = 0x03
208 |     RAW = 0x04
209 |
210 | class ClassifierMode:
211 |     OFF = 0x00
212 |     ON = 0x01
213 |
214 | class ClassifierEventType:
215 |     ARM_SYNCED = 0x01
216 |     ARM_UNSYNCED = 0x02
217 |     POSE = 0x03
218 |     UNLOCKED = 0x04
219 |     LOCKED = 0x05
220 |     SYNC_FAILED = 0x06
221 |
222 | class Pose(Enum):
223 |     REST = 0x00
224 |     FIST = 0x01
225 |     WAVE_IN = 0x02
226 |     WAVE_OUT = 0x03
227 |     FINGERS_SPREAD = 0x04
228 |     DOUBLE_TAP = 0x05
229 |     UNKNOWN = 0xff
230 |
231 | class Arm(Enum):
232 |     RIGHT = 0x01
233 |     LEFT = 0x02
234 |     UNKNOWN = 0xff
235 |
236 | class XDirection(Enum):
237 |     WRIST = 0x01
238 |     ELBOW = 0x02
239 |     UNKNOWN = 0xff
--------------------------------------------------------------------------------
/poweroff.py:
--------------------------------------------------------------------------------
1 | import open_myo as myo
2 |
3 | class poweroff():
4 |
5 |     def power_off(self):
6 |
7 |         myo_mac_addr = myo.get_myo()
8 |         print("MAC address: %s" % myo_mac_addr)
9 |         print("Attempting to Power Off")
10 |         myo_device = myo.Device()  # connects to the armband first
11 |         myo_device.services.power_off()
12 |         print("Successfully Powered Off")
--------------------------------------------------------------------------------
/screenshots/1.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/screenshots/1.PNG
--------------------------------------------------------------------------------
/screenshots/2.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/screenshots/2.PNG
--------------------------------------------------------------------------------
/screenshots/3.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/screenshots/3.PNG
--------------------------------------------------------------------------------
/screenshots/4.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/screenshots/4.PNG
--------------------------------------------------------------------------------
/screenshots/5.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/screenshots/5.PNG
--------------------------------------------------------------------------------
/screenshots/6.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/screenshots/6.PNG
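poweroff.py above is the smallest illustration of driving open_myo: scan, connect, send one command. For the streaming side of the same API, here is a minimal sketch of reading filtered EMG with the Device class defined in open_myo.py. The handler, the chosen modes, and the polling loop are illustrative assumptions rather than code from this repository, and like everything built on bluepy it must run as root on Linux:

```python
# Minimal sketch: stream filtered EMG from a Myo with the open_myo module.
import open_myo as myo

def handle_emg(emg):
    # In EmgMode.FILT, emg is one tuple of 8 channel values per notification.
    print(emg)

myo_device = myo.Device()          # scans until a Myo is found, then connects
myo_device.services.sleep_mode(1)  # keep the armband awake while streaming
myo_device.services.vibrate(1)     # short buzz to confirm the connection
myo_device.services.set_mode(myo.EmgMode.FILT, myo.ImuMode.OFF, myo.ClassifierMode.OFF)
myo_device.services.emg_filt_notifications()
myo_device.add_emg_event_handler(handle_emg)

while True:
    # bluepy's Peripheral.waitForNotifications blocks until a BLE notification
    # arrives and dispatches it through Device.handleNotification above.
    myo_device.services.waitForNotifications(1)
```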
--------------------------------------------------------------------------------
/screenshots/finger_spread.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/screenshots/finger_spread.png
--------------------------------------------------------------------------------
/screenshots/palmar_neutral.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/screenshots/palmar_neutral.png
--------------------------------------------------------------------------------
/screenshots/palmar_pronated.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/screenshots/palmar_pronated.png
--------------------------------------------------------------------------------
/screenshots/pinch.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/screenshots/pinch.png
--------------------------------------------------------------------------------
/screenshots/rest.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/screenshots/rest.png
--------------------------------------------------------------------------------
/screenshots/th.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/screenshots/th.png
--------------------------------------------------------------------------------
/screenshots/tripod.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/screenshots/tripod.png
--------------------------------------------------------------------------------
/screenshots/wrist_extension.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/screenshots/wrist_extension.png
--------------------------------------------------------------------------------
/screenshots/wrist_ulnar_deviation.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/screenshots/wrist_ulnar_deviation.png
--------------------------------------------------------------------------------
/screenshots/x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/screenshots/x.png
--------------------------------------------------------------------------------
/tools/GP_WOC_95.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/tools/GP_WOC_95.h5
--------------------------------------------------------------------------------
/tools/GP_Weights.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/tools/GP_Weights.h5
--------------------------------------------------------------------------------
/tools/class 1/222_r320.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/tools/class 1/222_r320.png
--------------------------------------------------------------------------------
/tools/class 1/50_r110.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/tools/class 1/50_r110.png
--------------------------------------------------------------------------------
/tools/class 1/728_r320.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/tools/class 1/728_r320.png
--------------------------------------------------------------------------------
/tools/class 1/98_r255.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/tools/class 1/98_r255.png
--------------------------------------------------------------------------------
/tools/class 2/206_r225.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/tools/class 2/206_r225.png
--------------------------------------------------------------------------------
/tools/class 2/277_r355.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/tools/class 2/277_r355.png
--------------------------------------------------------------------------------
/tools/class 2/630_r175.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/tools/class 2/630_r175.png
--------------------------------------------------------------------------------
/tools/class 2/640_r320.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/tools/class 2/640_r320.png
--------------------------------------------------------------------------------
/tools/class 3/100_r125.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/tools/class 3/100_r125.png
--------------------------------------------------------------------------------
/tools/class 3/104_r325.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/tools/class 3/104_r325.png
--------------------------------------------------------------------------------
/tools/class 3/510_r50.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/tools/class 3/510_r50.png
--------------------------------------------------------------------------------
/tools/class 3/511_r125.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/tools/class 3/511_r125.png
--------------------------------------------------------------------------------
/tools/class 4/212_r115.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/tools/class 4/212_r115.png
--------------------------------------------------------------------------------
/tools/class 4/474_r120.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/tools/class 4/474_r120.png
--------------------------------------------------------------------------------
/tools/class 4/710_r175.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/tools/class 4/710_r175.png
--------------------------------------------------------------------------------
/tools/class 4/89_r225.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/tools/class 4/89_r225.png
--------------------------------------------------------------------------------
/tools/hannon.pickle:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hananabilabd/EMG-Computer-Vision-Classification-using-MYO-ArmBand-Camera-Raspberry-Pi/48ec73465129e3f7f17d92635d49dffa2455e008/tools/hannon.pickle
--------------------------------------------------------------------------------