├── .gitattributes ├── .gitignore ├── MicroExpressionDetector.sln └── MicroExpressionDetector ├── MicroExpressionDetector.py ├── MicroExpressionDetector.pyproj ├── __ASM-main__.py ├── __main__.py ├── active_shape_models ├── ActiveShapeModel.py ├── ActiveShapeModel.pyc ├── ApplyASM.py ├── PointMovementCalculator.py ├── ShapeAligner.py ├── ShapeAligner.pyc ├── __init__.py └── __init__.pyc ├── data_mining ├── CASMEData.py ├── ParamTune.py ├── ValPandas.py └── __init__.py ├── data_parser ├── CASMELabels.py ├── CASMEParser.py ├── CASMEParserMem.py └── __init__.py ├── dts.py ├── feature_extraction ├── GaborExtractor.py ├── GaborWindowExtractor.py └── __init__.py ├── helpers ├── FileHelper.py ├── FileHelper.pyc ├── TexTable.py ├── WriteUpHelper.py ├── __init__.py ├── __init__.pyc └── log.py ├── image_processing ├── PreProcessing.py ├── TemplateMatcher.py └── __init__.py ├── legacy ├── mains │ ├── DASMMain.py │ ├── FaceMainPASM.py │ ├── FaceMainPCA.py │ └── applyASMMain.py ├── models │ ├── DeformableASM.py │ └── ParallelASM.py └── other │ ├── ActiveShapeModels.py │ ├── ActiveShapeModelsBetter.py │ ├── ParallelMain.py │ ├── SimpleASM.py │ ├── main.py │ ├── shapeNormTest1.py │ ├── shapeNormalizationTest.py │ ├── shapeNormalizationTestToSelf.py │ └── shapeTestMapper.py ├── mains └── __init__.py ├── point_annotator ├── annotator.py └── pointAnnotator.py ├── shapes ├── ActiveShape.py ├── ActiveShape.pyc ├── Point.py ├── Point.pyc ├── Shape.py ├── Shape.pyc ├── Vector.py ├── Vector.pyc ├── __init__.py └── __init__.pyc └── tests └── script_tests ├── active_shape_models ├── PCA_triangle_test.py ├── face_ASM_test.py ├── face_read_points.py ├── filter_training.py ├── gradient_deform_test.py ├── parallel_pool_rotation_test.py ├── shapeNormTest1.py ├── shapeNormalizationTest.py └── shapeNormalizationTestToSelf.py ├── feature_extraction ├── gabor_dim_OO.py ├── gabor_dimensionality.py ├── video_capture_face_detection_MSER_affine_dictionary.py └── vidtoImg.py ├── image_processing ├── 
correlation.py ├── template_matching.py ├── template_matching_class.py └── template_matching_parallel.py ├── pipeline ├── CASME_video_structure.py ├── img_eye_detection_debug.py └── simple_video.py └── shapes ├── __init__.py ├── rotation_angle_test.py └── shape_aligment_test.py /.gitattributes: -------------------------------------------------------------------------------- 1 | ############################################################################### 2 | # Set default behavior to automatically normalize line endings. 3 | ############################################################################### 4 | * text=auto 5 | 6 | ############################################################################### 7 | # Set default behavior for command prompt diff. 8 | # 9 | # This is need for earlier builds of msysgit that does not have it on by 10 | # default for csharp files. 11 | # Note: This is only used by command line 12 | ############################################################################### 13 | #*.cs diff=csharp 14 | 15 | ############################################################################### 16 | # Set the merge driver for project and solution files 17 | # 18 | # Merging from the command prompt will add diff markers to the files if there 19 | # are conflicts (Merging from VS is not affected by the settings below, in VS 20 | # the diff markers are never inserted). Diff markers may cause the following 21 | # file extensions to fail to load in VS. An alternative would be to treat 22 | # these files as binary and thus will always conflict and require user 23 | # intervention with every merge. 
To do so, just uncomment the entries below 24 | ############################################################################### 25 | #*.sln merge=binary 26 | #*.csproj merge=binary 27 | #*.vbproj merge=binary 28 | #*.vcxproj merge=binary 29 | #*.vcproj merge=binary 30 | #*.dbproj merge=binary 31 | #*.fsproj merge=binary 32 | #*.lsproj merge=binary 33 | #*.wixproj merge=binary 34 | #*.modelproj merge=binary 35 | #*.sqlproj merge=binary 36 | #*.wwaproj merge=binary 37 | 38 | ############################################################################### 39 | # behavior for image files 40 | # 41 | # image files are treated as binary by default. 42 | ############################################################################### 43 | #*.jpg binary 44 | #*.png binary 45 | #*.gif binary 46 | 47 | ############################################################################### 48 | # diff behavior for common document formats 49 | # 50 | # Convert binary document formats to text before diffing them. This feature 51 | # is only available from the command line. Turn it on by uncommenting the 52 | # entries below. 53 | ############################################################################### 54 | #*.doc diff=astextplain 55 | #*.DOC diff=astextplain 56 | #*.docx diff=astextplain 57 | #*.DOCX diff=astextplain 58 | #*.dot diff=astextplain 59 | #*.DOT diff=astextplain 60 | #*.pdf diff=astextplain 61 | #*.PDF diff=astextplain 62 | #*.rtf diff=astextplain 63 | #*.RTF diff=astextplain 64 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ## Ignore Visual Studio temporary files, build results, and 2 | ## files generated by popular Visual Studio add-ons. 
3 | 4 | # User-specific files 5 | *.suo 6 | *.user 7 | *.sln.docstates 8 | 9 | # Build results 10 | 11 | [Dd]ebug/ 12 | [Rr]elease/ 13 | x64/ 14 | build/ 15 | [Bb]in/ 16 | [Oo]bj/ 17 | 18 | # Enable "build/" folder in the NuGet Packages folder since NuGet packages use it for MSBuild targets 19 | !packages/*/build/ 20 | 21 | # MSTest test Results 22 | [Tt]est[Rr]esult*/ 23 | [Bb]uild[Ll]og.* 24 | 25 | *_i.c 26 | *_p.c 27 | *.ilk 28 | *.meta 29 | *.obj 30 | *.pch 31 | *.pdb 32 | *.pgc 33 | *.pgd 34 | *.rsp 35 | *.sbr 36 | *.tlb 37 | *.tli 38 | *.tlh 39 | *.tmp 40 | *.tmp_proj 41 | *.log 42 | *.vspscc 43 | *.vssscc 44 | .builds 45 | *.pidb 46 | *.log 47 | *.scc 48 | 49 | # Visual C++ cache files 50 | ipch/ 51 | *.aps 52 | *.ncb 53 | *.opensdf 54 | *.sdf 55 | *.cachefile 56 | 57 | # Visual Studio profiler 58 | *.psess 59 | *.vsp 60 | *.vspx 61 | 62 | # Guidance Automation Toolkit 63 | *.gpState 64 | 65 | # ReSharper is a .NET coding add-in 66 | _ReSharper*/ 67 | *.[Rr]e[Ss]harper 68 | 69 | # TeamCity is a build add-in 70 | _TeamCity* 71 | 72 | # DotCover is a Code Coverage Tool 73 | *.dotCover 74 | 75 | # NCrunch 76 | *.ncrunch* 77 | .*crunch*.local.xml 78 | 79 | # Installshield output folder 80 | [Ee]xpress/ 81 | 82 | # DocProject is a documentation generator add-in 83 | DocProject/buildhelp/ 84 | DocProject/Help/*.HxT 85 | DocProject/Help/*.HxC 86 | DocProject/Help/*.hhc 87 | DocProject/Help/*.hhk 88 | DocProject/Help/*.hhp 89 | DocProject/Help/Html2 90 | DocProject/Help/html 91 | 92 | # Click-Once directory 93 | publish/ 94 | 95 | # Publish Web Output 96 | *.Publish.xml 97 | 98 | # NuGet Packages Directory 99 | ## TODO: If you have NuGet Package Restore enabled, uncomment the next line 100 | #packages/ 101 | 102 | # Windows Azure Build Output 103 | csx 104 | *.build.csdef 105 | 106 | # Windows Store app package directory 107 | AppPackages/ 108 | 109 | # Others 110 | sql/ 111 | *.Cache 112 | ClientBin/ 113 | [Ss]tyle[Cc]op.* 114 | ~$* 115 | *~ 116 | *.dbmdl 117 | 
*.[Pp]ublish.xml 118 | *.pfx 119 | *.publishsettings 120 | 121 | # RIA/Silverlight projects 122 | Generated_Code/ 123 | 124 | # Backup & report files from converting an old project file to a newer 125 | # Visual Studio version. Backup files are not needed, because we have git ;-) 126 | _UpgradeReport_Files/ 127 | Backup*/ 128 | UpgradeLog*.XML 129 | UpgradeLog*.htm 130 | 131 | # SQL Server files 132 | App_Data/*.mdf 133 | App_Data/*.ldf 134 | 135 | 136 | #LightSwitch generated files 137 | GeneratedArtifacts/ 138 | _Pvt_Extensions/ 139 | ModelManifest.xml 140 | 141 | # ========================= 142 | # Windows detritus 143 | # ========================= 144 | 145 | # Windows image file caches 146 | Thumbs.db 147 | ehthumbs.db 148 | 149 | # Folder config file 150 | Desktop.ini 151 | 152 | # Recycle Bin used on file shares 153 | $RECYCLE.BIN/ 154 | 155 | # Mac desktop service store files 156 | .DS_Store 157 | -------------------------------------------------------------------------------- /MicroExpressionDetector.sln: -------------------------------------------------------------------------------- 1 |  2 | Microsoft Visual Studio Solution File, Format Version 12.00 3 | # Visual Studio 2013 4 | VisualStudioVersion = 12.0.30501.0 5 | MinimumVisualStudioVersion = 10.0.40219.1 6 | Project("{888888A0-9F3D-457C-B088-3A5042F75D52}") = "MicroExpressionDetector", "MicroExpressionDetector\MicroExpressionDetector.pyproj", "{73F21E65-4874-4521-9817-F5DC14F09649}" 7 | EndProject 8 | Global 9 | GlobalSection(SolutionConfigurationPlatforms) = preSolution 10 | Debug|Any CPU = Debug|Any CPU 11 | Release|Any CPU = Release|Any CPU 12 | EndGlobalSection 13 | GlobalSection(ProjectConfigurationPlatforms) = postSolution 14 | {73F21E65-4874-4521-9817-F5DC14F09649}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 15 | {73F21E65-4874-4521-9817-F5DC14F09649}.Release|Any CPU.ActiveCfg = Release|Any CPU 16 | EndGlobalSection 17 | GlobalSection(SolutionProperties) = preSolution 18 | HideSolutionNode = FALSE 
19 | EndGlobalSection 20 | EndGlobal 21 | -------------------------------------------------------------------------------- /MicroExpressionDetector/MicroExpressionDetector.py: -------------------------------------------------------------------------------- 1 | print('Hello World') 2 | -------------------------------------------------------------------------------- /MicroExpressionDetector/MicroExpressionDetector.pyproj: -------------------------------------------------------------------------------- 1 |  2 | 3 | 4 | Debug 5 | 2.0 6 | 73f21e65-4874-4521-9817-f5dc14f09649 7 | . 8 | __ASM-main__.py 9 | 10 | 11 | . 12 | . 13 | MicroExpressionDetector 14 | MicroExpressionDetector 15 | 16 | 17 | true 18 | false 19 | 20 | 21 | true 22 | false 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | Code 31 | 32 | 33 | Code 34 | 35 | 36 | Code 37 | 38 | 39 | Code 40 | 41 | 42 | 43 | Code 44 | 45 | 46 | 47 | Code 48 | 49 | 50 | Code 51 | 52 | 53 | Code 54 | 55 | 56 | 57 | Code 58 | 59 | 60 | Code 61 | 62 | 63 | 64 | Code 65 | 66 | 67 | 68 | Code 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | Code 77 | 78 | 79 | Code 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | Code 93 | 94 | 95 | 96 | Code 97 | 98 | 99 | 100 | Code 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | Code 111 | 112 | 113 | Code 114 | 115 | 116 | Code 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 | 134 | 135 | 136 | 137 | 138 | Code 139 | 140 | 141 | 142 | 143 | 144 | 145 | 146 | 147 | 148 | 149 | 150 | 151 | 152 | 153 | 154 | 155 | 156 | 157 | 158 | 159 | 160 | 161 | 162 | 163 | 164 | 165 | 166 | 167 | Code 168 | 169 | 170 | 171 | 10.0 172 | $(MSBuildExtensionsPath32)\Microsoft\VisualStudio\v$(VisualStudioVersion)\Python Tools\Microsoft.PythonTools.targets 173 | 174 | 175 | 176 | 179 | 180 | 181 | 182 | 183 | 184 | -------------------------------------------------------------------------------- /MicroExpressionDetector/__ASM-main__.py: 
def trainingRun( ):
    """Train the ASM once for each of the four filtering configurations.

    Sweeps point filtering x image filtering in training mode; the
    matching method / its parameters are irrelevant while training.
    """
    train = True
    for filterPoints in [ True, False ]:
        for filterImages in [ True, False ]:
            process( filterPoints, filterImages, train, "", 0, 0 )


def applyRun( ):
    """Apply a previously trained ASM across the full parameter grid.

    For the gradient method the search radius (maxPx) is swept; for the
    template-matching methods (SSD / nCorr) the filter size is swept.
    """
    train = False
    for filterPoints in [ True, False ]:
        for filterImages in [ True, False ]:
            for method in [ 'grad', "SSD", "nCorr" ]:
                if method == "grad":
                    for maxPx in [ 5, 10, 15, 30, 50 ]:
                        process( filterPoints, filterImages, train, method, maxPx, 0 )
                else:
                    # renamed from `filter` -- avoid shadowing the builtin
                    for fSize in [ 3, 5, 7 ]:
                        process( filterPoints, filterImages, train, method, 0, fSize )


def getMethodSubFolder( params, output ):
    """Return the per-method output subfolder under `output`.

    The folder name encodes the matching method plus its relevant
    parameter (maxPx for 'grad', fSize for the template methods).
    In apply mode (trainASM False) the folder is created if missing;
    in training mode the path is returned without touching disk.
    """
    paramStr = ""
    paramStr += "%s-" % ( params["method"] )
    if params["method"] == 'grad':
        paramStr += "%s-%d" % ( "maxPx", params["maxPx"] )
    else:
        paramStr += "%s-%d" % ( "fSize", params["fSize"] )
    sub = os.path.join( output, paramStr )
    if not params["trainASM"]:
        if not os.path.exists( sub ):
            os.mkdir( sub )
    return sub


def getASMfolder( params ):
    """Locate (and in training mode create) the study output folder.

    The folder name encodes iteration count, training-set size, active
    filter flags, and a running study number.  Training probes for the
    first unused number; apply mode backs up (`i - 2`) to the most
    recently created study -- NOTE(review): this assumes a training run
    just happened; verify before relying on it in isolation.
    """
    paramStr = ""
    # .items() instead of .iteritems(): same iteration order, Py2/3 compatible
    for k, v in params.items():
        if v and k in [ "filterPts", "filterImgs" ]:
            paramStr += "%s-" % k

    i = 0
    output = "C:\\Users\\Valerie\\Desktop\\output\\"
    study = "i-%d-t-%d-%ss-%d" % ( params["nIters"], params["nTrain"], paramStr, i )

    # Probe until an unused study number is found
    while os.path.exists( os.path.join( output, study ) ):
        study = "i-%d-t-%d-%ss-%d" % ( params["nIters"], params["nTrain"], paramStr, i )
        print( study )
        i += 1

    if params["trainASM"]:
        os.mkdir( os.path.join( output, study ) )
    else:
        study = "i-%d-t-%d-%ss-%d" % ( params["nIters"], params["nTrain"], paramStr, i - 2 )

    output = os.path.join( output, study )
    return output
"""Feature-extraction entry point.

Runs the CASME parser for one Gabor configuration (scales, orientations,
image resolution) and dumps the extracted features to a CSV whose name
encodes the configuration.
"""
from data_parser.CASMEParser import CASMEParser
import numpy as np
import pandas as pd
import os

# Output folder for generated CSVs
OUT = "C:\\Users\\Valerie\\Desktop\\output"

# Candidate parameter grids (only one combination is run below; the
# commented-out loop used to sweep all of them).
res = [ 0.05, 0.1, 0.5 ]
orient = [ 8 ]
scales = [ 5, 8, 11 ]

"""
for nScales in scales:
    for nOrient in orient:
        for imgRes in res:
"""

nScales = 5
nOrient = 8
imgRes = 0.06
cp = CASMEParser( nScales, nOrient, imgRes )
featureInfo, labelInfo, retval = cp.run()

if retval:
    ## To DataFrames
    labels = pd.DataFrame.from_dict( labelInfo, orient = 'index' )
    feats = pd.DataFrame( featureInfo )

    ## Check data
    #labels[ (labels['video'] == 0) & (labels['subject'] == 0) ]

    ## To .CSV -- format the file NAME before joining (the old code
    ## applied % to the already-joined path; same result, but fragile
    ## if OUT ever contains a % specifier) and call to_csv on the
    ## instance rather than through the unbound class attribute.
    #labels.to_csv( os.path.join( OUT, "CASME-Labels.csv" ), index = False )
    feats.to_csv( os.path.join( OUT, "Gabor-S%d-O%d-H%d-W%d.csv" % ( nScales, nOrient, cp.nh, cp.nw ) ), index = False )
else:
    print( "Failboat" )
else: 52 | assert( s.n == self.n ) 53 | self.allShapes.append( s ) 54 | 55 | 56 | def drawAllShapes( self, pallette, axis): 57 | i = 0 58 | for el in self.allShapes: 59 | el.draw( palette, i, axis) 60 | i += 1 61 | axis.plot( self.meanShape.xs, self.meanShape.ys, c = 'k' ) 62 | 63 | 64 | def calcMeanShape( self ): 65 | xList = [ el.xs for el in self.allShapes ] 66 | yList = [ el.ys for el in self.allShapes ] 67 | meanPointsList = map( lambda x,y: Vector( x, y), np.mean(xList, 0), np.mean(yList, 0) ) 68 | # meanPointsList = zip( np.mean(xList, 0), np.mean(yList, 0) ) 69 | 70 | return ActiveShape( meanPointsList ) 71 | 72 | 73 | def calcNormTranslate( self, shape): 74 | ## Translate centroid as origin 75 | cm = Shape.centroid( shape ) 76 | t = [[ -cm.x ], [ -cm.y ]] 77 | return t 78 | 79 | def calcNormScale( self, shape ): 80 | if self.n == 68: 81 | d = Point.dist( shape.shapePoints[self.leftEyeIx], shape.shapePoints[self.rightEyeIx] ) 82 | else : 83 | rc = ActiveShape.centroid( ActiveShape( shape.shapePoints[ 31 : 35 ] ) ) 84 | lc = ActiveShape.centroid( ActiveShape( shape.shapePoints[ 27 : 31 ] ) ) 85 | d = Point.dist( rc, lc ) 86 | s = float(1)/float(d) 87 | return s 88 | 89 | def calcNormRotateImg( self, shape ): 90 | xDiff = shape.shapePoints[self.rightEyeIx].x - shape.shapePoints[self.leftEyeIx].x 91 | yDiff = shape.shapePoints[self.rightEyeIx].y - shape.shapePoints[self.leftEyeIx].y 92 | p0 = [ xDiff, yDiff ] 93 | axisVector = [ -1, 0 ] 94 | thetaP = Vector.angleBetween( p0, axisVector ) 95 | thetaRot = thetaP 96 | rot = Vector.calcSRotMat( 1, thetaRot ) 97 | return rot, thetaRot 98 | 99 | def calcNormRotate( self, shape ): 100 | xDiff = shape.shapePoints[self.rightEyeIx].x - shape.shapePoints[self.leftEyeIx].x 101 | yDiff = shape.shapePoints[self.rightEyeIx].y - shape.shapePoints[self.leftEyeIx].y 102 | p0 = [ xDiff, yDiff ] 103 | axisVector = [ 1, 0 ] 104 | thetaP = Vector.angleBetween( p0, axisVector ) 105 | thetaRot = thetaP 106 | rot = Vector.calcSRotMat( 
1, thetaRot ) 107 | return rot, thetaRot 108 | 109 | def normShape( self, shape ): 110 | """ 111 | Calculates and applies normalization to passed shape 112 | """ 113 | 114 | ############## Calc transformations ################### 115 | ## Translate centroid as origin 116 | t = self.calcNormTranslate( shape ) 117 | shape = shape.translate( t ) 118 | 119 | ## Scale to distance between eyes as 1 120 | s = self.calcNormScale( shape ) 121 | 122 | ## Rotate so eyes are level 123 | rot, rotTheta = self.calcNormRotateImg( shape ) 124 | 125 | shape = shape.rotate( rot ) 126 | shape = shape.scale( s ) 127 | 128 | return shape 129 | 130 | 131 | -------------------------------------------------------------------------------- /MicroExpressionDetector/active_shape_models/ActiveShapeModel.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vsimonis/MicroExpressionDetector/fd6498aa679a31338c324a5419152bde1dd9427c/MicroExpressionDetector/active_shape_models/ActiveShapeModel.pyc -------------------------------------------------------------------------------- /MicroExpressionDetector/active_shape_models/ApplyASM.py: -------------------------------------------------------------------------------- 1 | import sys 2 | sys.path.append( ".." 
    def alignEyes( self, eye1, eye2 ):
        """Position the model's mean shape over the detected eye locations.

        eye1 / eye2 are Points in image coordinates; eye2 is the anchor
        used for translation.  Returns the positioned shape and a dict of
        the transform applied ({'t','s','rot','srot','theta'}).  Also
        saves a 2x2 diagnostic figure ("deform-init.png") showing each
        alignment stage.
        """
        x = ActiveShape.createShape( self.x )
        f, [[ax1, ax2], [ax3, ax4]] = plt.subplots( 2,2)
        # distance between eyes (target scale in image pixels):
        d1 = Point.dist( eye1, eye2 )
        # eye-region centroids for the reduced point set (same 27:31 /
        # 31:35 slices used by ActiveShapeModel.calcNormScale)
        rc = ActiveShape.centroid( ActiveShape( x.shapePoints[ 31 : 35 ] ) )
        lc = ActiveShape.centroid( ActiveShape( x.shapePoints[ 27 : 31 ] ) )

        # Model inter-eye distance: explicit indices for the full
        # 68-point markup, region centroids otherwise
        if self.asm.n == 68:
            d2 = Point.dist( x.shapePoints[ self.asm.rightEyeIx], x.shapePoints[self.asm.leftEyeIx] )
        else :
            d2 = Point.dist( rc, lc )
        s = float( d1/d2 )

        shape = copy.deepcopy( x )

        DrawFace( shape, ax1).drawBold()

        shape = shape.scale( s )
        DrawFace( shape, ax2).drawBold()

        rot, thetaRot = self.asm.calcNormRotateImg( shape )
        shape = shape.rotate( rot )
        DrawFace( shape, ax3).drawBold()

        ax1.invert_yaxis()
        ax2.invert_yaxis()
        ax3.invert_yaxis()

        # recompute eye centres after scale + rotation
        rc = ActiveShape.centroid( ActiveShape( shape.shapePoints[ 31 : 35 ] ) )
        lc = ActiveShape.centroid( ActiveShape( shape.shapePoints[ 27 : 31 ] ) )

        # translate so the model eye lands on eye2
        if self.asm.n == 68:
            t = [ [ (eye2.x - shape.shapePoints[self.asm.leftEyeIx ].x ) ],
                  [ (eye2.y - shape.shapePoints[self.asm.leftEyeIx ].y )] ]
        else:
            t = [ [ ( eye2.x - rc.x )], [ ( eye2.y - rc.y )] ]
        shape = shape.translate( t )

        ### Check that initial shape is within image frame
        tempS = 0
        nr, nc = np.shape( self.img )
        for pt in shape.shapePoints:
            if pt.y > nr :
                # accumulate a proportional shrink factor per overflow
                tempS += ( pt.y - nr + 10 ) / nr
            if pt.x > nc :
                tempS += ( pt.x - nc + 10) / nc

        if tempS != 0:
            # Out-of-frame recovery: shrink, then re-anchor on the eyes.
            # NOTE(review): the original layout's indentation was
            # ambiguous -- the re-translation is taken to belong to this
            # recovery branch; confirm against version history.
            shape = shape.scale( 1 - tempS )

            if self.asm.n == 68:
                t = [ [ (eye2.x - shape.shapePoints[self.asm.leftEyeIx ].x ) ],
                      [ (eye2.y - shape.shapePoints[self.asm.leftEyeIx ].y )] ]
            else:
                rc = ActiveShape.centroid( ActiveShape( shape.shapePoints[ 31 : 35 ] ) )
                lc = ActiveShape.centroid( ActiveShape( shape.shapePoints[ 27 : 31 ] ) )

                t = [ [ ( eye2.x - rc.x )], [ ( eye2.y - rc.y )] ]
            shape = shape.translate( t )

        DrawFace( shape, ax4).drawBold()
        ax4.scatter( eye1.x, eye1.y, c = 'r')
        ax4.scatter( eye2.x, eye2.y, c = 'g' )
        ax4.imshow( self.img, cmap = 'gray' )
        f.show()
        plt.savefig( os.path.join( self.out, "deform-init.png" ) )
        plt.gca().invert_yaxis()
        plt.close()
        # combined scale+rotation, reported alongside its factors
        srot = np.dot(s, rot )
        transDict = { 't' : t, 's' : s, 'rot' : rot,'srot' : srot, 'theta' : thetaRot }

        return shape, transDict
    def calcdX( self ):
        """Compute the suggested per-landmark shift dX for the current shape.

        Builds an ActiveShape from the current image-frame points self.X,
        then asks either the gradient mover ('grad', search bounded by
        self.maxPx) or the template matcher (SSD / nCorr, window size
        self.fSize) for a displacement per point.  Returns the shifts
        flattened into a 1-D array matching self.X's layout.
        """
        print "calcDX"
        Xshape = ActiveShape.createShape( self.X )
        if self.method == "grad":
            # gradient-driven point movement, limited to maxPx pixels
            PMC = pmc( self.img, self.maxPx )
            return np.ravel( map( lambda p : PMC.calcShift( p, 'tx' ), Xshape.shapePoints ) )
        else:
            # template matching (SSD or normalized correlation)
            TM = TemplateMatcher( self.method , self.fSize, self.fPts )
            return np.ravel( TM.performMatching(self.img, Xshape) )
ax1).drawContrast() 263 | 264 | DrawFace( self.XdX, ax1).drawBold() 265 | 266 | DrawFace( self.X, ax2).drawContrast() 267 | DrawFace( self.XdX, ax2 ).drawBold() 268 | ax1.set_xlim( 0, np.shape( self.img )[1]) 269 | ax1.set_ylim( np.shape( self.img)[0] ,0 ) 270 | ax2.invert_yaxis() 271 | f.suptitle("Original Shape (Contrast) and Gradient Suggested Shape (Bold)") 272 | f.savefig( os.path.join( self.out, "deformation-iter-%d.png" % i ) ) 273 | 274 | 275 | ## Find X --> X + dX 276 | XShape = ActiveShape.createShape( self.X ) 277 | XdXShape = ActiveShape.createShape( self.XdX ) 278 | deltaParams = SA.calcAlignTransBtwn( XdXShape, XShape, np.ones( self.asm.n ) ) 279 | 280 | ## Get transformation constrained delta parameters 281 | self.d0 = deltaParams['theta'] 282 | self.ds = deltaParams['s'] 283 | self.dXc = self.genXc( deltaParams ) 284 | 285 | ## Calculate dx 286 | yShape = ActiveShape.createShape( self.y ) 287 | f, (ax1,ax2) = plt.subplots( 1,2) 288 | yt = yShape.M( 1 / ( self.s *( 1 + self.ds )) , - ( self.theta + self.d0 ) ) 289 | 290 | DrawFace( yShape, ax1).drawBold() 291 | DrawFace( yt, ax2).drawBold() 292 | 293 | f.suptitle( "Transformed contour" ) 294 | ax1.set_title( "Original") 295 | ax2.set_title( "Delta'd") 296 | ax1.invert_yaxis() 297 | ax2.invert_yaxis() 298 | f.savefig( os.path.join( self.out, "y-%d.png" % i ) ) 299 | plt.close() 300 | 301 | 302 | self.dx = np.subtract( yt.flatten(), self.x ) 303 | 304 | ## Calculate db 305 | self.db = np.dot( np.transpose(self.P) , self.dx ) 306 | 307 | 308 | ## Update 309 | self.theta += self.d0 310 | self.s = self.s * ( 1 + self.ds ) 311 | self.Xc = np.add( self.Xc, self.dXc ) 312 | 313 | self.b = np.add( self.b, self.db ) 314 | 315 | f.clear() 316 | plt.close() 317 | 318 | i += 1 319 | print "It took you %d iterations" % i 320 | 321 | 322 | 323 | 324 | 325 | 326 | -------------------------------------------------------------------------------- /MicroExpressionDetector/active_shape_models/PointMovementCalculator.py: 
-------------------------------------------------------------------------------- 1 | import sys 2 | sys.path.append( ".." ) 3 | 4 | 5 | import numpy as np 6 | import math 7 | from shapes.Vector import Vector 8 | from shapes.Point import Point 9 | from image_processing.TemplateMatcher import TemplateMatcher 10 | 11 | 12 | class PointMovementCalculator( object ): 13 | 14 | def __init__( self, img, maxPx ): 15 | self.img = img 16 | self.maxPx = maxPx 17 | 18 | 19 | def pointGradient( self ): 20 | fx = [ -1, 0,1 ] 21 | fy = np.transpose(fx) 22 | 23 | 24 | @staticmethod 25 | def getGradient( pt, img ): 26 | if pt.x is np.nan or pt.y is np.nan: 27 | return 28 | 29 | 30 | ## Check bounds 31 | h, w = np.shape( img ) 32 | if pt.y > h - 2: 33 | pt.setY( h - 2) 34 | if pt.x > w - 2 : 35 | 36 | 37 | pt.setX( w - 2) 38 | 39 | if pt.x < 1 : 40 | pt.setX( 1) 41 | if pt.y < 1 : 42 | pt.setY( 1) 43 | 44 | 45 | 46 | delF = [ (img[ pt.y, pt.x + 1] - img[pt.y, pt.x - 1 ] )/2, 47 | (img[ pt.y + 1 , pt.x] - img[pt.y - 1, pt.x ] )/2 ] 48 | #print delF 49 | mag = math.sqrt( delF[0] ** 2 + delF[1] ** 2 ) 50 | 51 | unitF = Vector.unit( delF ) 52 | 53 | 54 | 55 | 56 | 57 | return unitF, mag, pt 58 | 59 | def calcShift( self, point, method ): 60 | ## get point p (ix 48) 61 | 62 | # Get initial gradient at point ( unitV already ) 63 | # vector, magnitude, origin 64 | #print "original: %f, %f" % (point.x, point.y) 65 | f, m, pt = self.getGradient( point, self.img ) 66 | #print "returned: %f, %f" % (point.x, point.y) 67 | 68 | h, w = np.shape( self.img ) 69 | 70 | cX = pt.x 71 | cY = pt.y 72 | # Goal is to find maximal 73 | maxM = 1 74 | cnt = 0 75 | while ( True ): 76 | ## move point according to maximal magnitude response in area 77 | ## normalized according to current magnitude/maximal magnitude 78 | dx = f[0] * float(m)/float(maxM) 79 | dy = f[1]* float(m)/float(maxM) 80 | 81 | ## too far from original point 82 | if cnt > self.maxPx: 83 | # print point.x + dx, point.y + dy 84 | return dx, dy 
85 | cnt+= 1 86 | 87 | ## move point one unit in direction of gradient 88 | cX = cX + f[0] 89 | cY = cY + f[1] 90 | if ( cX > w - 2 or cY > h - 2 or cX < 1 or cY < 1 ) : 91 | # print point.x + dx, point.y + dy 92 | return dx, dy 93 | 94 | _, cM, _ = self.getGradient( Point( cX, cY ), self.img ) 95 | if cM > maxM: 96 | maxM = cM 97 | 98 | ## Outside of image frame 99 | 100 | @staticmethod 101 | def deravel( vect ): 102 | x, y = [], [] 103 | vect = np.ravel( vect ) 104 | for i in range( len( vect ) ): 105 | if i % 2 == 0: 106 | x.append( vect[i] ) 107 | else: 108 | y.append( vect[i] ) 109 | return x, y 110 | 111 | 112 | 113 | 114 | 115 | 116 | -------------------------------------------------------------------------------- /MicroExpressionDetector/active_shape_models/ShapeAligner.py: -------------------------------------------------------------------------------- 1 | from shapes.ActiveShape import ActiveShape 2 | from active_shape_models.ActiveShapeModel import ActiveShapeModel 3 | 4 | from helpers.FileHelper import FileHelper 5 | 6 | from shapes.Vector import Vector 7 | from shapes.Shape import Shape 8 | 9 | import numpy as np 10 | import math 11 | import logging 12 | import matplotlib.pyplot as plt 13 | import seaborn as sns 14 | import functools 15 | 16 | from pathos.multiprocessing import ProcessingPool as PPool 17 | from multiprocessing import freeze_support 18 | import random 19 | import time 20 | import os 21 | 22 | class ShapeAligner( object ): 23 | 24 | def __init__( self, asm, nIters, out ): 25 | self.asm = asm 26 | self.nIters = nIters 27 | self.n = asm.n 28 | self.out = os.path.join( out, "Align" ) 29 | if not os.path.exists( self.out ): 30 | os.mkdir( self.out ) 31 | 32 | 33 | def alignTrainingSet( self ): 34 | 35 | ## Setup drawing 36 | co = random.sample( sns.xkcd_rgb.keys(), self.n ) 37 | pal = sns.xkcd_palette( co ) 38 | 39 | 40 | for i in range( self.nIters ): 41 | start = time.time() 42 | 43 | # Calculate mean shape 44 | self.asm.meanShape = 
self.asm.calcMeanShape() 45 | 46 | if i == 0: 47 | map( lambda t : t.draw( pal, plt ), self.asm.allShapes ) 48 | plt.plot( self.asm.meanShape.xs, self.asm.meanShape.ys, c = 'k', lw = 1 ) 49 | plt.gca().invert_yaxis() 50 | plt.savefig( os.path.join( self.out, "no-alignment-%d.png" % i ) ) 51 | 52 | plt.close() 53 | 54 | # Normalize mean shape 55 | self.asm.normMeanShape = self.asm.normShape( self.asm.meanShape ) 56 | 57 | # Align all shapes to normalized mean shape 58 | self.asm.allShapes = self.alignAllShapes() 59 | map( lambda t : t.draw( pal, plt ), self.asm.allShapes ) 60 | plt.plot( self.asm.normMeanShape.xs, self.asm.normMeanShape.ys, c = 'k', lw = 1 ) 61 | plt.gca().invert_yaxis() 62 | plt.savefig( os.path.join( self.out, "alignment-%d.png" % ( i ) ) ) 63 | plt.close() 64 | 65 | with open( os.path.join( self.out, 'log.txt' ), 'a' ) as of: 66 | of.write( "AlignIter: %f\n" % ( time.time() - start ) ) 67 | of.write( '%d\n\n' % i ) 68 | print i 69 | 70 | 71 | return self.asm 72 | 73 | 74 | ### Alignment methods 75 | ## Generators 76 | def Zgen( self, shape, w ): 77 | SS = map( lambda a, b : a**2 + b**2, shape.xs, shape.ys ) 78 | return np.dot( SS, w ) 79 | 80 | 81 | def Xgen( self, shape, w ): 82 | return np.dot( shape.xs, w ) 83 | 84 | def Ygen( self, shape, w ): 85 | return np.dot( shape.ys, w ) 86 | 87 | def Wgen( self, w ): 88 | return sum( w ) 89 | 90 | def C1gen( self, shape1, shape2, w): 91 | SW = map( lambda a, b, c,d : a * b + c * d, 92 | shape1.xs, shape2.xs, shape1.ys, shape2.ys ) 93 | return np.dot( SW, w ) 94 | 95 | def C2gen( self, shape1, shape2, w): 96 | SB = map( lambda a, b, c,d : c * b - a * d, 97 | shape1.xs, shape2.xs, shape1.ys, shape2.ys ) 98 | return np.dot( SB, w ) 99 | 100 | 101 | def calcWs( self ): 102 | """ 103 | Calculates and returns: 104 | w <-- vector of weights (length == number of points (n) ) 105 | W <-- diagonal matrix representing vector (n x n) 106 | 107 | These weights represent the influence of each point 108 | (by 
distance among corresponding points across training shapes) 109 | """ 110 | 111 | # Get variance matrix V 112 | V = [] 113 | 114 | for k in range(self.n): 115 | row = [] 116 | for l in range(self.n): 117 | col = [] 118 | for i in range(len( self.asm.allShapes )): 119 | col.append( self.asm.allShapes[i].R[k][l]) 120 | row.append( np.var(col) ) 121 | V.append(row) 122 | 123 | 124 | s = map( sum, V) 125 | w = [ math.pow( j, -1) for j in s] 126 | W = np.diag(w) 127 | return w, W 128 | 129 | def alignOneShape( self, shape, w ): 130 | transDict = self.calcAlignTransBtwn( self.asm.normMeanShape, shape, w ) 131 | shape = shape.transform( transDict ) 132 | return shape 133 | 134 | def calcAlignTransBtwn( self, shape1, shape2, w): 135 | start = time.time() 136 | coeffs = np.array( [ 137 | [ self.Xgen( shape2, w ), - self.Ygen( shape2, w ), self.Wgen( w ) , 0], 138 | [ self.Ygen( shape2, w ), self.Xgen( shape2, w ) , 0 , self.Wgen( w )], 139 | [ self.Zgen( shape2, w ), 0 , self.Xgen( shape2, w ) , self.Ygen( shape2, w )], 140 | [ 0 , self.Zgen( shape2, w ) , - self.Ygen( shape2, w ), self.Xgen( shape2, w )] 141 | ]) 142 | eqs = np.array([ self.Xgen( shape1, w ) , 143 | self.Ygen( shape1, w ), 144 | self.C1gen( shape1, shape2, w ), 145 | self.C2gen( shape1, shape2, w ) ] ) 146 | 147 | sol = np.linalg.solve( coeffs, eqs ) 148 | d = sol[0] # d = ax = s cos 0 --> 0 149 | e = sol[1] # e = ay = s sin 0 --> 1 150 | f = sol[2] # f = tx --> 2 151 | g = sol[3] # g = ty --> 3 152 | 153 | 154 | srot = [[ d, - e], 155 | [ e, d] ] 156 | 157 | t = [[ f ],[ g ]] 158 | 159 | theta = math.atan( e / d ) 160 | 161 | s = math.sqrt( d**2 + e**2 ) 162 | 163 | #print 'CalcAlign: %f' % ( time.time() - start ) 164 | return { 'srot': srot, 't':t , 'theta' : theta, 's' : s } 165 | 166 | 167 | def shapeDist( self ): 168 | allD = map( lambda x : x.shapeDist( self.asm.normMeanShape ), self.asm.allShapes ) 169 | return np.mean(np.mean( allD, 0 )), np.mean(np.std( allD, 0 )), np.mean(np.mean( allD, 1 )), 
np.mean(np.std( allD, 1 ) ) 170 | 171 | 172 | def alignAllShapes( self ): 173 | start = time.time() 174 | w, W = self.calcWs() 175 | freeze_support() 176 | allShapes = PPool().map( lambda x : self.alignOneShape( x, w ), self.asm.allShapes ) 177 | mn1, sd1, mn2,sd2 = self.shapeDist() 178 | with open( os.path.join( self.out, 'log.txt' ), 'a' ) as of: 179 | of.write('point-wise mean:\t%f\n' % mn1 ) 180 | of.write('point-wise std:\t%f\n' % sd1 ) 181 | of.write('shape-wise mean:\t%f\n'% mn2 ) 182 | of.write('shape-wise std:\t%f\n' % sd2 ) 183 | of.write( 'alignAllShapes: %f\n' % (time.time() - start ) ) 184 | return allShapes -------------------------------------------------------------------------------- /MicroExpressionDetector/active_shape_models/ShapeAligner.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vsimonis/MicroExpressionDetector/fd6498aa679a31338c324a5419152bde1dd9427c/MicroExpressionDetector/active_shape_models/ShapeAligner.pyc -------------------------------------------------------------------------------- /MicroExpressionDetector/active_shape_models/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vsimonis/MicroExpressionDetector/fd6498aa679a31338c324a5419152bde1dd9427c/MicroExpressionDetector/active_shape_models/__init__.py -------------------------------------------------------------------------------- /MicroExpressionDetector/active_shape_models/__init__.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vsimonis/MicroExpressionDetector/fd6498aa679a31338c324a5419152bde1dd9427c/MicroExpressionDetector/active_shape_models/__init__.pyc -------------------------------------------------------------------------------- /MicroExpressionDetector/data_mining/CASMEData.py: 
# -----------------------------------------------------------------------------
# MicroExpressionDetector/data_mining/CASMEData.py
# -----------------------------------------------------------------------------
from data_mining.ValPandas import ValPandas


class CASMEData( ValPandas ):
    """CASME2-specific view over ValPandas.

    Column roles: 'emotion' is the label, subject/video/frame and the
    onset/apex/offset flags are filters, and the remaining (numbered) columns
    are Gabor features.  Supports two storage modes: a single combined
    DataFrame (`df`), or a separate feature/label pair (`X`, `Y`).
    """

    def __init__( self, X = None, Y = None, df = None ):
        super( CASMEData, self ).__init__( X = X, Y = Y, df = df )
        self.label = ['emotion']
        self.filters = ['subject', 'video', 'frame', 'isOnset', 'isApex', 'isOffset']
        # Feature columns are named "0", "1", ...: count them from whichever
        # container was supplied.
        if X is None:
            dims = len( df.columns ) - len( self.label ) - len( self.filters )
        else:
            dims = len( X.columns )
        self.feats = [ '%s' % i for i in range( dims ) ]

    # -- private helpers -----------------------------------------------------
    def _featuresOf( self, subset ):
        # Combined-df mode: slice feature columns; X/Y mode: subset is a pair.
        if self.X is None:
            return subset[ self.feats ]
        feats, _ = subset
        return feats

    def _labelsOf( self, subset ):
        # Combined-df mode: slice the emotion column; X/Y mode: unpack pair.
        if self.X is None:
            return subset['emotion']
        _, labels = subset
        return labels['emotion']

    # -- whole-set accessors -------------------------------------------------
    @property
    def x( self ):
        return self.X[ self.feats ]

    @property
    def y( self ):
        return self.Y[ self.label ]

    # -- per-split accessors -------------------------------------------------
    @property
    def Xtrain( self ):
        return self._featuresOf( self.train )

    @property
    def Xtest( self ):
        return self._featuresOf( self.test )

    @property
    def Xval( self ):
        return self._featuresOf( self.val )

    @property
    def Ytrain( self ):
        return self._labelsOf( self.train )

    @property
    def Ytest( self ):
        return self._labelsOf( self.test )

    @property
    def Yval( self ):
        return self._labelsOf( self.val )
https://raw.githubusercontent.com/vsimonis/MicroExpressionDetector/fd6498aa679a31338c324a5419152bde1dd9427c/MicroExpressionDetector/data_mining/ParamTune.py -------------------------------------------------------------------------------- /MicroExpressionDetector/data_mining/ValPandas.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import numpy as np 3 | import scipy as sp 4 | import math 5 | 6 | class ValPandas(object): 7 | 8 | def __init__ ( self, df ): 9 | self.data = df 10 | self.sets = { 'train': 0, 11 | 'test': 1, 12 | 'val' : 2 } 13 | #self.feats = features 14 | #self.label = label 15 | 16 | def addEmptyCols( self, colnames ): 17 | if type(colnames) is list: 18 | temp = pd.DataFrame( np.zeros( (len( self.data), len(colnames)) ), 19 | columns = colnames, 20 | index = range(len(self.data)) ) 21 | self.data = self.data.join( temp ) 22 | 23 | def addData( self, colName, vals, setStr): 24 | if setStr is None: 25 | self.data.loc[:,colName] = vals 26 | else: 27 | self.data.loc[ self.data.set == self.sets[setStr], colName ] = vals 28 | 29 | def sliceRows( self, condition ): 30 | return self.data 31 | 32 | def splitData( self, ptr, pte, pval ): 33 | """ Adds a column to data called 'set' that creates 34 | training, testing, validation according to probs given""" 35 | self.data['set'] = self.sample( len( self.data), [ptr,pte,pval]) 36 | self.splitSets = { } 37 | 38 | @property 39 | def train( self ): 40 | return self.data[ self.data.set == self.sets['train'] ] 41 | 42 | @property 43 | def test( self ): 44 | return self.data[ self.data.set == self.sets['test'] ] 45 | 46 | @property 47 | def val( self ): 48 | return self.data[ self.data.set == self.sets['val'] ] 49 | 50 | @staticmethod 51 | 52 | 53 | def sample( n, p ): 54 | """ Creates a list (of idxs) of length n, with class probas defined by 55 | list p""" 56 | assert type(p) is list 57 | cnt = 0 58 | ix = list() 59 | for prob in p: 60 | if cnt == 0: 61 
| ix = np.hstack( (ix, np.zeros( n * prob))) 62 | else: 63 | ix = np.hstack( (ix, np.ones( n * prob) * cnt) ) 64 | cnt += 1 65 | 66 | np.random.shuffle( ix ) 67 | ix = list(ix) 68 | 69 | print len(ix) 70 | print n 71 | 72 | while n - len(ix) > 0: 73 | ix.append(0) 74 | return ix 75 | 76 | def getSubset( self, setStr ): 77 | return self.data[ self.data.set == self.sets[setStr] ] 78 | -------------------------------------------------------------------------------- /MicroExpressionDetector/data_mining/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vsimonis/MicroExpressionDetector/fd6498aa679a31338c324a5419152bde1dd9427c/MicroExpressionDetector/data_mining/__init__.py -------------------------------------------------------------------------------- /MicroExpressionDetector/data_parser/CASMELabels.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import math 3 | import numpy as np 4 | import re 5 | 6 | class CASMELabels( object ): 7 | @staticmethod 8 | def getLabelInfo( sub, vid, df, videoKey ): 9 | 10 | # Query master table given subject and video 11 | instance = df.query( 'Subject == %d & Filename == "%s"' % ( int(sub.replace( "sub", "" )), vid) ) 12 | 13 | # Get important stuff from there 14 | onset = int( instance.OnsetFrame.values[0] ) 15 | apex = instance.ApexFrame.values[0] 16 | offset = int( instance.OffsetFrame.values[0] ) 17 | 18 | s,v = videoKey ## unpack (subject, video, frame) 19 | 20 | 21 | ## double check boundaries of onset/apex/offset 22 | accum = { 23 | "subject" : s, 24 | "video" : v, 25 | } 26 | params = { 27 | "emotion" : instance['Estimated Emotion'].values[0], # replace string with num 28 | "onset" : onset, 29 | "apex" : CASMELabels.hasApex( apex ), 30 | "offset" : offset 31 | } 32 | 33 | return accum, params 34 | @staticmethod 35 | def hasApex( a ): 36 | if a == '/' : 37 | return None 38 | else: 39 | 
return int( a ) 40 | 41 | @staticmethod 42 | def isOnset( f, onset, apex ): 43 | return f in range( onset, apex ) 44 | 45 | @staticmethod 46 | def isApex( f, apex ): 47 | return f == apex 48 | 49 | @staticmethod 50 | def isOffset( f, apex, offset ): 51 | return f in range( apex + 1, offset + 1 ) 52 | 53 | @staticmethod 54 | def getEmotion( vidEmotion, frameInfo ): 55 | # Translation between string and integer reps of emotions 56 | emoji = { 'happiness' : 0, 57 | 'disgust' : 1, 58 | 'repression' : 2, 59 | 'fear': 3, 60 | 'sadness' : 4, 61 | 'others' : 5, 62 | 'surprise' : 6, 63 | 'neutral' : 7 64 | } 65 | 66 | if frameInfo['isOnset'] or frameInfo['isApex'] or frameInfo['isOffset']: 67 | return emoji[ vidEmotion ] 68 | else: 69 | return emoji['neutral'] 70 | 71 | @staticmethod 72 | def getFrameParams( labelInfo, labelParams, f, frame ): 73 | if labelParams['apex'] is None: #estimate halfway as apex 74 | apex = int( math.floor( (labelParams['offset'] - labelParams['onset']) / 2 ) ) 75 | else: 76 | apex = labelParams['apex'] 77 | orgFrame = map( int, re.findall( r'\d+', frame ))[0] 78 | accum = { 79 | "frame" : f, 80 | "isOnset" : CASMELabels.isOnset( orgFrame, labelParams['onset'], apex ), 81 | "isApex" : CASMELabels.isApex( orgFrame, labelParams['apex'] ), 82 | "isOffset" : CASMELabels.isOffset( orgFrame, apex, labelParams['offset'] ) 83 | } 84 | 85 | accum.update( { "emotion" : CASMELabels.getEmotion( labelParams[ 'emotion' ], accum) } ) 86 | return accum 87 | -------------------------------------------------------------------------------- /MicroExpressionDetector/data_parser/CASMEParser.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | from data_parser.CASMELabels import CASMELabels 3 | from feature_extraction.GaborExtractor import GaborExtractor as gab 4 | from image_processing.PreProcessing import PreProcessing as pp 5 | import os 6 | import numpy as np 7 | 8 | class CASMEParser( object ): 9 | def 
__init__( self, nScales, nOrientations, imageResolution ): ## 5,8, 11, 14 10 | self.excelFile = "CASME2-coding-20140508.xlsx" 11 | self.dataDir = "C:\Users\Valerie\Desktop\MicroExpress\CASME2\Cropped\Cropped" 12 | #self.dataDir = "C:\Users\Valerie\Desktop\MicroExpress\CASME2\CASME2_RAW\CASME2-RAW" 13 | self.mainDir = "C:\Users\Valerie\Desktop\MicroExpress\CASME2" 14 | self.nScales = nScales 15 | self.nOrient = nOrientations 16 | self.imgRes = imageResolution 17 | ## DIMS needed? 18 | #self.dims = self.nScales * self.nOrient * self.scaledH * self.scaledW 19 | self.gabor = gab( nScales, nOrientations, 15 ) 20 | 21 | 22 | 23 | def run( self ): 24 | ## Read in excel file to data frame 25 | excelFile = os.path.join( self.mainDir, self.excelFile ) 26 | table = pd.ExcelFile( excelFile ) 27 | t = table.parse( ) 28 | 29 | 30 | ## Init parsed data containers 31 | labelInfo = {} 32 | frameInfo = {} 33 | 34 | #Probe sample image for dims 35 | img = pp.readInImg( os.path.join( self.dataDir, "sub01/EP02_01f/reg_img46.jpg" ) ) 36 | self.nh, self.nw = pp.imgRes( img, self.imgRes ) 37 | dims = self.nScales * self.nOrient * self.nh * self.nw 38 | featureInfo = np.array([ ] ).reshape( 0, dims ) 39 | 40 | 41 | tf = 0 42 | s = 0 43 | 44 | ## Iterate through files and generate labels and features 45 | complete = True 46 | ## For each subject 47 | for sub in os.listdir( self.dataDir ): 48 | v = 0 49 | 50 | ## For each video 51 | for vid in os.listdir( os.path.join( self.dataDir, sub ) ): 52 | if vid.endswith( "avi" ): #skip avis 53 | break 54 | 55 | ## Each line in main excel file proports to one video, get that info here 56 | intLabelInfo, labelParams = CASMELabels.getLabelInfo( sub, vid, t, [s, v] ) 57 | f = 0 58 | 59 | ## Process each frame 60 | for frame in os.listdir( os.path.join( self.dataDir, sub, vid ) ): 61 | 62 | frameInfo = CASMELabels.getFrameParams( intLabelInfo, labelParams, f, frame ) 63 | frameInfo.update( intLabelInfo ) 64 | labelInfo.update( { tf : frameInfo } ) 65 | 
img = pp.readInImg( os.path.join( self.dataDir, sub, vid, frame ) ) 66 | img = pp.downsample( img, self.nh, self.nw ) 67 | 68 | 69 | kernels = self.gabor.generateGaborKernels( ) 70 | try: 71 | featureInfo = np.vstack( [ featureInfo, gab.processGabor( img, kernels ) ]) 72 | except MemoryError: 73 | #print "S%d O%d H%d W%d.csv" % (self.nScales, self.nOrient, self.nh, self.nw ) 74 | complete = False 75 | break 76 | f += 1 77 | tf += 1 78 | v += 1 79 | s += 1 80 | 81 | return featureInfo, labelInfo, complete -------------------------------------------------------------------------------- /MicroExpressionDetector/data_parser/CASMEParserMem.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | from data_parser.CASMELabels import CASMELabels 3 | from feature_extraction.GaborWindowExtractor import GaborWindowExtractor, GaborExtractor 4 | from image_processing.PreProcessing import PreProcessing as pp 5 | import os 6 | import numpy as np 7 | 8 | class CASMEParserMem( object ): 9 | def __init__( self, nScales, nOrientations ) : 10 | self.excelFile = "CASME2-coding-20140508.xlsx" 11 | self.dataDir = "C:\Users\Valerie\Desktop\MicroExpress\CASME2\Cropped\Cropped" 12 | #self.dataDir = "C:\Users\Valerie\Desktop\MicroExpress\CASME2\CASME2_RAW\CASME2-RAW" 13 | self.mainDir = "C:\Users\Valerie\Desktop\MicroExpress\CASME2" 14 | self.nScales = nScales 15 | self.nOrient = nOrientations 16 | self.featureOut = "F:\output" 17 | self.gabor = GaborExtractor( nScales, nOrientations, 21) 18 | 19 | def parseLabels( self ): 20 | ## Read in excel file to data frame 21 | excelFile = os.path.join( self.mainDir, self.excelFile ) 22 | table = pd.ExcelFile( excelFile ) 23 | t = table.parse( ) 24 | 25 | 26 | ## Init parsed data containers 27 | labelInfo = {} 28 | frameInfo = {} 29 | 30 | ## Iterate through images and build label frame 31 | 32 | tf = 0 33 | s = 0 34 | 35 | ## Iterate through files and generate labels and features 36 | 37 | ## 
For each subject 38 | for sub in os.listdir( self.dataDir ): 39 | v = 0 40 | ## For each video 41 | for vid in os.listdir( os.path.join( self.dataDir, sub ) ): 42 | if vid.endswith( "avi" ): #skip avis 43 | break 44 | 45 | ## Each line in main excel file proports to one video, get that info here 46 | intLabelInfo, labelParams = CASMELabels.getLabelInfo( sub, vid, t, [s, v] ) 47 | f = 0 48 | 49 | ## Process each frame 50 | for frame in os.listdir( os.path.join( self.dataDir, sub, vid ) ): 51 | 52 | frameInfo = CASMELabels.getFrameParams( intLabelInfo, labelParams, f, frame ) 53 | frameInfo.update( intLabelInfo ) 54 | labelInfo.update( { tf : frameInfo } ) 55 | 56 | f += 1 57 | tf += 1 58 | v += 1 59 | s += 1 60 | return labelInfo 61 | 62 | def writeFeatures( self ): 63 | img = pp.readInImg( os.path.join( self.dataDir, "sub01/EP02_01f/reg_img46.jpg" ) ) 64 | 65 | ## For each subject 66 | for sub in os.listdir( self.dataDir ): 67 | ## For each video 68 | for vid in os.listdir( os.path.join( self.dataDir, sub ) ): 69 | print vid 70 | if vid.endswith( "avi" ): #skip avis 71 | break 72 | ## Process each frame 73 | for frame in os.listdir( os.path.join( self.dataDir, sub, vid ) ): 74 | img = pp.readInImg( os.path.join( self.dataDir, sub, vid, frame ) ) 75 | kernels = self.gabor.generateGaborKernels( ) 76 | 77 | newFeatures = np.ravel(self.gabor.processGaborRaw( img, kernels ) ) 78 | featStr = map( lambda x : '%.15f' % x, newFeatures ) 79 | featOut = str.join( ',' , featStr ) 80 | f = open( os.path.join( self.featureOut, "Gabors-S%d-O%d.csv" % (self.nScales, self.nOrient)), "a" ) 81 | 82 | f.write( "%s\n" % featOut ) 83 | f.close() 84 | 85 | def run( self ): 86 | labelInfo = self.parseLabels() 87 | self.writeFeatures() 88 | return labelInfo -------------------------------------------------------------------------------- /MicroExpressionDetector/data_parser/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/vsimonis/MicroExpressionDetector/fd6498aa679a31338c324a5419152bde1dd9427c/MicroExpressionDetector/data_parser/__init__.py -------------------------------------------------------------------------------- /MicroExpressionDetector/dts.py: -------------------------------------------------------------------------------- 1 | ### AdaBoost 2 | #from sklearn.ensemble import AdaBoostClassifier 3 | #from sklearn.naive_bayes import MultinomialNB 4 | #from sklearn.svm import SVC 5 | from sklearn.tree import DecisionTreeClassifier 6 | from sklearn.metrics import accuracy_score 7 | import numpy as np 8 | import pandas as pd 9 | import math 10 | import os 11 | from data_mining.CASMEData import ValPandas, CASMEData 12 | import re 13 | from matplotlib import pyplot as plt 14 | 15 | minSplit = [ 5 * i for i in range( 1, 30 ) ] 16 | 17 | 18 | OUT = "C:\Users\Valerie\Desktop\output" 19 | 20 | results = {} 21 | i = 0 22 | for file in os.listdir( OUT ): 23 | if file != "CASME-Labels.csv": 24 | print file 25 | ## Parse data 26 | a = file.split("-") 27 | [_, m] = a[:2] 28 | [s,o,h,w,r] = map( lambda x: int( re.findall( r'\d+', x )[0] ), a[2:] ) 29 | print m, s, o, h, w, r 30 | 31 | l = "CASME-labels.csv" 32 | try: 33 | X = pd.DataFrame.from_csv( os.path.join( OUT, file ) ) 34 | except MemoryError: 35 | print "memReadIn" 36 | pass 37 | Y = pd.DataFrame.from_csv( os.path.join( OUT, l )) 38 | df = CASMEData( X = X, Y = Y ) 39 | try: 40 | df.splitData( 0.6, 0.3, 0.1 ) 41 | except MemoryError: 42 | print "memSplit" 43 | pass 44 | 45 | 46 | #fig = plt.figure() 47 | #_ = plt.plot( [0.5,1], [0.5,1]) 48 | #ax = fig.add_subplot(111) 49 | # Tune params 50 | maxTest = 0 51 | for ms in minSplit : #actually minleaf, change name 52 | 53 | clf = DecisionTreeClassifier( min_samples_split = 25, min_samples_leaf = ms ) 54 | try: 55 | _ = clf.fit( df.Xtrain, df.Ytrain ) 56 | YpredTrain = clf.predict( df.Xtrain ) 57 | YpredTest = clf.predict( df.Xtest ) 58 | trainAcc = 
accuracy_score( df.Ytrain, YpredTrain ) 59 | testAcc = accuracy_score( df.Ytest, YpredTest) 60 | #_ = plt.scatter( trainAcc, testAcc ) 61 | #_ = plt.xlabel( "Training Accuracy" ) 62 | #_ = plt.ylabel( "Testing Accuracy" ) 63 | if testAcc > maxTest: 64 | maxTest = testAcc 65 | if trainAcc - testAcc < 0.2 : 66 | 67 | # _ = ax.annotate( '(%d, %d)' % (ms, 25), xy=( trainAcc, testAcc), textcoords='offset points', size = 8) 68 | 69 | results.update( { i : { "Method" : m, 70 | "Scales": s, 71 | "Orientations" : o, 72 | "Height" : h, 73 | "Width" : w, 74 | "Rectangles" : r, 75 | "Training" : trainAcc, 76 | "Testing" : testAcc, 77 | "MinLeaf" : ms} } ) 78 | 79 | 80 | i += 1 81 | except MemoryError: 82 | print "mem" 83 | pass 84 | 85 | #fig.savefig( "out-%s-S%d-O%d-H%d-W%d-R%d.png" % ( m, s, o, h, w, r) ) 86 | # plt.close( fig ) 87 | 88 | df = pd.DataFrame.from_dict( results, orient = 'index') 89 | df.to_csv( "results-dm.csv" ) 90 | 91 | df = df[["Method", "Height", "Width", "Rectangles", "Orientations", "Scales", "MinLeaf", "Training", "Testing" ]] 92 | df = df.sort( columns = ["Height", "Scales", "Rectangles"] ) 93 | 94 | def writeHeader( out ): 95 | out.write("\\documentclass[writeup.tex]{subfiles}\n") 96 | out.write("\\begin{document}\n") 97 | return out 98 | 99 | def writeFooter( out): 100 | out.write("\\end{document}\n") 101 | return out 102 | 103 | def startTable( out ): 104 | out.write("\\begin{table}[H]\n") 105 | out.write("\\centering\n") 106 | out.write("\\begin{tabular}") 107 | return out 108 | def endTable( out ): 109 | out.write("\\end{tabular}\n \\\\") 110 | out.write("\\caption{Accuracy results}\n" ) 111 | out.write("\\end{table}\n") 112 | out.write("\\hspace{5mm}\n") 113 | return out 114 | 115 | def crossTable( out, ct ): 116 | cols = ct.columns 117 | rows = ct.index 118 | 119 | 120 | ## Column Headers 121 | c = len(cols) + 1 122 | 123 | out.write("{|*{%d}{r|}}\n" % c) 124 | out.write("\\hline\n") 125 | for icx in range(len(cols)): 126 | colname = cols[icx] 
127 | if icx == len(cols) - 1: 128 | out.write( "\\textbf{%s}" % (colname)) 129 | else: 130 | out.write( "\\textbf{%s} &" % (colname)) 131 | out.write( "\\\\\n") 132 | out.write("\\hline\n") 133 | 134 | ## Rows 135 | for irx, row in ct.iterrows(): 136 | #out.write( '\\textbf{%s} &' % irx) 137 | for ix, val in enumerate( row.values ): 138 | if ix == len(row.values) - 1: 139 | if isinstance( val, str ): 140 | out.write( "%s" % val ) 141 | elif isinstance( val, long): 142 | out.write( "%d" % val ) 143 | else: 144 | out.write( "%.2f" % (val * 100) ) 145 | 146 | else: 147 | if isinstance( val, str): 148 | out.write( "%s &" % val ) 149 | elif isinstance( val, long): 150 | out.write( "%d &" % val ) 151 | else: 152 | out.write( "%.2f &" % (val * 100) ) 153 | 154 | out.write( "\\\\\n" ) 155 | out.write("\\hline\n") 156 | 157 | return out 158 | 159 | def tableout( df ): 160 | with open("results.tex", 'w') as out: 161 | writeHeader( out ) 162 | startTable( out ) 163 | crossTable( out, df ) 164 | 165 | endTable( out ) 166 | writeFooter( out ) 167 | 168 | tableout( df ) -------------------------------------------------------------------------------- /MicroExpressionDetector/feature_extraction/GaborExtractor.py: -------------------------------------------------------------------------------- 1 | import math 2 | import cv2 3 | import numpy as np 4 | 5 | class GaborExtractor( object ): 6 | def __init__( self, nScales, nOrientations, kernelSize): 7 | self.nScales = nScales 8 | self.nOrientations = nOrientations 9 | self.kSize = kernelSize 10 | 11 | 12 | def generateGaborKernels( self ): 13 | ## scales 14 | frequencies = [ ( math.pi / 2 ) / ( math.pow( math.sqrt( 2 ), u ) ) for u in range(self.nScales) ] 15 | 16 | ## orientations 17 | thetas = [v * math.pi / 8 for v in range(self.nOrientations) ] 18 | sigma = 1 #average of sigmas shown in Shen2006MutualBoost 19 | 20 | # possibility of using ellipse formulation of sigma with major and minor axis components 21 | # orientations first 
then scales!!! 22 | filters = [] 23 | for th in thetas : 24 | for f in frequencies : 25 | filters.append (cv2.getGaborKernel( (self.kSize,self.kSize), sigma, th, 1 / f, 1 ) ) 26 | return filters 27 | 28 | ### https://cvtuts.wordpress.com/2014/04/27/gabor-filters-a-practical-overview/ 29 | 30 | def processGabor(self, img, filters): ## apply all filters to one image 31 | return np.ravel( GaborExtractor.processGaborRaw( img, filters) ) 32 | 33 | @staticmethod 34 | def processGaborRaw( img, filters): 35 | imgs = [] 36 | for kern in filters: 37 | fimg = cv2.filter2D(img, cv2.CV_32F, kern) 38 | imgs.append( fimg ) 39 | return imgs 40 | 41 | 42 | -------------------------------------------------------------------------------- /MicroExpressionDetector/feature_extraction/GaborWindowExtractor.py: -------------------------------------------------------------------------------- 1 | from feature_extraction.GaborExtractor import GaborExtractor 2 | import numpy as np 3 | import copy 4 | 5 | class GaborWindowExtractor( GaborExtractor ): 6 | def __init__( self, nScales, nOrientations, kernelSize, nRects): 7 | GaborExtractor.__init__(self, nScales, nOrientations, kernelSize ) 8 | self.numRects = nRects 9 | 10 | 11 | def getRectangleParams( self, img ): ## only works with even numRects 12 | nrect = copy.deepcopy( self.numRects ) 13 | nrows = 0 14 | ncols = 0 15 | cs = np.shape( img ) 16 | cs = list( cs ) 17 | while nrect != 1: 18 | maxIx = np.argmax( cs ) 19 | if nrect % 2 == 0: #even 20 | self.replace( cs, 2, maxIx ) 21 | if maxIx == 0: 22 | nrows += 2 23 | else: 24 | ncols += 2 25 | nrect /= 2 26 | return cs, (nrows, ncols) 27 | 28 | @staticmethod 29 | def replace( vect, mult, maxIx): 30 | v = vect.pop( maxIx) 31 | vect.insert( maxIx, v / mult ) 32 | 33 | 34 | @staticmethod 35 | def sliceImg( img, rectSize, rectIx ): 36 | slices = [] 37 | for nr in range( rectIx[0] ): 38 | for nc in range( rectIx[1] ): 39 | slices.append( list( img[ nr * rectSize[0] + nr : nr * rectSize[0] + nr + 
rectSize[0], 40 | nc * rectSize[1] + nc : nc * rectSize[1] + nc + rectSize[1]] ) ) 41 | return slices 42 | 43 | @staticmethod 44 | def findMaxMag( fImg ): 45 | return np.argmax( fImg ) 46 | 47 | @staticmethod 48 | def getResponseVals( fImg, maxLoc): 49 | return fImg[ maxLoc[0] ][ maxLoc[1] ] 50 | 51 | 52 | def process( self, I ): 53 | f = self.generateGaborKernels() 54 | imgs = self.processGaborRaw( I, f ) 55 | fVect = [] 56 | for fIx, fImg in enumerate( imgs ): 57 | o,s = self.getOrientationScaleIx( fIx, self.nOrientations, self.nScales ) 58 | maxLocs = [] 59 | rsize, rIx = self.getRectangleParams( fImg ) 60 | slices = self.sliceImg( fImg, rsize, rIx ) 61 | for slice in slices: 62 | sliceR, sliceC = np.shape( slice ) 63 | if s == 0: 64 | maxLoc = self.findMaxMag( slice ) 65 | maxLoc = self.ind2sub( maxLoc, sliceR, sliceC ) 66 | 67 | 68 | fVect.append( self.getResponseVals( fImg ,maxLoc) ) 69 | 70 | return fVect 71 | 72 | #http://stackoverflow.com/questions/28995146/matlab-ind2sub-equivalent-in-python 73 | @staticmethod 74 | def getOrientationScaleIx( ix, no, ns ): 75 | return GaborWindowExtractor.ind2sub( ix, no, ns ) 76 | 77 | ## Given number of rectangles wanted, returns 78 | @staticmethod 79 | def ind2sub( ix, nrows, ncols ): 80 | row = ix / ncols 81 | col = ix % ncols # or numpy.mod(ind.astype('int'), array_shape[1]) 82 | return (row, col) 83 | 84 | -------------------------------------------------------------------------------- /MicroExpressionDetector/feature_extraction/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vsimonis/MicroExpressionDetector/fd6498aa679a31338c324a5419152bde1dd9427c/MicroExpressionDetector/feature_extraction/__init__.py -------------------------------------------------------------------------------- /MicroExpressionDetector/helpers/FileHelper.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 
class FileHelper( object ):
    """File I/O helper: reads training landmark (.pts) files and images, and
    serializes/deserializes ASM models to a delimited text format."""

    def __init__( self, nIters, nTrain, out, filterPts, filterImgs ):
        # Hard-coded location of the session_1 training point files.
        direc = "C:\\Users\\Valerie\\Desktop\\MicroExpress\\facePoints"
        subdir = "session_1"
        self.pointFolder = os.path.join( direc, subdir )
        self.pointFiles = next(os.walk(self.pointFolder))[2]
        self.nTrain = nTrain          # number of training shapes to read
        self.nIters = nIters          # iteration count (used in output filenames)
        self.output = out             # output directory
        self.exclPts = [ 31 ,36, 66, 67]   # landmark indices dropped when filtering
        self.doExclPts = filterPts
        self.exclImgs = [ 15, 36, 58, 155, 165, 166, 167, 183, 245, 292, 299, 312, 340, 341, 405, 461]
        self.doExclImgs = filterImgs
        # setup logging

    @staticmethod
    def readInEbenImg( ):
        """Load the hard-coded 'Eben' template image as grayscale."""
        img = cv2.imread( 'C:\\Users\\Valerie\\Downloads\\000_1_1.ppm')
        img_gray = cv2.cvtColor( img, cv2.COLOR_BGR2GRAY )
        return img_gray

    @staticmethod
    def readInImage():
        """Load a test image as grayscale.

        NOTE(review): the first two imread calls are dead -- each assignment
        overwrites the previous, so only the last path is actually used.
        """
        img = cv2.imread( 'C:\\Users\\Valerie\\Desktop\\MicroExpress\\CASME2\\Cropped\\Cropped\\sub02\\EP01_11f\\reg_img46.jpg' )
        img = cv2.imread( 'C:\\Users\\Valerie\Desktop\\MicroExpress\\CASME2\\CASME2_RAW\\CASME2-RAW\\sub01\\EP02_01f\\img1.jpg')
        img = cv2.imread( 'C:\\Users\\Valerie\\Downloads\\000_1_1.ppm')
        img_gray = cv2.cvtColor( img, cv2.COLOR_BGR2GRAY )
        return img_gray

    def writeOutASM( self, asm ):
        """Serialize asm: mean shape, '!!!', then each training shape
        terminated by '@@@', and a closing '!!!'."""
        with open( os.path.join(self.output, 'outfile-ASM-%diters-%dtr.txt' % (self.nIters, self.nTrain) ), "w") as output:
            output.write( str(asm.meanShape) )
            output.write( "!!!\n" )
            for shape in asm.allShapes:
                output.write( str(shape) )
                output.write( "@@@\n" )
            output.write( "!!!\n" )

    def readInASM( self, asm ):
        """Inverse of writeOutASM: rebuild asm.meanShape and the shape list
        from the serialized text file, then return asm."""
        allLines = None
        with open( os.path.join( self.output, 'outfile-ASM-%diters-%dtr.txt' % (self.nIters, self.nTrain)) , "r") as infile:
            allLines = infile.readlines()
        cleanLines = map( lambda x : x.strip().split(), allLines )
        s = []
        for toks in cleanLines:   # renamed from `tuple` -- don't shadow the builtin
            if toks[0] == '!!!':
                # first '!!!' commits the mean shape; the final one is a no-op
                if s != []:
                    asm.meanShape = ActiveShape( s )
                    s = []
                else:
                    pass
            elif toks[0] == '@@@':
                if s != [] :
                    asm.addShape( ActiveShape(s) )
                    s = []
                else:
                    pass
            else:
                s.append( Vector( float(toks[0]), float(toks[1]) ) )
        return asm

    def readInOneDude( self, f):
        """Parse one .pts landmark file and return a list of [x, y] floats.

        Points appear between a '{' line and a '}' line.  When doExclPts is
        set, exclPts indices are removed (popped highest-first so earlier
        removals do not shift later indices).
        """
        ptList = [ ]
        ex = copy.deepcopy( self.exclPts )
        with open( os.path.join(self.pointFolder,f), "r" ) as infile:
            allLines = infile.readlines()
            pointLine = False
            cleanLines = [ x.strip() for x in allLines]
            for line in cleanLines:
                # FIX: compare with == ; `is` tested object identity and only
                # worked by accident of CPython's small-string interning.
                if line == '{':
                    pointLine = True
                elif line == '}':
                    pointLine = False
                elif pointLine:
                    # FIX: materialize the floats as a list so this also works
                    # on Python 3, where map() returns a lazy iterator.
                    ptList.append( [ float(v) for v in line.split(' ') ] )
                else:
                    pass
        if self.doExclPts:
            while ex != []:
                ptList.pop( ex.pop() )
        return ptList

    def readInPoints( self, asm ):
        """Read up to nTrain point files into asm (optionally skipping the
        excluded image indices) and return the populated asm."""
        m = 0
        for f in self.pointFiles :
            ptList = self.readInOneDude( f )
            if self.doExclImgs:
                if m in ( set( range(self.nTrain) ) - set( self.exclImgs ) ):
                    asm.addShape( ActiveShape( ptList ) )
            else:
                asm.addShape( ActiveShape( ptList ) )
            m += 1
            if m > self.nTrain - 1:
                return asm
        # FIX: also return asm when there are fewer files than nTrain
        # (previously fell off the end and returned None).
        return asm
class WriteUp(object):
    """Generates the PCA write-up artifacts for a trained ASM: a
    variance-explained table, mean-shape variations along principal
    components, and eigenvector overlay figures.  Output goes to
    <FileHelper.output>/PCA."""

    def __init__( self, ASM, FileHelper ):
        self.asm = ASM
        self.fh = FileHelper
        self.nPlots = 5                       # shapes drawn per variation figure
        self.P = ASM.evecs                    # eigenvector matrix (columns = PCs)
        self.b = np.ravel( ASM.evals )        # eigenvalues, flattened
        self.lim = 0.25                       # +/- range in sqrt(eigenvalue) units
        self.xbar = ASM.meanShape.flatten()   # mean shape as a flat (x0,y0,x1,y1,...) vector
        self.xlim = (-3,3)
        self.ylim = (-3,3)
        self.out = os.path.join( FileHelper.output, "PCA")

    def PCAresults( self ) :
        """Write the variance-explained table and render all PCA figures,
        plus an overlay of every training face."""
        with open( os.path.join( self.out, 'faces-results-%diters-%dtr.txt' % (self.fh.nIters, self.fh.nTrain)), 'w') as outfile:
            # per-PC fraction of variance and cumulative fraction, first 20 PCs
            for i in range( 20 ):
                outfile.write("%d: %f, %f \n" % ( i, self.b[i] / sum( self.b ), sum( self.b[:i+1] ) / sum(self.b ) ) )

        ## Reproject multiple PCS and vary evals
        for i in range( 10 ):
            self.showVaryMultiplePCS( i + 1 )
            self.showVary( i )
            self.exampleEvalEvecs( i )

        ### Draw all faces
        for sh in self.asm.allShapes :
            DrawFace( sh, plt ).drawContrast()
        plt.gca().invert_yaxis()   # image coordinates: y grows downward
        plt.savefig( os.path.join( self.out, 'faces-all-%diters-%dtr.png' % (self.fh.nIters, self.fh.nTrain) ) )
        plt.close()

    def showVaryMultiplePCS( self, numPCs ):
        """Figure with nPlots shapes, sweeping the first numPCs weights
        simultaneously from -lim*sqrt(eval) to +lim*sqrt(eval)."""
        f, axes = plt.subplots( 1, self.nPlots )
        bs = []
        for p in range( numPCs ):
            rs = np.linspace( - self.lim * math.sqrt( self.b[p] ), self.lim * math.sqrt( self.b[p] ), self.nPlots )
            bs.append( rs )
        P = self.P[:, 0:numPCs]
        for pl in range(self.nPlots) :
            # x = xbar + P b  (standard ASM reconstruction)
            b = [ bs[p][pl] for p in range(len(bs) ) ]
            X = np.add( self.xbar, np.dot( P, b ) )
            s = ActiveShape.createShape( X )
            DrawFace( s, axes[pl] ).drawBold()
            axes[pl].set_xlim( self.xlim )
            axes[pl].set_ylim( self.ylim )
            axes[pl].invert_yaxis()
        f.savefig( os.path.join( self.out, 'faces-%d-PCs-at-once.png' % numPCs ) )
        plt.close()

    def showVary( self, evIx ) :
        """Figure sweeping only PC evIx, with the mean shape and eigenvector
        whiskers drawn for contrast in each panel."""
        # Vary one eigenvalue
        f, axes = plt.subplots( 1, self.nPlots, sharex = True, sharey = True )

        rs = np.linspace( -self.lim * math.sqrt( self.b[evIx] ), self.lim * math.sqrt( self.b[evIx] ), self.nPlots )

        for m in range( len(rs) ):
            s = self.projectOnePC( evIx, rs[m] )
            DrawFace( s, axes[m] ).drawBold()
            axes[m].set_xlim( self.xlim)
            axes[m].set_ylim( self.ylim )
            self.plotEigenvectors( evIx, axes[m])
            DrawFace( self.asm.meanShape, axes[m] ).drawContrast()
        plt.gca().invert_yaxis()
        f.savefig( os.path.join( self.out, 'faces-PC-%d.png' % evIx ) )

        plt.close()

    def plotEigenvectors( self, evIx, axes ):
        """Draw +/- eigenvector segments of PC evIx at every mean-shape
        landmark (flat vector layout: even index = x, odd = y)."""
        for ptIx in np.multiply( 2, range(self.asm.n) ):
            axes.plot( [ self.xbar[ptIx], self.xbar[ptIx] + self.lim * math.sqrt(self.b[evIx] )* self.P[ptIx][evIx] ] ,
                       [ self.xbar[ptIx+1], self.xbar[ptIx+1] + self.lim * math.sqrt( self.b[evIx] )* self.P[ptIx + 1][evIx] ],
                       c = '#A0A0A0',
                       lw = 0.5)
            axes.plot( [ self.xbar[ptIx], self.xbar[ptIx] - self.lim * math.sqrt(self.b[evIx] )* self.P[ptIx][evIx] ] ,
                       [ self.xbar[ptIx+1], self.xbar[ptIx+1] - self.lim * math.sqrt( self.b[evIx] )* self.P[ptIx + 1][evIx] ],
                       c = '#A0A0A0',
                       lw = 0.5)

    def projectOnePC( self, evIx, b ) :
        """Shape reconstructed from the mean plus b * (PC evIx)."""
        X = np.add( self.xbar, np.multiply( self.P[:,evIx], b ) )
        return ActiveShape.createShape( X )

    def reProjectCumulative( self, numVals ) :
        """Reconstruct using the first numVals PCs weighted by their own
        eigenvalues.

        NOTE(review): b is wrapped in an extra list around the transposed
        matrix -- the resulting np.dot shape looks suspect; verify before
        relying on this method (it appears unused in this file).
        """
        P = self.P[:, 0:numVals]
        b = [ np.transpose(np.mat( self.b[ 0: numVals ] )) ]

        X = np.add( self.xbar, np.dot( P, b ) )

        return ActiveShape.createShape( X )

    def exampleEvalEvecs( self, evIx ):
        """Plot the mean shape with PC evIx's eigenvector (scaled by
        sqrt(eigenvalue)) drawn from each landmark."""
        vecs = np.array( self.P )
        newPts = np.add( self.xbar, np.dot(math.sqrt( self.b[evIx] ), vecs[:,evIx] ) )

        newx, newy = ActiveShape.deravel( newPts )

        ## Mean Shape
        DrawFace( self.asm.meanShape, plt ).drawBold()

        ## Eigenvectors
        plt.plot( [self.asm.meanShape.xs, np.add( self.asm.meanShape.xs, newx ) ],
                  [ self.asm.meanShape.ys, np.add( self.asm.meanShape.ys, newy ) ], c ='#A0A0A0', lw = 1 )
        plt.xlim( self.xlim )
        plt.ylim( self.ylim )

        ## New Shape
        plt.gca().invert_yaxis()
        plt.savefig( os.path.join( self.out, 'faces-eigenvectors-%d.png' % evIx ))
        plt.close()
class PreProcessing(object):
    """Static helpers for face-image preprocessing: loading, downsampling,
    Haar-cascade face/eye detection, and eye-based cropping."""

    def __init__( self ):
        return

    @staticmethod
    def readInImg( imgPath ):
        """Read an image from disk and convert it to grayscale."""
        img = cv2.imread( imgPath )
        img = cv2.cvtColor( img, cv2.COLOR_BGR2GRAY )
        return img

    @staticmethod
    def readInCrop( imgPath ):
        """Read a grayscale image; if taller than 400 px, crop around the eyes."""
        # FIX: qualify the static-method calls -- the bare names readInImg /
        # crop are not in scope inside this method and raised NameError.
        img = PreProcessing.readInImg( imgPath )
        if np.shape( img )[0] > 400:
            img = PreProcessing.crop( img, 2.5, 2)
        return img

    @staticmethod
    def imgRes( img, scale ):
        """Return (height, width) of a 2-D img scaled by `scale`, floored to ints."""
        dims = np.shape( img )
        nh, nw = map( lambda m: int(math.floor( m * scale)), dims )
        return nh, nw

    @staticmethod
    def downsample( img, nh, nw ):
        """Resize img with nearest-neighbour interpolation.

        NOTE(review): cv2.resize expects dsize as (width, height); passing
        (nh, nw) swaps the axes for non-square targets -- TODO confirm.
        """
        img = cv2.resize( img, dsize= (nh, nw), interpolation = cv2.INTER_NEAREST )
        return img

    @staticmethod
    def faceDetection( img ):
        """Return the ROI of the first detected face.

        NOTE(review): the second CascadeClassifier assignment overwrites the
        first, so only the alt_tree cascade is actually used.
        """
        face_cascade = cv2.CascadeClassifier( 'C:\\OpenCV\\data\\haarcascades\\haarcascade_frontalface_default.xml')
        face_cascade = cv2.CascadeClassifier( 'C:\\OpenCV\\data\\haarcascades\\haarcascade_frontalface_alt_tree.xml')
        faces = face_cascade.detectMultiScale( img, 1.1, 2)

        x,y,w,h = faces[0]
        roi_gray = img[y:y+h, x:x+w]
        return roi_gray

    @staticmethod
    def eyeDetection( img ):
        """Detect eyes and reduce the candidates to two centre Points."""
        eye_cascade = cv2.CascadeClassifier('C:\\OpenCV\\data\\haarcascades\\haarcascade_eye_tree_eyeglasses.xml')
        eyes = eye_cascade.detectMultiScale( img, 1.1, 2)
        eye1, eye2 = PreProcessing.filterFoundEyes( eyes, img )
        return eye1, eye2

    @staticmethod
    def crop( img, fh, fw ):
        """Crop img around the detected eyes: the window is (fh*d) x (fw*d)
        where d is the inter-eye distance."""
        # Using eyes
        eye1, eye2 = PreProcessing.eyeDetection( img )
        d = Point.dist( eye1, eye2 )

        newH = int(fh * d)  # set more firmly for other image types for consistent size
        newW = int(fw * d)

        y0, x0 = PreProcessing.newEyeLoc( eye1, eye2, newH, newW, d )
        # FIX: slice indices must be integers -- newEyeLoc can return floats
        # because the inter-eye distance d is a float.
        y0, x0 = int(y0), int(x0)

        return img[ y0 : y0 + newH, x0 : x0 + newW ]

    @staticmethod
    def newEyeLoc( eye1, eye2, newH, newW, d):
        """Top-left corner (y0, x0) of the crop window, placed so the eyes
        sit one third from the top, horizontally centred."""
        if eye1.x < eye2.x:
            y0 = eye1.y - ( newH ) / 3
            x0 = eye1.x - ( newW - d )/2
        else:
            y0 = eye2.y - ( newH ) / 3
            x0 = eye2.x - ( newW - d )/2
        return y0, x0

    @staticmethod
    def showEyes( img, eye1, eye2 ):
        """Debug view: draw both eye centres on a copy of img and show it.

        NOTE(review): cv2.circle expects an (x, y) tuple as centre; eye1/eye2
        are Point objects here -- confirm Point supports that, or convert.
        """
        import copy  # FIX: `copy` was used here but never imported in this module
        im_toshow = copy.deepcopy( img )
        cv2.circle( im_toshow, eye1, 10, (1,0,0) )
        cv2.circle( im_toshow, eye2, 10, (0,1,0) )
        plt.imshow( im_toshow, cmap = "gray" )
        plt.show()

    @staticmethod
    def areaRect( tup ):
        """Area of an (x, y, w, h) detection rectangle."""
        return tup[2] * tup[3]

    @staticmethod
    def filterFoundEyes( eyes, img ):
        """Reduce cascade detections to two eye-centre Points.

        With several detections, keep the two largest by area; with exactly
        one, estimate the other eye by mirroring horizontally (w0/5 offset).
        """
        if len( eyes ) > 1: ## if multiple eyes are returned, get biggest two
            eyeArr = []
            for (ex,ey,ew,eh) in eyes:
                eyeArr.append( ( ex, ey, ew, eh ) )

            # FIX: materialize the map -- on Python 3 a map object has no .pop()
            eyeAreas = list( map( PreProcessing.areaRect, eyeArr ) )
            ix = np.argmax( eyeAreas )
            eye1 = eyeArr[ix]

            eyeAreas.pop( ix )
            eyeArr.pop( ix )

            ix = np.argmax( eyeAreas )
            eye2 = eyeArr[ix]

            ex1, ey1, ew1, eh1 = eye1
            ex2, ey2, ew2, eh2 = eye2

        else: ## just one eye, estimate the other
            ex, ey, ew, eh = eyes[0]
            w0 = np.shape( img )[1]
            if ex < w0 / 2:
                ex1, ey1, ew1, eh1 = [ ex, ey, ew, eh ]
                ex2, ey2, ew2, eh2 = [ ex + w0/5, ey, ew, eh ]
            else:
                ex1, ey1, ew1, eh1 = [ ex - w0/5, ey, ew, eh ]
                ex2, ey2, ew2, eh2 = [ ex, ey, ew, eh ]

        # centre of each detection rectangle
        eye1loc = Point( ex1 + ew1/2, ey1+ eh1/2 )
        eye2loc = Point( ex2 + ew2/2, ey2+ eh2/2)

        return eye1loc, eye2loc
== "SSD" : 36 | resp = map( lambda w : self.SSD( t, w ), ws ) 37 | ix = np.argmin( resp ) 38 | # print "ix %d, ixs:%d,%d" % (ix, ixs[ix].x, ixs[ix].y ) 39 | else : 40 | resp = map( lambda w : self.normCorr( t, w ), ws ) 41 | ix = np.argmin( resp ) 42 | 43 | return TemplateMatcher.coordOffset(ixs[ix], self.n ** 2) 44 | 45 | 46 | 47 | def processRegionSSD( self, r ): 48 | """ 49 | Matches each point in the region to minimum SSD template from entire list of templates 50 | """ 51 | ixs = TemplateMatcher.matIxs( self.n ** 2 ) 52 | ## 5 x 5 windows for region r 53 | ws = map( lambda x : TemplateMatcher.slice( r, self.n, x ), ixs ) 54 | 55 | ts = map( lambda w : map( lambda t : TemplateMatcher.SSD( t, w ), self.templates ) , ws) 56 | tMatch = map( lambda x : np.argmin( x, axis = 0), ts ) 57 | lMatch = map( lambda x : np.min( x, axis = 0) ,ts ) 58 | ix = np.argmin( lMatch ) 59 | return TemplateMatcher.coordOffset(ixs[ix], self.n ** 2)#, tMatch[ix] ixs[ix].x, ixs[ix].y, tMatch[ix] 60 | 61 | def processRegionNormCorr( self, r ): 62 | ixs = TemplateMatcher.matIxs( self.n ** 2 ) 63 | ## 5 x 5 windows for region r 64 | ws = map( lambda x : TemplateMatcher.slice( r, self.n, x ), ixs ) 65 | 66 | ts = map( lambda w : map( lambda t : self.normCorr( t, w ), self.templates ) , ws) 67 | tMatch = map( lambda x : np.argmax( x, axis = 0), ts ) 68 | lMatch = map( lambda x : np.max( x, axis = 0) ,ts ) 69 | ix = np.argmax( lMatch ) 70 | return TemplateMatcher.coordOffset(ixs[ix], self.n**2)#, tMatch[ix] 71 | 72 | 73 | 74 | 75 | 76 | def performMatchingOld( self, I, model ): 77 | start = time.time() 78 | ixs = TemplateMatcher.matIxs( self.n ** 2 ) 79 | if self.method == "SSD": 80 | stuff = PPool().map( self.processRegionSSD, self.genRegionsArr(I, model) ) 81 | else: 82 | stuff = PPool().map( self.processRegionNormCorr, self.genRegionsArr(I, model) ) 83 | #print "match: %f" % ( time.time() - start) 84 | return stuff 85 | 86 | 87 | 88 | def genRegionsArr( self, I, model ): 89 | regions = [] 90 
| for pt in model.shapePoints : 91 | # print pt.x, pt.y 92 | regions.append( TemplateMatcher.slice( I, self.n ** 2, pt ) ) 93 | return regions 94 | 95 | 96 | def genTemplatesArr( self ): 97 | templates = [] 98 | for pt in self.ebenShape.shapePoints : 99 | templates.append( TemplateMatcher.slice( self.ebenImg, self.n, pt ) ) 100 | return templates 101 | 102 | def genTemplatesDict( self ): 103 | templates = {} 104 | ix = 0 105 | for pt in self.ebenShape.shapePoints : 106 | templates.update( { ix : TemplateMatcher.slice( self.ebenImg, self.n, pt ) } ) 107 | return templates 108 | 109 | 110 | 111 | def getEben( self ): 112 | i = 20 113 | tr = 500 114 | out = "C:\\Users\\Valerie\\Desktop\\output\\20-500-1" 115 | ### END OF NEED? 116 | fh = FileHelper( i, tr, out, self.fPts, False) 117 | trainImg = fh.readInEbenImg() 118 | trainPts = fh.readInOneDude( '000_1_1.pts') 119 | return trainImg, ActiveShape( trainPts ) 120 | 121 | ### Matrix Manipulation 122 | @staticmethod 123 | def hinge( v, n ): 124 | # return lower and upper bounds of n x n region around float/int v 125 | # print v 126 | # print n 127 | lower = int(round(v) - math.floor( n / 2 )) 128 | upper = int(round(v) + math.floor( n / 2 )) 129 | 130 | return lower, upper 131 | 132 | @staticmethod 133 | def slice( mat, n, pt ): 134 | """ 135 | Slices matrix centered around point pt to have size nxn 136 | If area includes the edge of imaged area, 0 padded rows/cols 137 | are added 138 | 139 | 140 | lc uc 141 | lr -------------- 142 | | 143 | | 144 | | 145 | ur -------------- 146 | """ 147 | 148 | lc, uc = TemplateMatcher.hinge( pt.x, n ) ## column limits 149 | lr, ur = TemplateMatcher.hinge( pt.y, n ) 150 | nr, nc = np.shape( mat ) 151 | nr = nr - 1 152 | nc = nc - 1 153 | 154 | alc, auc = lc, uc 155 | alr, aur = lr, ur 156 | rpb, rpa, cpl, cpr = [0,0,0,0] 157 | 158 | if lc < 0: 159 | alc = 0 160 | cpl = -lc 161 | 162 | if uc > nc: 163 | auc = nc 164 | cpr = uc - auc 165 | 166 | if lr < 0: 167 | alr = 0 168 | rpb = -lr 
169 | 170 | if ur > nr: 171 | aur = nr 172 | rpa = ur - aur 173 | 174 | return np.pad( mat[ alr : aur + 1 , alc : auc + 1 ], (( rpb, rpa ),( cpl, cpr )), mode ='constant' ) 175 | 176 | 177 | 178 | ## http://stackoverflow.com/questions/28995146/matlab-ind2sub-equivalent-in-python 179 | def ind2sub(array_shape, ind): 180 | rows = (int(ind) / array_shape[1]) 181 | cols = (int(ind) % array_shape[1]) # or numpy.mod(ind.astype('int'), array_shape[1]) 182 | return (rows, cols) 183 | 184 | 185 | def normCorr( self, template, image ): 186 | t = np.ravel( template - np.mean( template )) 187 | nt = math.sqrt( sum( t ** 2 ) ) 188 | i = np.ravel( image - np.mean( image )) 189 | ni = math.sqrt( sum( i ** 2 ) ) 190 | if ni == 0 or nt == 0: 191 | return 0 192 | th = np.divide( t, nt ) 193 | ih = np.divide( i, ni ) 194 | return sum ( th * ih ) 195 | 196 | @staticmethod 197 | def SSD( template, image ): 198 | t = np.ravel(template - np.mean( template )) 199 | i = np.ravel(image - np.mean( image )) 200 | return sum( ( t - i ) ** 2 ) 201 | 202 | @staticmethod 203 | def matIxs( n ): 204 | """ 205 | Given region of size n x n, returns all col, row indices for iteration 206 | 207 | x --> col 208 | y --> row 209 | """ 210 | rows, cols = np.indices( (n,n) ) 211 | row = rows.flatten() 212 | col = cols.flatten() 213 | 214 | return map( lambda x: Vector( x[0], x[1] ), zip( col, row ) ) 215 | 216 | @staticmethod 217 | def coordOffset( pt, n ): 218 | y = pt.y #row 219 | x = pt.x #col 220 | v = Vector.unit( [[ x - ( n - 1) / 2],[ y - ( n - 1 )/ 2 ] ]) 221 | # print type(v) 222 | #print "CoordOff: %f %f" % (v[0], v[1]) 223 | return v 224 | 225 | def corr( template, image ): 226 | t = np.ravel( template - np.mean( template )) 227 | i = np.ravel( image - np.mean( image )) 228 | return sum( t * i ) 229 | 230 | 231 | 232 | 233 | 234 | 235 | 236 | -------------------------------------------------------------------------------- /MicroExpressionDetector/image_processing/__init__.py: 
def showImg( imgIn ):
    """Display a numpy image array in an external viewer via PIL."""
    img = Image.fromarray(imgIn)
    img.show()

def readInAlign( ):
    """Rebuild a PASM from the serialized alignment file.

    File format (see FaceMainPASM): mean-shape point lines, '!!!', then each
    training shape terminated by '@@@', and a closing '!!!'.
    NOTE(review): an empty line in the file would raise IndexError on
    tuple[0]; the writer never emits blank lines, so this is only latent.
    """
    allLines = None
    with open( "outfile-ASM-100iters-500tr.txt", "r") as infile:
        allLines = infile.readlines()
    cleanLines = map( lambda x : x.strip().split(), allLines )

    asm = PASM( [36,31],10 )
    s = []

    # `tuple` shadows the builtin here -- each item is the token list of one line
    for tuple in cleanLines:
        if tuple[0] == '!!!':
            # first '!!!' commits the accumulated mean shape; final one is a no-op
            if s != []:
                asm.meanShape = Shape( s )
                s = []
            else:
                pass
        elif tuple[0] == '@@@':
            if s != [] :
                asm.addShape( Shape(s) )
                s = []
            else:
                pass
        else:
            s.append( Point( float(tuple[0]), float(tuple[1]) ) )

    return asm
def areaRect( tup ):
    """Area of an (x, y, w, h) detection rectangle."""
    return tup[2] * tup[3]


def findEyes( img_gray ):
    """Detect eye candidates with a Haar cascade and return the centre
    Points of the two largest detections (by rectangle area)."""
    eye_cascade = cv2.CascadeClassifier('C:\\OpenCV\\data\\haarcascades\\haarcascade_eye.xml')
    eyes = eye_cascade.detectMultiScale(img_gray)
    im_toshow = copy.deepcopy(img_gray)
    eyeArr = []
    for (ex,ey,ew,eh) in eyes:
        cv2.rectangle(im_toshow,(ex,ey),(ex+ew,ey+eh),(0,255,0),2)
        eyeArr.append( ( ex, ey, ew, eh ) )

    # FIX: materialize the map -- on Python 3 a map object has no .pop()
    eyeAreas = list( map( areaRect, eyeArr ) )
    ix = np.argmax( eyeAreas )
    eye1 = eyeArr[ix]

    eyeAreas.pop( ix )
    eyeArr.pop(ix )

    ix = np.argmax( eyeAreas )
    eye2 = eyeArr[ix]

    ex1, ey1, ew1, eh1 = eye1
    eye1loc = Point( ex1 + ew1/2,ey1+ eh1/2)

    ex2, ey2, ew2, eh2 = eye2
    eye2loc = Point( ex2 + ew2/2,ey2+ eh2/2)

    return eye1loc, eye2loc


def calcShift( point, img_gray ):
    """Walk from `point` along its image gradient (tracking the maximum
    gradient magnitude seen) and return the shifted (x, y).

    NOTE(review): relies on the module-level global `dasm`.
    """
    p = point

    # Get gradient ( unitV already )
    f, m = dasm.getGradient( p, img_gray )
    cX = p.x
    cY = p.y
    maxM = 1
    cnt = 0
    while cX < np.shape( img_gray )[1] and cY < np.shape( img_gray )[0]:
        if cnt > 30:
            # FIX: was `pt.x` / `pt.y` -- `pt` is undefined in this function
            # (NameError); the parameter local is `p`.
            return p.x + f[0] * float(m)/float(maxM), p.y + f[1]* float(m)/float(maxM)
        cnt+= 1
        cX = cX + f[0]
        cY = cY + f[1]
        if cX < np.shape( img_gray )[1] and cY < np.shape( img_gray)[0]:
            _, cM = dasm.getGradient( Point( cX, cY ), img_gray )
            if cM > maxM:
                maxM = cM
    # FIX: same `pt` -> `p` correction on the fall-through return
    return p.x + f[0] * float(m)/float(maxM), p.y + f[1]* float(m)/float(maxM)
def drawFaces( dasm ):
    """Plot the deformable model and its transformed appearance model."""
    # Test model and appModel structures
    FaceDraw( dasm.model, plt).drawBold()
    FaceDraw( dasm.appModel, plt).drawBold()

#### MAIN ###########
# Script flow: load the pre-aligned ASM, wrap it in a deformable model,
# align it to the eyes found in a test image, then iteratively shift every
# model point along the local image gradient.
asm = readInAlign()
dasm = DASM( asm )

drawFaces( dasm )

## Read in image
img_gray = readInImage()

## Align model to image face
eye1, eye2 = findEyes( img_gray )
dasm.alignEyes( eye1, eye2, img_gray )

plt.imshow( img_gray, cmap = 'gray' )
drawFaces( dasm )

dmodel = copy.deepcopy( dasm.model )
i = 0
# 100 refinement passes; each pass rebuilds the working shape from the
# gradient-shifted points
while i < 100:
    allpt = []
    pts = []

    for pt in dmodel.shapePoints:
        dx, dy = calcShift( pt, img_gray )
        allpt.append( dx )
        allpt.append( dy )
        pts.append( [dx, dy] )

    dmodel = Shape( pts )
    FaceDraw( dasm.model, plt ).drawBold()
    FaceDraw( dmodel, plt ).drawContrast()
    plt.imshow( img_gray )

    i+= 1

plt.show()

# final comparison: original model (bold) vs. deformed model (contrast)
FaceDraw( dasm.model, plt ).drawBold()
FaceDraw( dmodel, plt ).drawContrast()
plt.imshow( img_gray )
plt.show()
def PCA( asm ):
    """Compute the shape PCA: mean shape, then eigendecomposition of the
    summed per-shape covariance.  Returns (asm, eigenvalues, eigenvectors).

    NOTE(review): the two map() calls are relied on for side effects /
    materialized results -- on Python 3 map is lazy, so calcDiff never runs
    and sum(cov) fails.  This file is Python 2 code.
    """
    asm.calcMeanShape()
    map( lambda x : x.calcDiff( asm.meanShape ), asm.allShapes )
    cov = map( lambda x : x.calcSingleCov(), asm.allShapes )
    S = sum( cov )

    vals, vecs = np.linalg.eig( S )
    vecs = np.array( vecs )
    return asm, vals, vecs


def main():
    """Run PCA and emit the write-up artifacts (variance table + figures).

    NOTE(review): readIn() is only defined inside the disabled triple-quoted
    block at the bottom of this file -- calling main() as-is raises NameError.
    """
    asm = readIn()
    asm, vals, vecs = PCA( asm )
    # Variance explained
    with open('faces-results.txt', 'w') as outfile:
        for i in range( 20 ):
            outfile.write("%d: %f, %f \n" % ( i, vals[i] / sum( vals ), sum( vals[:i+1] ) / sum(vals ) ) )

    ## Reproject multiple PCS and vary evals
    for i in range( 10 ):
        showVaryMultiplePCS( asm, vals, vecs, 5, i+1 )
        showVary( asm, vals, vecs, 5, i, 0.25)
        exampleEvalEvecs(asm, vecs, vals, i )

    ### Draw all faces
    for sh in asm.allShapes :
        FaceDraw( sh, plt ).drawContrast()
    plt.savefig( 'faces-all.png')
    plt.close()


def reravel( vect ):
    """Split a flat (x0, y0, x1, y1, ...) vector into separate x and y lists."""
    x, y = [], []
    vect = np.ravel( vect )
    for i in range( len( vect ) ):
        if i % 2 == 0:
            x.append( vect[i] )
        else:
            y.append( vect[i] )
    return x, y


def showVaryMultiplePCS( asm, vals, vecs, numPlots, numPCs ):
    """Figure with numPlots shapes sweeping the first numPCs weights together
    across +/- 0.25*sqrt(eigenvalue)."""
    f, axes = plt.subplots( 1, numPlots )
    bs = []
    for p in range( numPCs ):
        rs = np.linspace( - 0.25 * math.sqrt( vals[p] ), 0.25 * math.sqrt( vals[p] ), numPlots )
        bs.append( rs )
    P = vecs[:, 0:numPCs]
    for pl in range(numPlots) :
        # x = xbar + P b  (standard ASM reconstruction)
        b = [ bs[p][pl] for p in range(len(bs) ) ]
        X = np.add( asm.meanShape.allPts, np.dot( P, b ) )
        x, y = reravel( X )
        s = Shape( [ Point (pt[0], pt[1] ) for pt in zip(x,y) ])
        FaceDraw( s, axes[pl] ).drawBold()
        axes[pl].set_xlim( (-2,2) )
        axes[pl].set_ylim( (-3,3) )

    f.savefig('faces-%d-PCs-at-once.png' % numPCs )
    plt.close()


def projectOnePC( evIx, b, asm, vals, vecs ):
    """Shape reconstructed from the mean plus b * (PC evIx)."""
    X = np.add( asm.meanShape.allPts, np.multiply( vecs[:,evIx], b ) )
    x,y = reravel( X )
    sv = []
    for a, b in zip( x, y):
        sv.append( Point( a, b ) )
    s = Shape( sv )
    return s


def showVary( asm, vals, vecs, numPlots, eval, lim ):
    """Figure sweeping only PC `eval` (note: name shadows the builtin),
    overlaying the mean shape and eigenvector whiskers in each panel."""
    # Vary one eigenvalue
    f, axes = plt.subplots( 1, numPlots, sharex = True, sharey = True )

    vals = np.ravel( vals )
    rs = np.linspace( -lim * math.sqrt( vals[eval] ), lim * math.sqrt( vals[eval] ), numPlots )

    for m in range( len(rs) ):
        s = projectOnePC( eval, rs[m], asm, vals, vecs )
        FaceDraw( s, axes[m] ).drawBold()
        axes[m].set_xlim( (-2,2) )
        axes[m].set_ylim( (-3,3) )
        plotEigenvectors( asm, vecs, vals, eval, axes[m], lim )
        FaceDraw( asm.meanShape, axes[m] ).drawContrast()
    f.savefig('faces-PC-%d.png' % eval )
    plt.close()

def plotEigenvectors( asm, vecs,vals, evIx, axes, lim ):
    """Draw +/- eigenvector segments of PC evIx at every mean-shape landmark
    (flat vector layout: even index = x, odd index = y)."""
    for ptIx in np.multiply( 2, range(asm.n) ):
        axes.plot( [ asm.meanShape.allPts[ptIx], asm.meanShape.allPts[ptIx] + lim * math.sqrt(vals[evIx] )* vecs[ptIx][evIx] ] ,
                   [ asm.meanShape.allPts[ptIx+1], asm.meanShape.allPts[ptIx+1] + lim * math.sqrt( vals[evIx] )* vecs[ptIx + 1][evIx] ],
                   c = '#A0A0A0',
                   lw = 0.5)
        axes.plot( [ asm.meanShape.allPts[ptIx], asm.meanShape.allPts[ptIx] - lim * math.sqrt(vals[evIx] )* vecs[ptIx][evIx] ] ,
                   [ asm.meanShape.allPts[ptIx+1], asm.meanShape.allPts[ptIx+1] - lim * math.sqrt( vals[evIx] )* vecs[ptIx + 1][evIx] ],
                   c = '#A0A0A0',
                   lw = 0.5)


def reProjectCumulative( asm, numVals, vals, vecs ):
    """Reconstruct a shape from the first numVals PCs weighted by their own
    eigenvalues.

    NOTE(review): b is a list wrapping a transposed matrix -- the np.dot
    shape looks suspect; verify before use (appears unused).
    """
    P = vecs[:, 0:numVals]
    b = [ np.transpose(np.mat( vals[ 0: numVals ] )) ]

    X = np.add( asm.meanShape.allPts, np.dot( P, b ) )
    x,y = reravel( X )
    return Shape( [ Point (pt[0], pt[1] ) for pt in zip(x,y) ])


def exampleEvalEvecs( asm, vecs, vals, evIx ):
    """Plot the mean shape with PC evIx's eigenvector (scaled by
    sqrt(eigenvalue)) drawn from every landmark."""
    vecs = np.array( vecs )
    newPts = np.add( asm.meanShape.allPts, np.multiply(math.sqrt( vals[evIx] ), vecs[:,evIx] ) )
    print np.shape( newPts)
    newx, newy = reravel( newPts )
    print np.shape( newx)
    ## Mean Shape
    FaceDraw( asm.meanShape, plt ).drawBold()
    ## Eigenvectors
    plt.plot( [asm.meanShape.xs, np.add( asm.meanShape.xs, newx ) ],
              [ asm.meanShape.ys, np.add( asm.meanShape.ys, newy ) ], c ='#A0A0A0', lw = 1 )
    plt.xlim( (-6,6) )
    plt.ylim( (-6,6))
    ## New Shape
    plt.savefig( 'faces-eigenvectors-%d.png' % evIx )
    plt.close()


def exampleVariance( vecs, vals ):
    """Render showVary figures for the first ten PCs.

    NOTE(review): uses the module-level `asm` global, not a parameter.
    """
    for e in range(10):
        showVary( asm, vals, vecs, 5, e, 0.5 )
155 | allLines = None 156 | with open( "outfile-ASM-100iters-500tr.txt", "r") as infile: 157 | allLines = infile.readlines() 158 | cleanLines = map( lambda x : x.strip().split(), allLines ) 159 | 160 | asm = PASM( [36,31],10 ) 161 | s = [] 162 | 163 | for tuple in cleanLines: 164 | if tuple[0] == '!!!': 165 | if s != []: 166 | asm.meanShape = Shape( s ) 167 | s = [] 168 | else: 169 | pass 170 | elif tuple[0] == '@@@': 171 | if s != [] : 172 | asm.addShape( Shape(s) ) 173 | s = [] 174 | else: 175 | pass 176 | else: 177 | s.append( Point( float(tuple[0]), float(tuple[1]) ) ) 178 | return asm 179 | """ 180 | if __name__ == "__main__": 181 | main() -------------------------------------------------------------------------------- /MicroExpressionDetector/legacy/mains/applyASMMain.py: -------------------------------------------------------------------------------- 1 | import cv2 2 | import os 3 | 4 | DIR = "C:\\Users\\Valerie\\Desktop\\MicroExpress\\facePoints" 5 | SUBDIR = "session_1" 6 | 7 | 8 | # read in an image 9 | img = cv2.imread( os.path.join( DIR, SUBDIR2 ) ) 10 | 11 | 12 | 13 | 14 | # find eyes 15 | 16 | # translate, scale, rotate ASM to match eyes 17 | # use mean Shape and PCA 18 | 19 | 20 | -------------------------------------------------------------------------------- /MicroExpressionDetector/legacy/models/DeformableASM.py: -------------------------------------------------------------------------------- 1 | from ParallelASM import PASM, Shape, Point 2 | import numpy as np 3 | from matplotlib import pyplot as plt 4 | from matplotlib import pylab as plb 5 | 6 | import copy 7 | import matplotlib.cm as cm 8 | from FaceDraw import FaceDraw 9 | import math 10 | 11 | class DASM( PASM): 12 | def __init__( self, pasm ): 13 | self.pasm = pasm 14 | self.PCA() 15 | 16 | self.__dict__.update( pasm.__dict__ ) 17 | pasm.normShape( self.meanShape ) 18 | 19 | self.model = self.meanShape 20 | self.modelParams = { 'rot' : [[1,0],[0,1]] , 't' : [[0],[0]] } 21 | 22 | @property 
23 | def appModel( self ): 24 | shape = copy.deepcopy( self.model ) 25 | shape = shape.transform( self.modelParams ) 26 | return shape 27 | 28 | 29 | def alignAllShapes(self): 30 | self.calcWs() 31 | return super(DASM, self).alignAllShapes() 32 | 33 | def calcWs( self ): 34 | self.w = np.ones( self.n ) 35 | #self.w = [ 1 for j in s] 36 | self.W = np.diag(self.w) 37 | ##print "calcWs: %f" % (time.time() - start) 38 | 39 | def pointGradient( self ): 40 | fx = [ -1, 0,1 ] 41 | fy = np.transpose(fx) 42 | 43 | 44 | def PCA( self ): 45 | self.pasm.calcMeanShape() 46 | map( lambda x : x.calcDiff( self.pasm.meanShape ), self.pasm.allShapes ) 47 | cov = map( lambda x : x.calcSingleCov(), self.pasm.allShapes ) 48 | S = sum( cov ) 49 | 50 | self.eVals, vecs = np.linalg.eig( S ) 51 | self.eVecs = np.array( vecs ) 52 | 53 | def alignEyes( self, eye1, eye2, img ): 54 | # distance between eyes: 55 | d = eye1.dist( eye2 ) 56 | self.meanShape.scale( d ) 57 | 58 | rightEyeIx = 36 59 | leftEyeIx = 31 60 | 61 | 62 | 63 | ## Rotation 64 | xDiff = self.meanShape.shapePoints[rightEyeIx].x - self.meanShape.shapePoints[leftEyeIx].x 65 | yDiff = self.meanShape.shapePoints[rightEyeIx].y - self.meanShape.shapePoints[leftEyeIx].y 66 | 67 | p0 = [ xDiff, yDiff ] #self.meanShape.shapePoints[0].x, self.meanShape.shapePoints[0].y ] 68 | axisVector = [ 1, 0] 69 | thetaP = PASM.angleV( p0, axisVector ) 70 | thetaRot = thetaP 71 | 72 | rot = [[ math.cos( thetaRot ), -math.sin( thetaRot ) ], 73 | [ math.sin( thetaRot ), math.cos( thetaRot ) ] ] 74 | 75 | self.meanShape.rotate( rot ) 76 | 77 | t = [ [eye2.x - self.meanShape.shapePoints[ rightEyeIx].x ], 78 | [eye2.y - self.meanShape.shapePoints[rightEyeIx].y ] ] 79 | 80 | 81 | self.meanShape.translate( t ) 82 | 83 | self.transDict = {} #DANGER!!!!!!! 
84 | self.transDict.update( {'rot' : rot , 't' : t }) 85 | 86 | 87 | 88 | @staticmethod 89 | def getGradient( pt, img ): 90 | if pt.x is np.nan or pt.y is np.nan: 91 | return 92 | ## Check bounds 93 | h, w = np.shape( img ) 94 | if pt.y > h or pt.y > h-1 : 95 | pt.setY( h - 2) 96 | if pt.x > w or pt.x > w-1 : 97 | pt.setX( w - 2) 98 | 99 | delF = [ (img[ pt.y, pt.x + 1] - img[pt.y, pt.x - 1 ] )/2, 100 | (img[ pt.y + 1 , pt.x] - img[pt.y - 1, pt.x ] )/2 ] 101 | #print delF 102 | mag = math.sqrt( delF[0] ** 2 + delF[1] ** 2 ) 103 | #dir = PASM.angleV( delF, [ 0, 0] ) 104 | unitF = PASM.unitV( delF ) 105 | 106 | return unitF, mag 107 | 108 | def adjust( self, img ): 109 | self.dX = np.ravel( map( lambda x : DASM.getGradient( x, img ), self.meanShape.shapePoints ) ) 110 | nX = np.add( self.meanShape.allPts, self.dX ) 111 | 112 | x,y = DASM.deravel( nX ) 113 | ms = Shape( [ Point (pt[0], pt[1] ) for pt in zip(x,y) ]) 114 | self.transDict = self.alignOneShape( ms, self.appModel ) 115 | 116 | #nX = np.add( dasm.meanShape.allPts, np.dot( dasm.eVals, dasm.db ) ) 117 | #x,y = DASM.deravel( nX ) 118 | #ms = Shape( [ Point (pt[0], pt[1] ) for pt in zip(x,y) ]) 119 | 120 | #self.db = np.dot( np.transpose( self.eVecs ), np.ravel( self.dX ) ) 121 | 122 | @staticmethod 123 | def deravel( vect ): 124 | x, y = [], [] 125 | vect = np.ravel( vect ) 126 | for i in range( len( vect ) ): 127 | if i % 2 == 0: 128 | x.append( vect[i] ) 129 | else: 130 | y.append( vect[i] ) 131 | return x, y 132 | 133 | -------------------------------------------------------------------------------- /MicroExpressionDetector/legacy/other/ActiveShapeModelsBetter.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import matplotlib.pyplot as plt 3 | import seaborn as sns 4 | import time 5 | import functools 6 | from pathos.multiprocessing import ProcessingPool as Pool 7 | from ActiveShapeModels import Point, Shape 8 | 9 | 10 | class ASMB( object ): 11 
| def __init__( self, refIndices, numIterations ): 12 | self.allShapes = [] 13 | self.n = 0 14 | self.refIxs = refIndices 15 | self.nIters = numIterations 16 | @property 17 | def I( self ): 18 | return len( self.allShapes ) 19 | 20 | def addShape( self, s ): 21 | 22 | if len( self.allShapes ) == 0: 23 | self.allShapes.append( s ) 24 | self.n = s.n 25 | else: 26 | assert( s.n == self.n ) 27 | self.allShapes.append( s ) 28 | 29 | @property 30 | def V( self ): 31 | """ 32 | Variance in distance matrix amongst all shapes 33 | """ 34 | V = [] 35 | for k in range(self.n): 36 | row = [] 37 | for l in range(self.n): 38 | col = [] 39 | for i in range(len( self.allShapes )): 40 | col.append( self.allShapes[i].R[k][l]) 41 | row.append( np.var(col) ) 42 | V.append(row) 43 | return V 44 | 45 | @property 46 | def W( self ): 47 | """ 48 | Point weights as diagonal matrix 49 | """ 50 | 51 | return np.diag(self.w) 52 | 53 | @property 54 | def w( self ): 55 | """ 56 | Point weights as array 57 | """ 58 | s = map( sum, self.V) 59 | return [ math.pow( j, -1) for j in s] 60 | 61 | 62 | def Zgen( self, shape ): 63 | return sum( [ self.w[k] * ( shape.xs[k] **2 + 64 | shape.xs[k] **2 ) 65 | for k in range( self.n ) ] ) 66 | def Xgen( self, shape ): 67 | return sum( [ self.w[k] * shape.xs[k] 68 | for k in range( self.n ) ] ) 69 | 70 | def Ygen( self, shape ): 71 | return sum( [ self.w[k] * shape.ys[k] 72 | for k in range( self.n ) ] ) 73 | 74 | def Wgen( self ): 75 | return sum( [ self.w[k] for k in range( self.n ) ] ) 76 | 77 | def C1gen( self, shape1, shape2): 78 | return sum( [ self.w[k] * 79 | ( shape1.xs[k] * shape2.xs[k] + 80 | shape1.ys[k] * shape2.ys[k] ) 81 | for k in range( self.n) ] ) 82 | 83 | def C2gen( self, shape1, shape2): 84 | return sum( [ self.w[k] * 85 | ( shape1.ys[k] * shape2.xs[k] + 86 | shape1.xs[k] * shape2.ys[k] ) 87 | for k in range( self.n) ] ) 88 | 89 | 90 | 91 | 92 | def calcMeanShape( self ): 93 | xList = [ el.xs for el in self.allShapes ] 94 | yList = [ 
el.ys for el in self.allShapes ] 95 | meanPointsList = zip( np.mean(xList, 0), np.mean(yList, 0) ) 96 | self.meanShape = Shape( meanPointsList ) 97 | 98 | 99 | def iterateAlignment( self ): 100 | 101 | # Setup drawing 102 | 103 | #colors = ["purple", "light purple", 104 | # "blue", "cyan", "neon blue"] 105 | #"red", "rose", 106 | #"green", "bright green", "mint"] 107 | # roygbv 108 | co = ['lightish red', 'yellowish orange', 'canary yellow', 'lime', 'cyan']#,'lavender] 109 | pal = sns.xkcd_palette( co ) 110 | 111 | 112 | for i in range( self.nIters ): 113 | f, (ax1,ax2) = plt.subplots(1,2)#, sharex= True, sharey=True) 114 | ## Calculate mean shape 115 | self.calcMeanShape( ) 116 | ax1.plot( self.meanShape.xs, self.meanShape.ys, 'k' ) 117 | 118 | ## Normalize mean shape 119 | self.normMeanShape( ) 120 | 121 | for sh in self.allShapes: 122 | sh.draw( pal, ax1) 123 | 124 | 125 | 126 | ## Realign 127 | self.alignAllShapes( ) 128 | for sh in self.allShapes: 129 | sh.draw( pal, ax2 ) 130 | ax2.plot( self.meanShape.xs, self.meanShape.ys, 'k' ) 131 | 132 | # Draw change 133 | self.calcMeanShape() 134 | 135 | 136 | f.savefig( "C:/Users/Valerie/Desktop/stars/plots5/%d.png" % i ) 137 | f.clear() 138 | plt.close() 139 | i += 1 140 | # Show 141 | # f.show() 142 | 143 | # def isConverged( self ): 144 | def alignAllShapes( self ): 145 | import pathos.multiprocessing as mp 146 | start = time.time() 147 | pool = Pool() 148 | self.allShapes = pool.map( self.alignOneShape, self.allShapes ) 149 | # for sh in self.allShapes: 150 | # self.alignOneShape( sh ) 151 | print 'alignAllShapes: %f' % (time.time() - start ) 152 | return 153 | 154 | 155 | def alignOneShape( self, shape ): 156 | start = time.time() 157 | transDict = self.calcAlignTrans( shape ) 158 | shape.applyTrans( transDict ) 159 | return shape 160 | 161 | @staticmethod 162 | def centroid( shape1 ): 163 | return Point( np.mean( shape1.xs ) , np.mean( shape1.ys ) ) 164 | 165 | @staticmethod 166 | def unitV( v ): 167 | return v 
/ np.linalg.norm( v ) 168 | 169 | @staticmethod 170 | def angleV( v1, v2 ): 171 | return math.atan2( v1[0], v1[1] ) - math.atan2( v2[0], v2[1] ) 172 | 173 | def normMeanShape( self ): 174 | ############## Calc transformations ################### 175 | ## Translate 176 | cmShape = ASMB.centroid( self.meanShape ) 177 | t = [[ -cmShape.x ], [ -cmShape.y ]] 178 | 179 | self.meanShape.translate( t ) 180 | self.meanShape.update( ) 181 | 182 | leftEyeIx = self.refIxs[0] 183 | rightEyeIx = self.refIxs[1] 184 | 185 | ## Scale 186 | # distance between two "eyes" 187 | d = self.meanShape.shapePoints[leftEyeIx].dist( self.meanShape.shapePoints[rightEyeIx] ) 188 | s = float(1)/float(d) 189 | 190 | ## Rotation 191 | xDiff = self.meanShape.shapePoints[rightEyeIx].x - self.meanShape.shapePoints[leftEyeIx].x 192 | yDiff = self.meanShape.shapePoints[rightEyeIx].y - self.meanShape.shapePoints[leftEyeIx].y 193 | 194 | p0 = [ xDiff, yDiff ] #self.meanShape.shapePoints[0].x, self.meanShape.shapePoints[0].y ] 195 | axisVector = [ 1, 0] 196 | thetaP = ASMB.angleV( p0, axisVector ) 197 | thetaRot = thetaP 198 | 199 | rot = [[ math.cos( thetaRot ), -math.sin( thetaRot ) ], 200 | [ math.sin( thetaRot ), math.cos( thetaRot ) ] ] 201 | 202 | self.meanShape.rotate( rot ) 203 | self.meanShape.update( ) 204 | 205 | self.meanShape.scale( s ) 206 | self.meanShape.update( ) 207 | return 208 | 209 | def calcAlignTrans( self, shape ): 210 | 211 | start = time.time() 212 | coeffs = np.array( [ 213 | [ self.Xgen( shape ), - self.Ygen( shape ), self.Wgen(), 0], 214 | [ self.Ygen( shape ), self.Xgen( shape ), 0, self.Wgen()], 215 | [ self.Zgen( shape ), 0, self.Xgen( shape ), self.Ygen( shape )], 216 | [ 0, self.Zgen( shape ), - self.Ygen( shape ), self.Xgen( shape )] 217 | ]) 218 | eqs = np.array([ self.Xgen(self.meanShape) , self.Ygen(self.meanShape), self.C1gen(self.meanShape, shape), self.C2gen(self.meanShape, shape) ] ) 219 | sol = np.linalg.solve( coeffs, eqs ) 220 | # d = ax = s cos 0 221 | # e = 
ay = s sin 0 222 | # f = tx 223 | # g = ty 224 | 225 | rot = [[ sol[0], - sol[1]], 226 | [ sol[1], sol[0]] ] 227 | t = [[ sol[2]],[sol[3]]] 228 | 229 | return { 'rot': rot, 't':t} 230 | 231 | def drawAll( self, axis, palette ): 232 | i = 0 233 | for el in self.allShapes: 234 | el.draw( palette, i, axis) 235 | i += 1 236 | axis.plot( self.meanShape.xs, self.meanShape.ys, c = 'k' ) 237 | 238 | 239 | 240 | 241 | 242 | 243 | 244 | #### SHAPE ##### 245 | 246 | 247 | 248 | 249 | ### POINTS ### 250 | 251 | -------------------------------------------------------------------------------- /MicroExpressionDetector/legacy/other/ParallelMain.py: -------------------------------------------------------------------------------- 1 | from ParallelASM import PASM, Point, Shape 2 | import os 3 | from matplotlib import pyplot as plt 4 | import seaborn as sn 5 | 6 | DIR = "C:\\Users\\Valerie\\Desktop\\stars" 7 | OUTPUT = os.path.join( DIR, "output20") 8 | 9 | files = next(os.walk(OUTPUT))[2] 10 | 11 | def run(): 12 | 13 | asm = PASM([0,1], 1000 ) 14 | allLines = [] 15 | pts = [] 16 | for f in files: 17 | with open( os.path.join( OUTPUT, f), "r" ) as infile: 18 | 19 | allLines = infile.readlines() 20 | if len(allLines) > 0: 21 | cleanLines = [ x.strip().split('\t') for x in allLines] 22 | ptList = [ Point( x[0], x[1]) for x in cleanLines ] 23 | print len( ptList ) 24 | asm.addShape( Shape( ptList ) ) 25 | 26 | 27 | 28 | asm.iterateAlignment() 29 | 30 | 31 | if __name__ == "__main__": 32 | run() 33 | 34 | 35 | 36 | 37 | -------------------------------------------------------------------------------- /MicroExpressionDetector/legacy/other/SimpleASM.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from pathos.multiprocessing import ProcessingPool as PPool 3 | import functools 4 | 5 | class SimpleASM ( object ): 6 | def __init__( self, listOfShapes ): 7 | self.shapeList = listOfShapes 8 | 9 | def scale( self, scaling ): 10 | 
return PPool().map( lambda line: SimpleShape( PPool().map( lambda x : x.scale( scaling ) , line)), map( lambda y : y.pointList, self.shapeList ) ) 11 | 12 | def rotate( self, rotation ): 13 | return map( lambda line: SimpleShape( PPool().map( lambda x : x.scale( rotation ) , line)), PPool().map( lambda y : y.pointList, self.shapeList ) ) 14 | 15 | def translate( self, translation ): 16 | return map( lambda line: SimpleShape( PPool().map( lambda x : x.scale( translation ) , line)), PPool().map( lambda y : y.pointList, self.shapeList ) ) 17 | 18 | class SimpleShape ( object ): 19 | def __init__( self, listOfPoints ): 20 | self.pointList = listOfPoints 21 | 22 | #def scale( self, scaling ): 23 | #result = Pool().amap( lambda x: 24 | #result = p.amap( lambda x : x.scale( scaling ), self.pointList ).get() 25 | 26 | 27 | class SimplePoint( object ): 28 | def __init__( self, tupleOfCoords ): 29 | self.coords = tupleOfCoords 30 | 31 | @property 32 | def x( self ): 33 | return self.coords[0] 34 | 35 | 36 | @property 37 | def y( self ): 38 | return self.coords[1] 39 | 40 | 41 | def scale( self, scaling ): 42 | x,y = np.multiply( scaling, [self.x, self.y] ) 43 | print "done" 44 | return SimplePoint( (x,y)) 45 | 46 | 47 | -------------------------------------------------------------------------------- /MicroExpressionDetector/legacy/other/main.py: -------------------------------------------------------------------------------- 1 | from ActiveShapeModelsBetter import Point, Shape, ASMB 2 | import os 3 | from matplotlib import pyplot as plt 4 | import seaborn as sn 5 | 6 | DIR = "C:\\Users\\Valerie\\Desktop\\stars" 7 | OUTPUT = os.path.join( DIR, "output5") 8 | 9 | def run(): 10 | 11 | files = next(os.walk(OUTPUT))[2] 12 | asm = ASMB([0,1],100) 13 | allLines = [] 14 | pts = [] 15 | for f in files: 16 | with open( os.path.join( OUTPUT, f), "r" ) as infile: 17 | 18 | allLines = infile.readlines() 19 | if len(allLines) > 0: 20 | cleanLines = [ x.strip().split('\t') for x in 
allLines] 21 | ptList = [ Point( x[0], x[1]) for x in cleanLines ] 22 | asm.addShape( Shape( ptList ) ) 23 | asm.iterateAlignment() 24 | 25 | 26 | if __name__ == "__main__": 27 | run() 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | ''' 40 | f, (ax1, ax2) = plt.subplots( 1,2, sharey=True, sharex = True) 41 | cp =sn.color_palette( "BrBG", 10 ) 42 | for i in range(asm.I): 43 | sh = asm.allShapes[i] 44 | _ = ax1.scatter( sh.xs, sh.ys, c = cp[i]) 45 | 46 | asm.alignShapes() 47 | for i in range(asm.I): 48 | sh = asm.allShapes[i] 49 | print map( str, sh.shapePoints ) 50 | _ = ax2.scatter( sh.xs, sh.ys, c = cp[i]) 51 | plt.show() 52 | ''' 53 | -------------------------------------------------------------------------------- /MicroExpressionDetector/legacy/other/shapeNormTest1.py: -------------------------------------------------------------------------------- 1 | from ActiveShapeModels import ASM, Point, Shape 2 | import matplotlib.pyplot as plt 3 | import seaborn as sns 4 | import math 5 | import numpy as np 6 | import os 7 | def drawScaleSegment( axis, shape ): 8 | axis.plot( [shape.shapePoints[0].x, shape.shapePoints[1].x], # plot p0 to p1 segment 9 | [shape.shapePoints[0].y, shape.shapePoints[1].y], 10 | color= 'r', ls = '-') 11 | 12 | def drawLineUP( axis, shape, cm ): 13 | axis.plot( [ cm.x, shape.shapePoints[0].x], 14 | [ cm.y, shape.shapePoints[0].y], color='b') 15 | 16 | def drawShape( axis, shape ): 17 | shape.draw( sns.xkcd_palette( ["light blue"] ), 0, axis) 18 | 19 | def drawCentroid( axis, cm ): 20 | axis.scatter( cm.x, cm.y, c='r') 21 | 22 | def plotAll( axis, shape ): 23 | cm = ASM.centroid( shape ) 24 | drawShape( axis, shape) 25 | drawCentroid( axis, cm ) 26 | drawLineUP( axis, shape, cm ) 27 | drawScaleSegment( axis, shape ) 28 | 29 | DIR = "C:\\Users\\Valerie\\Desktop\\stars" 30 | OUTPUT = os.path.join( DIR, "output20") 31 | ## 20 point shape 32 | #s1 = Shape( [ Point(857, -129), Point(89,-409), Point(-404,254), Point( 96,957), 
Point(877,712) ]) 33 | files = next(os.walk(OUTPUT))[2] 34 | f = files[0] 35 | with open( os.path.join( OUTPUT, f), "r" ) as infile: 36 | 37 | allLines = infile.readlines() 38 | if len(allLines) > 0: 39 | cleanLines = [ x.strip().split('\t') for x in allLines] 40 | ptList = [ Point( x[0], x[1]) for x in cleanLines ] 41 | s1 = Shape( ptList ) 42 | 43 | f, ((ax1,ax2),(ax3,ax4)) = plt.subplots(2,2) 44 | 45 | ## Original shape (input) 46 | cmShape = ASM.centroid( s1 ) 47 | plotAll( ax1, s1 ) 48 | 49 | ############## Calc transformations ################### 50 | ## Translate 51 | t = [[ -cmShape.x ], [ -cmShape.y ]] 52 | for pt in s1.shapePoints: 53 | pt.translate( t ) 54 | s1.update() 55 | plotAll( ax2, s1) 56 | 57 | 58 | 59 | ## Scale 60 | d1 = s1.shapePoints[0].dist( s1.shapePoints[1] ) 61 | s = float(1)/float(d1) 62 | 63 | ## Rotation 64 | p0 = [ s1.shapePoints[0].x, s1.shapePoints[0].y ] 65 | axisVector = [ 0, 1] 66 | thetaP = ASM.angleV( p0, axisVector ) 67 | thetaRot = 2*math.pi - thetaP 68 | rot = [[ math.cos( thetaRot ), -math.sin( thetaRot ) ], 69 | [ math.sin( thetaRot ), math.cos( thetaRot ) ] ] 70 | 71 | 72 | 73 | 74 | 75 | 76 | ## What order to calculate and apply the transformations? 
77 | 78 | 79 | 80 | 81 | for pt in s1.shapePoints: 82 | pt.rotate( rot ) 83 | s1.update() 84 | 85 | plotAll( ax3, s1 ) 86 | 87 | for pt in s1.shapePoints: 88 | pt.scale( s ) 89 | s1.update() 90 | plotAll( ax4, s1) 91 | 92 | 93 | plt.show() 94 | 95 | -------------------------------------------------------------------------------- /MicroExpressionDetector/legacy/other/shapeNormalizationTest.py: -------------------------------------------------------------------------------- 1 | from ActiveShapeModels import ASM, Point, Shape 2 | import matplotlib.pyplot as plt 3 | import seaborn as sns 4 | import math 5 | import numpy as np 6 | 7 | s1 = Shape( [ Point(200,300), Point(100, 200), Point(300, 50 ) ] ) 8 | s2 = Shape( [ Point(150,250), Point(50, 100 ), Point(250, 0) ] ) 9 | 10 | 11 | 12 | f, ((ax1,ax2),(ax3,ax4)) = plt.subplots(2,2, sharex =True, sharey = True) 13 | s1.draw( sns.xkcd_palette( ["light blue" ]), ax1) 14 | s2.draw( sns.xkcd_palette( ["light blue"] ), ax2) 15 | 16 | 17 | cmShape = ASM.centroid( s1) 18 | 19 | cmMeanShape = ASM.centroid( s2 ) 20 | 21 | 22 | ax1.scatter( cmShape.x, cmShape.y, c='r') 23 | ax2.scatter( cmMeanShape.x, cmMeanShape.y, c='r') 24 | ax1.plot( [s1.shapePoints[0].x, s1.shapePoints[1].x], 25 | [s1.shapePoints[0].y, s1.shapePoints[1].y], 26 | color= 'r', ls = '-') 27 | 28 | ax2.plot( [s2.shapePoints[0].x, s2.shapePoints[1].x], 29 | [s2.shapePoints[0].y, s2.shapePoints[1].y], 30 | color= 'r', lw = 1, ls = '-') 31 | 32 | 33 | t = [[ cmShape.x - cmMeanShape.x ], [cmShape.y - cmMeanShape.y ]] 34 | 35 | ## Scale 36 | d1 = s1.shapePoints[0].dist( s1.shapePoints[1] ) 37 | d2 = s2.shapePoints[0].dist( s2.shapePoints[1] ) 38 | s = d1/d2 39 | 40 | ## Rotation 41 | #http://stackoverflow.com/questions/2827393/angles-between-two-n-dimensional-vectors-in-python 42 | p0 = [ s1.shapePoints[0].x, s1.shapePoints[0].y ] 43 | m0 = [ s2.shapePoints[0].x, s2.shapePoints[0].y ] 44 | axisVector = [ 1, 0 ] 45 | 46 | thetaP = ASM.angleV( p0, axisVector ) 47 | thetaM 
= ASM.angleV( m0, axisVector ) 48 | 49 | thetaRot = thetaP - thetaM 50 | 51 | rot = [[ math.cos( thetaRot ), -math.sin( thetaRot ) ], 52 | [ math.sin( thetaRot ), math.cos( thetaRot ) ] ] 53 | 54 | d= { 'rot' : np.multiply( s , rot) , 't': t} 55 | s2.applyTrans( d ) 56 | s2.draw( sns.xkcd_palette( ["light blue"] ), ax4) 57 | 58 | cmMeanShape1 = ASM.centroid( s2 ) 59 | ax4.scatter( cmMeanShape1.x, cmMeanShape1.y, c='r') 60 | ax4.plot( [s2.shapePoints[0].x, s2.shapePoints[1].x], 61 | [s2.shapePoints[0].y, s2.shapePoints[1].y], 62 | color= 'r', lw = 1, ls = '-') 63 | 64 | 65 | 66 | ### Checks 67 | print d1 68 | print d2 69 | print s2.shapePoints[0].dist( s2.shapePoints[1] ) #should be == d1 70 | 71 | print thetaP 72 | print thetaM 73 | print ASM.angleV([ s2.shapePoints[0].x, s2.shapePoints[0].y ], axisVector ) 74 | 75 | plt.show() 76 | -------------------------------------------------------------------------------- /MicroExpressionDetector/legacy/other/shapeNormalizationTestToSelf.py: -------------------------------------------------------------------------------- 1 | from ActiveShapeModels import ASM, Point, Shape 2 | import matplotlib.pyplot as plt 3 | import seaborn as sns 4 | import math 5 | import numpy as np 6 | 7 | #s1 = Shape( [ Point(200,300), Point(100, 200), Point(300, 50 ) ] ) 8 | #s2 = Shape( [ Point(150,250), Point(50, 100 ), Point(250, 0) ] ) 9 | 10 | 11 | 12 | s1 = Shape( [ Point(857, -129), Point(89,-409), Point(-404,254), Point( 96,957), Point(877,712) ]) 13 | 14 | f, ((ax1,ax2),(ax3,ax4)) = plt.subplots(2,2) 15 | 16 | s1.draw( sns.xkcd_palette( ["light blue" ]), 0, ax1) 17 | #s2.draw( sns.xkcd_palette( ["light blue"] ), ax2) 18 | 19 | cmShape = ASM.centroid( s1 ) 20 | #cmMeanShape = ASM.centroid( s2 ) 21 | 22 | 23 | ax1.scatter( cmShape.x, cmShape.y, c='r') 24 | #ax2.scatter( cmMeanShape.x, cmMeanShape.y, c='r') 25 | ax1.plot( [s1.shapePoints[0].x, s1.shapePoints[1].x], 26 | [s1.shapePoints[0].y, s1.shapePoints[1].y], 27 | color= 'r', ls = '-') 28 | 
29 | #ax2.plot( [s2.shapePoints[0].x, s2.shapePoints[1].x], 30 | # [s2.shapePoints[0].y, s2.shapePoints[1].y], 31 | # color= 'r', lw = 1, ls = '-') 32 | 33 | 34 | t = [[ -cmShape.x ], [ -cmShape.y ]] 35 | 36 | ## Scale 37 | d1 = s1.shapePoints[0].dist( s1.shapePoints[1] ) 38 | #d2 = s2.shapePoints[0].dist( s2.shapePoints[1] ) 39 | s = 1/d1 40 | 41 | ## Rotation 42 | #http://stackoverflow.com/questions/2827393/angles-between-two-n-dimensional-vectors-in-python 43 | p0 = [ s1.shapePoints[0].x, s1.shapePoints[0].y ] 44 | #m0 = [ s2.shapePoints[0].x, s2.shapePoints[0].y ] 45 | axisVector = [ 1, 0 ] 46 | 47 | thetaP = ASM.angleV( p0, axisVector ) 48 | #thetaM = ASM.angleV( m0, axisVector ) 49 | 50 | thetaRot = math.pi / 2 - thetaP 51 | 52 | rot = [[ math.cos( thetaRot ), -math.sin( thetaRot ) ], 53 | [ math.sin( thetaRot ), math.cos( thetaRot ) ] ] 54 | 55 | #d= { 'rot' : np.multiply( s , rot) , 't': t} 56 | #s1.applyTrans( d )A 57 | 58 | 59 | for pt in s1.shapePoints: 60 | pt.translate( t ) 61 | s1.update() 62 | s1.draw( sns.xkcd_palette( ["light blue"] ), 0, ax2) 63 | 64 | for pt in s1.shapePoints: 65 | pt.rotate( rot ) 66 | s1.update() 67 | s1.draw( sns.xkcd_palette( ["light blue"] ), 0, ax3) 68 | 69 | 70 | 71 | 72 | cmMeanShape1 = ASM.centroid( s1 ) 73 | ax2.scatter( cmMeanShape1.x, cmMeanShape1.y, c='r') 74 | ax2.plot( [s1.shapePoints[0].x, s1.shapePoints[1].x], 75 | [s1.shapePoints[0].y, s1.shapePoints[1].y], 76 | color= 'r', lw = 1, ls = '-') 77 | 78 | plt.show() 79 | 80 | ### Checks 81 | print d1 82 | print d2 83 | print s2.shapePoints[0].dist( s2.shapePoints[1] ) #should be == d1 84 | 85 | print thetaP 86 | print thetaM 87 | print ASM.angleV([ s2.shapePoints[0].x, s2.shapePoints[0].y ], axisVector ) 88 | 89 | plt.show() 90 | -------------------------------------------------------------------------------- /MicroExpressionDetector/legacy/other/shapeTestMapper.py: -------------------------------------------------------------------------------- 1 | from Geometry 
import Shape, Point 2 | from ActiveShapeModelsBetter import ASMB 3 | import functools 4 | import timeit 5 | 6 | s = Shape( [ Point(200,300), Point( 150, 200), Point( 130,500) ] ) 7 | 8 | 9 | def add( x ,y) : 10 | return x + y 11 | 12 | timeit.timeit('map( str , s.shapePoints )') 13 | timeit.timeit('[str(x) for x in s.shapePoints ]') 14 | 15 | 16 | map( add, s.shapePoints ) 17 | 18 | 19 | map( functools.partial( Point.rotate, [[ -1,1],[1,1]] ), s.shapePoints ) -------------------------------------------------------------------------------- /MicroExpressionDetector/mains/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vsimonis/MicroExpressionDetector/fd6498aa679a31338c324a5419152bde1dd9427c/MicroExpressionDetector/mains/__init__.py -------------------------------------------------------------------------------- /MicroExpressionDetector/point_annotator/annotator.py: -------------------------------------------------------------------------------- 1 | from PyQt5 import QtCore, QtGui, QtWidgets 2 | import os 3 | import sys 4 | import numpy as np 5 | import cv2 6 | from pointAnnotator import Ui_MainWindow 7 | 8 | 9 | NP = 20 10 | 11 | class Gui( QtWidgets.QMainWindow): 12 | def __init__( self, parent = None): 13 | QtWidgets.QWidget.__init__( self, parent ) 14 | self.initUI() 15 | 16 | 17 | def initUI( self ): 18 | # Instantiate classes 19 | self.ui = Ui_MainWindow() 20 | self.ui.setupUi( self ) 21 | 22 | self.ui.img.mousePressEvent = self.pixelSelect 23 | 24 | # Register actions (buttons) 25 | self.ui.dirSearch.clicked.connect( self.browseDir ) 26 | self.ui.nxtImg.clicked.connect( self.advanceImg ) 27 | self.ui.skip.clicked.connect( self.skip ) 28 | 29 | # Data members 30 | self.pointsToDraw = [] 31 | self.img = None 32 | self.allImages = None 33 | self.ix = None 34 | self.numPoints = 0 35 | self.pointsLeft = NP 36 | 37 | 38 | def pixelSelect( self, event ): 39 | position = QtCore.QPoint( 
event.pos().x(), event.pos().y()) 40 | self.pointsToDraw.append( position ) 41 | self.update() 42 | self.numPoints += 1 43 | self.pointsLeft -= 1 44 | self.ui.ptsLeft.setText( str( self.pointsLeft ) ) 45 | self.ui.numPts.setText( str( self.numPoints) ) 46 | 47 | #print event.pos().x(), event.pos().y() 48 | def skip( self ): 49 | self.pointsToDraw = [] 50 | self.numPoints = 0 51 | self.pointsLeft = NP 52 | if self.ix < len(self.allImages) - 1: 53 | self.ix += 1 54 | else: 55 | mb = QtWidgets.QMessageBox() 56 | mb.setText( "All done!" ) 57 | mb.exec_() 58 | self.update() 59 | 60 | def paintEvent( self, event ): 61 | #if self.img is not None: 62 | #self.ui.img.setPixmap(self.convertFrame( self.img )) 63 | if self.ix is not None: 64 | self.cImgLoc = self.allImages[ self.ix ] 65 | self.cImgPath = os.path.join( self.folder, self.cImgLoc) 66 | #print cImgPath 67 | self.ui.imgText.setText( str( self.cImgLoc )) 68 | 69 | pixmap = QtGui.QPixmap( self.cImgPath ) 70 | #self.ui.img.show() 71 | # self.ui.img.paintEvent(event) 72 | qp = QtGui.QPainter() 73 | 74 | qp.begin( pixmap ) 75 | p = QtGui.QPen() 76 | p.setColor( QtCore.Qt.red ) 77 | p.setWidth( 10 ) 78 | qp.setPen( p ) 79 | 80 | for pt in self.pointsToDraw: 81 | qp.drawPoint(pt) 82 | qp.end() 83 | 84 | self.ui.img.setPixmap( pixmap ) 85 | self.ui.img.setAlignment( QtCore.Qt.AlignTop | QtCore.Qt.AlignLeft) 86 | 87 | 88 | 89 | def browseDir( self ): 90 | folder = str(QtWidgets.QFileDialog.getExistingDirectory(self, "Select Directory")) 91 | #folder ="C:/Users/Valerie/Desktop/MicroExpress/CASME2/Cropped/Cropped/sub01/EP02_01f" 92 | print folder 93 | f = folder.split("/") 94 | 95 | # Get study and subject and display 96 | self.folder = os.path.abspath(folder) 97 | self.study = f.pop() 98 | self.subject = f.pop() 99 | 100 | self.ui.dirView.setText( self.folder ) 101 | self.ui.stuText.setText( self.study ) 102 | self.ui.subText.setText( self.subject ) 103 | 104 | # Get files and display 105 | self.allImages = 
next(os.walk(self.folder))[2] 106 | self.ix = 0 107 | #inputImg = cv2.imread( cImgPath ) 108 | #print inputImg 109 | #if self.isColor( inputImg ): 110 | # self.img = cv2.cvtColor( inputImg, cv2.COLOR_RGB2GRAY ) 111 | #else: 112 | # self.img = inputImg 113 | print "here" 114 | self.update() 115 | 116 | def advanceImg( self ): 117 | self.writeFile() 118 | self.skip() 119 | 120 | def gray2qimage(self, gray): 121 | """ 122 | Convert the 2D numpy array `gray` into a 8-bit QImage with a gray 123 | colormap. The first dimension represents the vertical image axis. 124 | """ 125 | if len(gray.shape) != 2: 126 | raise ValueError("gray2QImage can only convert 2D arrays") 127 | 128 | gray = np.require(gray, np.uint8, 'C') 129 | 130 | h, w = gray.shape 131 | 132 | result = QtGui.QImage(gray.data, w, h, QtGui.QImage.Format_Indexed8) 133 | result.ndarray = gray 134 | for i in range(256): 135 | result.setColor(i, QtGui.QColor(i, i, i).rgb()) 136 | return result 137 | 138 | def convertFrame(self,imgIn): 139 | """ 140 | converts frame to format suitable for QtGui 141 | """ 142 | try: 143 | img = self.gray2qimage( imgIn ) 144 | img = QtGui.QPixmap.fromImage(img) 145 | 146 | return img 147 | except: 148 | return None 149 | 150 | def isColor( self, imgIn ): 151 | ncolor = np.shape(imgIn)[2] 152 | boolt = int(ncolor) > 2 153 | return boolt 154 | def closeEvent( self, event ): 155 | self.writeFile() 156 | event.accept() 157 | 158 | def writeFile( self ): 159 | with open( os.path.join( self.folder, "output20", "%s.txt" % self.cImgLoc), "w") as file: 160 | for pt in self.pointsToDraw: 161 | file.write( "%d\t%d\n" % ( pt.x(), pt.y() ) ) 162 | 163 | 164 | # def mousePressEvent( self, QMouseEvent): 165 | # print QMouseEvent.pos() 166 | ''' 167 | def mouseReleaseEvent( self, QMouseEvent): 168 | cursor = QtGui.QCursor() 169 | self.pointsToDraw.append( cursor.pos() ) 170 | self.update() 171 | ''' 172 | 173 | 174 | def main(): 175 | app = QtWidgets.QApplication( sys.argv ) 176 | #filter = 
ValEventFilter() 177 | #app.installEventFilter(filter) 178 | g = Gui() 179 | g.show() 180 | 181 | sys.exit( app.exec_() ) 182 | 183 | if __name__ == '__main__': 184 | main() -------------------------------------------------------------------------------- /MicroExpressionDetector/point_annotator/pointAnnotator.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Form implementation generated from reading ui file 'pointAnnotator.ui' 4 | # 5 | # Created: Wed Jul 01 15:51:35 2015 6 | # by: PyQt5 UI code generator 5.3 7 | # 8 | # WARNING! All changes made in this file will be lost! 9 | 10 | from PyQt5 import QtCore, QtGui, QtWidgets 11 | 12 | class Ui_MainWindow(object): 13 | def setupUi(self, MainWindow): 14 | MainWindow.setObjectName("MainWindow") 15 | MainWindow.resize(1200, 800) 16 | self.centralwidget = QtWidgets.QWidget(MainWindow) 17 | self.centralwidget.setObjectName("centralwidget") 18 | self.img = QtWidgets.QLabel(self.centralwidget) 19 | self.img.setGeometry(QtCore.QRect(20, 110, 771, 571)) 20 | self.img.setText("") 21 | self.img.setObjectName("img") 22 | self.dirView = QtWidgets.QLabel(self.centralwidget) 23 | self.dirView.setGeometry(QtCore.QRect(150, 30, 1041, 33)) 24 | self.dirView.setObjectName("dirView") 25 | self.dirSearch = QtWidgets.QPushButton(self.centralwidget) 26 | self.dirSearch.setGeometry(QtCore.QRect(20, 20, 101, 57)) 27 | self.dirSearch.setObjectName("dirSearch") 28 | self.currentImg = QtWidgets.QLabel(self.centralwidget) 29 | self.currentImg.setGeometry(QtCore.QRect(700, 520, 351, 33)) 30 | self.currentImg.setText("") 31 | self.currentImg.setObjectName("currentImg") 32 | self.formLayoutWidget = QtWidgets.QWidget(self.centralwidget) 33 | self.formLayoutWidget.setGeometry(QtCore.QRect(830, 90, 350, 277)) 34 | self.formLayoutWidget.setObjectName("formLayoutWidget") 35 | self.formLayout = QtWidgets.QFormLayout(self.formLayoutWidget) 36 | self.formLayout.setContentsMargins(0, 
0, 0, 0) 37 | self.formLayout.setObjectName("formLayout") 38 | self.numPtsText = QtWidgets.QLabel(self.formLayoutWidget) 39 | self.numPtsText.setObjectName("numPtsText") 40 | self.formLayout.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.numPtsText) 41 | self.ptsLeftText = QtWidgets.QLabel(self.formLayoutWidget) 42 | self.ptsLeftText.setObjectName("ptsLeftText") 43 | self.formLayout.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.ptsLeftText) 44 | self.numPts = QtWidgets.QLabel(self.formLayoutWidget) 45 | self.numPts.setObjectName("numPts") 46 | self.formLayout.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.numPts) 47 | self.ptsLeft = QtWidgets.QLabel(self.formLayoutWidget) 48 | self.ptsLeft.setObjectName("ptsLeft") 49 | self.formLayout.setWidget(1, QtWidgets.QFormLayout.FieldRole, self.ptsLeft) 50 | self.subText = QtWidgets.QLabel(self.formLayoutWidget) 51 | self.subText.setObjectName("subText") 52 | self.formLayout.setWidget(3, QtWidgets.QFormLayout.FieldRole, self.subText) 53 | self.stuText = QtWidgets.QLabel(self.formLayoutWidget) 54 | self.stuText.setObjectName("stuText") 55 | self.formLayout.setWidget(4, QtWidgets.QFormLayout.FieldRole, self.stuText) 56 | self.imgText = QtWidgets.QLabel(self.formLayoutWidget) 57 | self.imgText.setObjectName("imgText") 58 | self.formLayout.setWidget(5, QtWidgets.QFormLayout.FieldRole, self.imgText) 59 | self.label = QtWidgets.QLabel(self.formLayoutWidget) 60 | self.label.setObjectName("label") 61 | self.formLayout.setWidget(3, QtWidgets.QFormLayout.LabelRole, self.label) 62 | self.label_2 = QtWidgets.QLabel(self.formLayoutWidget) 63 | self.label_2.setObjectName("label_2") 64 | self.formLayout.setWidget(4, QtWidgets.QFormLayout.LabelRole, self.label_2) 65 | self.label_3 = QtWidgets.QLabel(self.formLayoutWidget) 66 | self.label_3.setObjectName("label_3") 67 | self.formLayout.setWidget(5, QtWidgets.QFormLayout.LabelRole, self.label_3) 68 | spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, 
QtWidgets.QSizePolicy.Minimum) 69 | self.formLayout.setItem(2, QtWidgets.QFormLayout.SpanningRole, spacerItem) 70 | self.nxtImg = QtWidgets.QPushButton(self.centralwidget) 71 | self.nxtImg.setGeometry(QtCore.QRect(990, 370, 187, 57)) 72 | self.nxtImg.setObjectName("nxtImg") 73 | self.skip = QtWidgets.QPushButton(self.centralwidget) 74 | self.skip.setGeometry(QtCore.QRect(990, 430, 187, 57)) 75 | self.skip.setObjectName("skip") 76 | MainWindow.setCentralWidget(self.centralwidget) 77 | self.menubar = QtWidgets.QMenuBar(MainWindow) 78 | self.menubar.setGeometry(QtCore.QRect(0, 0, 1200, 47)) 79 | self.menubar.setObjectName("menubar") 80 | MainWindow.setMenuBar(self.menubar) 81 | self.statusbar = QtWidgets.QStatusBar(MainWindow) 82 | self.statusbar.setObjectName("statusbar") 83 | MainWindow.setStatusBar(self.statusbar) 84 | 85 | self.retranslateUi(MainWindow) 86 | QtCore.QMetaObject.connectSlotsByName(MainWindow) 87 | 88 | def retranslateUi(self, MainWindow): 89 | _translate = QtCore.QCoreApplication.translate 90 | MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow")) 91 | self.dirView.setText(_translate("MainWindow", "path")) 92 | self.dirSearch.setText(_translate("MainWindow", "Browse")) 93 | self.numPtsText.setText(_translate("MainWindow", "Num points:")) 94 | self.ptsLeftText.setText(_translate("MainWindow", "Points left:")) 95 | self.numPts.setText(_translate("MainWindow", "np")) 96 | self.ptsLeft.setText(_translate("MainWindow", "pl")) 97 | self.subText.setText(_translate("MainWindow", "subject")) 98 | self.stuText.setText(_translate("MainWindow", "study")) 99 | self.imgText.setText(_translate("MainWindow", "image")) 100 | self.label.setText(_translate("MainWindow", "Subject:")) 101 | self.label_2.setText(_translate("MainWindow", "Study: ")) 102 | self.label_3.setText(_translate("MainWindow", "Image:")) 103 | self.nxtImg.setText(_translate("MainWindow", "Next Image")) 104 | self.skip.setText(_translate("MainWindow", "Skip")) 105 | 106 | 
# --------------------------------------------------------------------------
# /MicroExpressionDetector/shapes/ActiveShape.py:
# --------------------------------------------------------------------------
from shapes.Shape import Shape
from shapes.Point import Point
from shapes.Vector import Vector
import numpy as np

class ActiveShape( Shape ):
    """A Shape whose points can be transformed as a unit.

    Every transform method returns a *new* ActiveShape built from the
    pointwise-transformed points; the receiver is never mutated.  On
    construction, the pairwise distance matrix R is cached.
    """

    def __init__( self, *args, **kwargs ):
        super( ActiveShape, self ).__init__( *args, **kwargs )
        # Cache the point-to-point distance matrix for this shape.
        self.R = self.calcR()

    # ---- shape-wide transforms: apply the pointwise op to every point ----
    def transform( self, transformation ):
        return ActiveShape( [ pt.transform( transformation ) for pt in self.shapePoints ] )

    def M( self, scaling, theta ):
        return ActiveShape( [ pt.M( scaling, theta ) for pt in self.shapePoints ] )

    def rotate( self, rotation ):
        return ActiveShape( [ pt.rotate( rotation ) for pt in self.shapePoints ] )

    def translate( self, translation ):
        return ActiveShape( [ pt.translate( translation ) for pt in self.shapePoints ] )

    def scale( self, scaling ):
        return ActiveShape( [ pt.scale( scaling ) for pt in self.shapePoints ] )

    def calcR( self ):
        """
        Calculates distance matrix between all points for a given shape
        (R[row][col] = distance from point `col` to point `row`).
        """
        pts = self.shapePoints
        return [ [ Point.dist( pts[col], pts[row] ) for col in range( self.n ) ]
                 for row in range( self.n ) ]

    def shapeDist( self, shape ):
        """Pointwise distances between this shape and another (same point count)."""
        return map( Point.dist, self.shapePoints, shape.shapePoints )

    def calcSingleCov( self, shape ):
        """Outer-product covariance contribution of (self - shape), both flattened."""
        delta = np.subtract( self.unravel( self.shapePoints ),
                             self.unravel( shape.shapePoints ) )
        row = np.mat( delta )          # 1 x 2n row vector
        return np.dot( row.T, row )    # 2n x 2n matrix

    @staticmethod
    def deravel( vect ):
        """Split a flat [x0, y0, x1, y1, ...] vector back into x- and y-lists."""
        xs = [ vect[i] for i in range( 0, len( vect ), 2 ) ]
        ys = [ vect[i] for i in range( 1, len( vect ), 2 ) ]
        return xs, ys

    @classmethod
    def createShape( cls, allPts ):
        """Build a shape from a flat interleaved coordinate vector."""
        xs, ys = cls.deravel( allPts )
        return cls( zip( xs, ys ) )
# --------------------------------------------------------------------------
# /MicroExpressionDetector/shapes/ActiveShape.pyc: (compiled binary, omitted)
# --------------------------------------------------------------------------
# /MicroExpressionDetector/shapes/Point.py:
# --------------------------------------------------------------------------
import numpy as np
import math

class Point( object ):
    """A 2-D point with float coordinates and read-only x/y properties."""

    def __init__( self, x, y ):
        # Coordinates are normalised to float on entry.
        self.__x = float( x )
        self.__y = float( y )

    @classmethod
    def fromTuple( cls, tup ):
        """Alternate constructor from an (x, y) pair."""
        return cls( tup[0], tup[1] )

    ## Properties (avoid need to update)
    @property
    def p( self ):
        """The (x, y) coordinate pair."""
        return ( self.__x, self.__y )

    @property
    def x( self ):
        return self.__x

    @property
    def y( self ):
        return self.__y

    def setY( self, y ):
        self.__y = y

    def setX( self, x ):
        self.__x = x

    @staticmethod
    def dist( p1, p2 ):
        """Euclidean distance between two points."""
        dx = p1.x - p2.x
        dy = p1.y - p2.y
        return math.sqrt( dx * dx + dy * dy )

    def __str__( self ):
        return '%f\t%f\n' % ( self.x, self.y )
# --------------------------------------------------------------------------
# /MicroExpressionDetector/shapes/Point.pyc: (compiled binary, omitted)
# --------------------------------------------------------------------------
https://raw.githubusercontent.com/vsimonis/MicroExpressionDetector/fd6498aa679a31338c324a5419152bde1dd9427c/MicroExpressionDetector/shapes/Point.pyc -------------------------------------------------------------------------------- /MicroExpressionDetector/shapes/Shape.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import math 3 | import matplotlib.pyplot as plt 4 | import seaborn as sns 5 | from shapes.Point import Point 6 | from shapes.Vector import Vector 7 | 8 | class Shape( object ): 9 | """ 10 | Shape object 11 | """ 12 | 13 | def __init__(self, pointList ): 14 | 15 | self.shapePoints = [] 16 | 17 | # Add points to shape 18 | 19 | if isinstance( pointList[0], list): 20 | for [x,y] in pointList: 21 | self.shapePoints.append( Vector(x,y) ) 22 | elif isinstance( pointList[0], tuple): 23 | for (x,y) in pointList: 24 | self.shapePoints.append( Vector(x,y) ) 25 | 26 | else: 27 | self.shapePoints = pointList 28 | 29 | 30 | @property 31 | def xs( self ): 32 | """ 33 | returns all x values of all points for a given shape 34 | """ 35 | return map( lambda pt : pt.x, self.shapePoints ) 36 | 37 | @property 38 | def ys( self ): 39 | """ 40 | returns all y values of all points for a given shape 41 | """ 42 | return map( lambda pt : pt.y, self.shapePoints ) 43 | 44 | @property 45 | def n( self ): 46 | """ 47 | returns the number of points in a given shape 48 | """ 49 | return len( self.shapePoints ) 50 | 51 | 52 | def centroid( self ): 53 | return Point( np.mean( self.xs ) , np.mean( self.ys ) ) 54 | 55 | @staticmethod 56 | def unravel( pointList ): 57 | allPts = [] 58 | for pt in pointList: 59 | allPts.append( pt.x ) 60 | allPts.append( pt.y ) 61 | return allPts 62 | 63 | 64 | def flatten( self ): 65 | return self.unravel( self.shapePoints ) 66 | 67 | 68 | 69 | def draw( self, palette, axis ): 70 | _ = axis.scatter( self.xs, self.ys, c = palette, s= 3 ) 71 | 72 | 73 | def __str__( self ): 74 | a = map( str, 
self.shapePoints ) 75 | return ''.join( a ) 76 | 77 | 78 | 79 | 80 | -------------------------------------------------------------------------------- /MicroExpressionDetector/shapes/Shape.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vsimonis/MicroExpressionDetector/fd6498aa679a31338c324a5419152bde1dd9427c/MicroExpressionDetector/shapes/Shape.pyc -------------------------------------------------------------------------------- /MicroExpressionDetector/shapes/Vector.py: -------------------------------------------------------------------------------- 1 | from Point import Point 2 | import numpy as np 3 | import math 4 | 5 | class Vector( Point ): 6 | def __init__( self, *args, **kwargs): 7 | super( Vector, self ).__init__( *args, **kwargs) 8 | 9 | @property 10 | def v( self ): 11 | return [[ self.x ],[ self.y ]] 12 | 13 | def transform( self, transDict ): 14 | x1, y1 = np.add( np.dot( transDict['srot'], self.v ), transDict['t'] ) 15 | return Vector( x1, y1 ) 16 | 17 | def rotate( self, rot ): #as matrix 18 | x1, y1 = np.dot( rot , self.v ) 19 | return Vector( x1, y1 ) 20 | 21 | def M( self, scale, theta ): 22 | rotMat = self.calcSRotMat( scale, theta ) 23 | return self.rotate( rotMat ) 24 | 25 | def scale( self, scale ): 26 | x1, y1 = np.multiply( scale, self.v ) 27 | return Vector( x1, y1 ) 28 | 29 | def translate( self, vect ): 30 | x1, y1 = np.add( self.v, vect ) 31 | return Vector( x1, y1 ) 32 | 33 | @staticmethod 34 | def calcSRotMat( scale, theta ): 35 | d = scale * math.cos( theta ) 36 | e = scale * math.sin( theta ) 37 | sRotMat = [[ d, - e], 38 | [ e, d] ] 39 | 40 | return sRotMat 41 | 42 | @staticmethod 43 | def thetaFromRot( rotMat ): 44 | return math.atan2( rotMat[1][1], rotMat[1][0] ) 45 | 46 | @staticmethod 47 | def unit( v ): 48 | #print "UNITV" 49 | #print v 50 | if np.linalg.norm( v ) == 0: 51 | return np.array(v) 52 | 53 | if v[0] == 0 and v[1] == 0: 54 | return v 55 | 56 | 
else: 57 | return v / np.linalg.norm( v ) 58 | 59 | @staticmethod 60 | def angleBetween( v1, v2 ): 61 | return math.atan2( v1[0], v1[1] ) - math.atan2( v2[0], v2[1] ) 62 | 63 | 64 | -------------------------------------------------------------------------------- /MicroExpressionDetector/shapes/Vector.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vsimonis/MicroExpressionDetector/fd6498aa679a31338c324a5419152bde1dd9427c/MicroExpressionDetector/shapes/Vector.pyc -------------------------------------------------------------------------------- /MicroExpressionDetector/shapes/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vsimonis/MicroExpressionDetector/fd6498aa679a31338c324a5419152bde1dd9427c/MicroExpressionDetector/shapes/__init__.py -------------------------------------------------------------------------------- /MicroExpressionDetector/shapes/__init__.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vsimonis/MicroExpressionDetector/fd6498aa679a31338c324a5419152bde1dd9427c/MicroExpressionDetector/shapes/__init__.pyc -------------------------------------------------------------------------------- /MicroExpressionDetector/tests/script_tests/active_shape_models/PCA_triangle_test.py: -------------------------------------------------------------------------------- 1 | from ParallelASM import Shape, Point, PASM 2 | from matplotlib import pyplot as plt 3 | import numpy as np 4 | import seaborn as sns 5 | import math 6 | 7 | pal = sns.xkcd_palette( ["pink", "azure", "bright yellow" ]) 8 | mpal = sns.xkcd_palette( ['black', 'black', 'black']) 9 | newpal = sns.xkcd_palette( ["crimson", "navy", "poop"] ) 10 | MUL = 1 11 | 12 | ## Setup 13 | def setup( ): 14 | s1 = Shape( [ Point(200,340), Point( 0, 200), Point( 350,200) ] ) 15 | s2 = Shape( [ 
Point(210,320), Point( 5, 205), Point( 340,190) ] ) 16 | s3 = Shape( [ Point(205,300), Point( 10, 190), Point( 344,204) ] ) 17 | s4 = Shape( [ Point(199,380), Point( -5, 205), Point( 333,203) ] ) 18 | s5 = Shape( [ Point(190,290), Point( 0, 190), Point( 351,201) ] ) 19 | 20 | asm = PASM( [0,1], 10 ) 21 | 22 | asm.addShape( s1 ) 23 | asm.addShape( s2 ) 24 | asm.addShape( s3 ) 25 | asm.addShape( s4 ) 26 | asm.addShape( s5 ) 27 | return asm 28 | 29 | ## Calculate PCS: 30 | def PCA( asm ): 31 | asm.calcMeanShape() 32 | map( lambda x : x.calcDiff( asm.meanShape ), asm.allShapes ) 33 | cov = map( lambda x : x.calcSingleCov(), asm.allShapes ) 34 | S = sum( cov ) 35 | 36 | vals, vecs = np.linalg.eig( S ) 37 | vecs = np.array( vecs ) 38 | return vals, vecs 39 | 40 | 41 | def plotOnePointOneEv( evIx, ptIx, asm, vals, vecs, color, axes): 42 | axes.plot( [ asm.meanShape.allPts[ptIx], asm.meanShape.allPts[ptIx] + MUL * math.sqrt(vals[evIx] )* vecs[ptIx][evIx] ] , 43 | [ asm.meanShape.allPts[ptIx+1], asm.meanShape.allPts[ptIx+1] + MUL * math.sqrt( vals[evIx] )* vecs[ptIx + 1][evIx] ], 44 | c = color, 45 | lw = 1) 46 | 47 | def plotOnePointOneEvOrigin( evIx, ptIx, vals, vecs, color, axes): 48 | axes.plot( [0, vecs[ptIx][evIx] * vals[evIx]] , 49 | [ 0, vecs[ptIx + 1][evIx] * vals[evIx] ], 50 | c = color, 51 | lw = 1) 52 | 53 | 54 | def plotEVmpCentered( asm, vals, vecs, newpal, axes): 55 | for pt in [0,2,4]: 56 | plotOnePointOneEv( 0, pt, asm, vals, vecs, newpal[pt/2], axes ) 57 | plotOnePointOneEv( 1, pt, asm, vals, vecs, newpal[pt/2], axes ) 58 | plotOnePointOneEv( 2, pt, asm, vals, vecs, newpal[pt/2], axes ) 59 | 60 | 61 | def plotEVOriginCentered( vals, vecs, newpal, axes): 62 | for pt in [0,2,4]: 63 | for i in range( 3 ): 64 | plotOnePointOneEvOrigin( i, pt, vals, vecs, newpal[pt/2], axes[i] ) 65 | 66 | 67 | 68 | 69 | def project( evIx, asm, vals, vecs ): 70 | X = np.add( asm.meanShape.allPts, np.multiply( vecs[:,evIx], math.sqrt( vals[evIx] ))) 71 | x,y = reravel( X ) 72 | 
#s = Shape( [ Point( X[0], X[1]), Point( X[2], X[3] ), Point( X[4], X[5] ) ] ) 73 | sv = [] 74 | for a, b in zip( x, y): 75 | sv.append( Point( a, b ) ) 76 | s = Shape( sv ) 77 | return s 78 | 79 | def singlePoint( ptIx, evIx, asm, vals, vecs ): 80 | return Point( asm.meanShape.xs[ptIx] + MUL * math.sqrt( vals[evIx] ) * vecs[ptIx][evIx], 81 | asm.meanShape.ys[ptIx] + MUL * math.sqrt(vals[evIx]) * vecs[ptIx+1][evIx] ) 82 | 83 | def reravel( vect ): 84 | x, y = [], [] 85 | vect = np.ravel( vect ) 86 | for i in range( len( vect ) ): 87 | if i % 2 == 0: 88 | x.append( vect[i] ) 89 | else: 90 | y.append( vect[i] ) 91 | return x, y 92 | 93 | def showVaryMultiplePCS( asm, vals, vecs, numPlots, numPCs, newpal): 94 | f, axes = plt.subplots( 1, numPlots, sharex = True, sharey = True ) 95 | bs = [] 96 | for p in range( numPCs ): 97 | rs = np.linspace( - MUL * math.sqrt( vals[p] ), MUL * math.sqrt( vals[p] ), numPlots ) 98 | bs.append( rs ) 99 | P = vecs[:, 0:numPCs] 100 | for pl in range(numPlots) : 101 | b = [ bs[p][pl] for p in range(len(bs) ) ] 102 | X = np.add( asm.meanShape.allPts, np.dot( P, b ) ) 103 | x, y = reravel( X ) 104 | s = Shape( [ Point (pt[0], pt[1] ) for pt in zip(x,y) ]) 105 | s.draw( newpal, axes[pl] ) ## diff 106 | axes[pl].plot( s.xs, s.ys, lw =1, c ='k') 107 | f.savefig( "simple-example-%d-at-a-time.png" % numPCs) 108 | 109 | def showVary( asm, vals, vecs, numPlots, eval ): 110 | # Vary one eigenvalue 111 | f, axes = plt.subplots( 1, numPlots, sharex = True, sharey = True ) 112 | 113 | vals = np.ravel( vals ) 114 | rs = np.linspace( - MUL * math.sqrt( vals[eval] ), MUL * math.sqrt( vals[eval] ), numPlots ) 115 | 116 | for m in range( len(rs) ): 117 | reps = np.add(asm.meanShape.allPts,np.multiply( np.ravel(vecs[:,eval] ), rs[m] )) 118 | x, y = reravel( reps ) 119 | s = Shape( [ Point (pt[0], pt[1] ) for pt in zip(x,y) ]) 120 | s.draw( newpal, axes[m] ) ## diff 121 | axes[m].plot( s.xs, s.ys, lw =1, c ='k') 122 | 123 | plt.savefig( 
'simple-example-PC%d.png'% eval ) 124 | 125 | 126 | 127 | ### Main stuff 128 | asm = setup() 129 | 130 | # PCA 131 | vals, vecs = PCA( asm ) 132 | 133 | # Variance explained 134 | for i in range( 2 * asm.n ): 135 | print "%d: %f, %f" % ( i, vals[i] / sum( vals ), sum( vals[:i+1] ) / sum(vals ) ) 136 | 137 | # Setup grid 138 | f, axes = plt.subplots( 1, 3 ) 139 | f1, axes1 = plt.subplots( 1,1) 140 | 141 | 142 | # Draw mean shape 143 | asm.meanShape.draw( mpal, axes1) 144 | 145 | # Draw all shapes 146 | for sh in asm.allShapes: 147 | sh.draw( pal, axes1 ) 148 | 149 | 150 | # Draw centered eigenvectors 151 | plotEVmpCentered( asm, vals, vecs, newpal, axes1 ) 152 | plotEVOriginCentered( vals, vecs, newpal, axes ) 153 | 154 | # Example projection 155 | s1 = project( 0, asm, vals, vecs) 156 | s1.draw( newpal, axes1) 157 | 158 | f.savefig('simple-example-EVs.png') 159 | f1.savefig( 'simple-example-EVs-origin.png') 160 | 161 | for j in range( 3 ): 162 | showVaryMultiplePCS( asm, vals, vecs, 7, j+1, pal) 163 | showVary( asm, vals, vecs, 7, j ) 164 | 165 | 166 | 167 | 168 | 169 | -------------------------------------------------------------------------------- /MicroExpressionDetector/tests/script_tests/active_shape_models/face_ASM_test.py: -------------------------------------------------------------------------------- 1 | import os 2 | from ActiveShapeModels import ASM, Point, Shape 3 | import matplotlib.pyplot as plt 4 | import seaborn as sns 5 | import math 6 | import numpy as np 7 | import os 8 | 9 | 10 | def drawEyeSegment( axis, shape ): 11 | axis.plot( [shape.shapePoints[33].x, shape.shapePoints[28].x], 12 | [shape.shapePoints[33].y , shape.shapePoints[28].y], 13 | color= 'g', ls = '-') 14 | axis.scatter( [shape.shapePoints[33].x, shape.shapePoints[28].x], 15 | [shape.shapePoints[33].y , shape.shapePoints[28].y], 16 | color= 'g') 17 | 18 | def drawShape( axis, shape ): 19 | shape.draw( sns.xkcd_palette( ["light blue"] ), 0, axis) 20 | 21 | def drawCentroid( axis, cm 
): 22 | axis.scatter( cm.x, cm.y, c='r') 23 | 24 | def plotAll( axis, shape ): 25 | cm = ASM.centroid( shape ) 26 | drawShape( axis, shape) 27 | drawCentroid( axis, cm ) 28 | drawEyeSegment( axis, shape ) 29 | 30 | #########3 31 | DIR = "C:\\Users\\Valerie\\Desktop\\MicroExpress\\facePoints" 32 | SUBDIR = "session_1" 33 | folder = os.path.join( DIR, SUBDIR ) 34 | files = next(os.walk(folder))[2] 35 | 36 | for f in files: 37 | with open( os.path.join(folder,f), "r" ) as infile: 38 | ptList = [ ] 39 | allLines = infile.readlines() 40 | pointLine = False 41 | cleanLines = [ x.strip() for x in allLines] 42 | for line in cleanLines: 43 | if line is '{': 44 | pointLine = True 45 | 46 | elif line is '}': 47 | pointLine = False 48 | pass 49 | elif pointLine: 50 | ptList.append( map( float, line.split(' ') ) ) 51 | else: 52 | pass 53 | ptList 54 | s1 = Shape( ptList ) 55 | 56 | f, ((ax1,ax2),(ax3,ax4)) = plt.subplots(2,2) 57 | 58 | ## Original shape (input) 59 | cmShape = ASM.centroid( s1 ) 60 | plotAll( ax1, s1 ) 61 | 62 | ############## Calc transformations ################### 63 | ## Translate 64 | t = [[ -cmShape.x ], [ -cmShape.y ]] 65 | for pt in s1.shapePoints: 66 | pt.translate( t ) 67 | s1.update() 68 | plotAll( ax2, s1) 69 | 70 | leftEyeIx = 33 71 | rightEyeIx = 28 72 | 73 | ## Scale 74 | d1 = s1.shapePoints[leftEyeIx].dist( s1.shapePoints[rightEyeIx] ) 75 | s = float(1)/float(d1) 76 | 77 | ## Rotation 78 | leftEyeIx = 33 79 | rightEyeIx = 28 80 | 81 | xDiff = s1.shapePoints[rightEyeIx].x - s1.shapePoints[leftEyeIx].x 82 | yDiff = s1.shapePoints[rightEyeIx].y - s1.shapePoints[leftEyeIx].y 83 | 84 | p0 = [ xDiff, yDiff ] #s1.shapePoints[0].x, s1.shapePoints[0].y ] 85 | axisVector = [ 1, 0] 86 | thetaP = ASM.angleV( p0, axisVector ) 87 | thetaRot = thetaP 88 | 89 | #thetaRot = math.atan2( yDiff, xDiff ) 90 | 91 | rot = [[ math.cos( thetaRot ), -math.sin( thetaRot ) ], 92 | [ math.sin( thetaRot ), math.cos( thetaRot ) ] ] 93 | 94 | 95 | 96 | 97 | 98 | 99 | ## What 
order to calculate and apply the transformations? 100 | 101 | 102 | 103 | 104 | for pt in s1.shapePoints: 105 | pt.rotate( rot ) 106 | s1.update() 107 | 108 | plotAll( ax3, s1 ) 109 | 110 | for pt in s1.shapePoints: 111 | pt.scale( s ) 112 | s1.update() 113 | plotAll( ax4, s1) 114 | 115 | 116 | plt.show() 117 | 118 | -------------------------------------------------------------------------------- /MicroExpressionDetector/tests/script_tests/active_shape_models/face_read_points.py: -------------------------------------------------------------------------------- 1 | import cv2 2 | import os 3 | from ParallelASM import Shape, PASM, Point 4 | from FaceDraw import FaceDraw 5 | from matplotlib import pyplot as plt 6 | import numpy as np 7 | 8 | 9 | DIR = 'C:\\Users\\Valerie\\Desktop\\MicroExpress\\facePoints\\session_1\\' 10 | file = '000_1_1.pts' 11 | img = cv2.imread( 'C:\\Users\\Valerie\\Downloads\\000_1_1.ppm') 12 | img_g = cv2.cvtColor( img, cv2.COLOR_BGR2GRAY ) 13 | 14 | pasm = PASM( [36,31], 10 ) 15 | 16 | with open( os.path.join( DIR, file), "r" ) as infile: 17 | ptList = [ ] 18 | allLines = infile.readlines() 19 | pointLine = False 20 | cleanLines = [ x.strip() for x in allLines] 21 | for line in cleanLines: 22 | if line is '{': 23 | pointLine = True 24 | 25 | elif line is '}': 26 | pointLine = False 27 | pass 28 | elif pointLine: 29 | ptList.append( map( float, line.split(' ') ) ) 30 | else: 31 | pass 32 | ptList 33 | s1 = Shape( ptList ) 34 | plt.imshow( img_g, cmap = 'gray' ) 35 | FaceDraw( s1, plt).drawBold() 36 | plt.show() 37 | 38 | 39 | plt.imshow(cv2.filter2D( img_g, cv2.CV_8U, np.array([ -1, 0, 1]) )) 40 | FaceDraw( s1, plt).drawBold() 41 | plt.show() 42 | plt.imshow(cv2.filter2D( img_g, cv2.CV_8U, np.array([[ -1], [0],[ 1]]) )) 43 | FaceDraw( s1, plt).drawBold() 44 | plt.show() 45 | 46 | 47 | -------------------------------------------------------------------------------- 
/MicroExpressionDetector/tests/script_tests/active_shape_models/filter_training.py: -------------------------------------------------------------------------------- 1 | ### Filter out training images 2 | 3 | from active_shape_models.ActiveShapeModel import ActiveShapeModel 4 | from active_shape_models.ShapeAligner import ShapeAligner 5 | from active_shape_models.ApplyASM import ApplyASM 6 | 7 | from helpers.WriteUpHelper import WriteUp 8 | from helpers.FileHelper import FileHelper 9 | from helpers.DrawFace import DrawFace 10 | 11 | from shapes.ActiveShape import ActiveShape 12 | 13 | import logging 14 | from matplotlib import pyplot as plt 15 | import numpy as np 16 | import os 17 | 18 | output = "C:\\Users\\Valerie\\Desktop\\output\\" 19 | iters = 4 20 | training = 500 21 | 22 | train = True 23 | write = False 24 | align = True 25 | 26 | i = 0 27 | study = "%d-%d-%d" % ( iters, training, i ) 28 | if not os.path.exists( os.path.join( output, study ) ) and (train or align ): 29 | os.mkdir( os.path.join( output, study ) ) 30 | output = os.path.join( output, study ) 31 | else: 32 | while os.path.exists( os.path.join( output, study )): 33 | i += 1 34 | study = "%d-%d-%d" % ( iters, training, i ) 35 | if train or align: 36 | os.mkdir( os.path.join( output, study ) ) 37 | else: 38 | study = "%d-%d-%d" % ( iters, training, i ) 39 | output = os.path.join( output, study ) 40 | 41 | 42 | 43 | fh = FileHelper( iters, training , output ) 44 | 45 | if train: 46 | asm = ActiveShapeModel( [36,31] ) 47 | asm = fh.readInPoints( asm ) 48 | 49 | ### Align Shapes 50 | if align: 51 | asm = ShapeAligner( asm, iters, output ).alignTrainingSet( ) 52 | fh.writeOutASM( asm ) #write out to read in later 53 | 54 | 55 | d = map( lambda x: x.shapeDist( asm.normMeanShape ), asm.allShapes ) 56 | d1 = np.mean( d, 1 ) 57 | 58 | min = np.min( d1 ) 59 | mn = np.mean( d1 ) 60 | q1 = np.percentile( d1, 0.25) 61 | sd = np.std( d1 ) 62 | max = np.max( d1 ) 63 | q3 = np.percentile( d1, 0.75) 64 | iqr = q3 
- q1 65 | 66 | cu1 = mn + 2*sd 67 | cu2 = mn + 3*sd 68 | 69 | i1 = np.where( d1 > cu1 )[0] 70 | i2 = np.where( d1 > cu2 )[0] 71 | 72 | f, (ax1, ax2) = plt.subplots( 1,2, sharex=True, sharey = True ) 73 | 74 | for ix in list(i1): 75 | DrawFace( asm.allShapes[ ix ], ax1 ).drawContrast() 76 | for ix in list(i2): 77 | DrawFace( asm.allShapes[ ix ], ax2 ).drawContrast() 78 | 79 | DrawFace( asm.meanShape, ax1).drawBold() 80 | DrawFace( asm.meanShape, ax2).drawBold() 81 | 82 | plt.show() 83 | plt.close() 84 | 85 | f, (ax1, ax2) = plt.subplots( 1,2, sharex=True, sharey = True ) 86 | for ix in ( set(range(500)) - set(i1) ): 87 | DrawFace( asm.allShapes[ ix ], ax1 ).drawContrast() 88 | for ix in ( set(range(500)) - set(i2) ): 89 | DrawFace( asm.allShapes[ ix ], ax2 ).drawContrast() 90 | DrawFace( asm.meanShape, ax1).drawBold() 91 | DrawFace( asm.meanShape, ax2).drawBold() 92 | 93 | plt.show() 94 | plt.close() -------------------------------------------------------------------------------- /MicroExpressionDetector/tests/script_tests/active_shape_models/gradient_deform_test.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import cv2 3 | from ParallelASM import Point, PASM 4 | import math 5 | 6 | fx = np.matrix( [-1, 0, 1] ) 7 | fy = np.transpose( fx ) 8 | 9 | img = cv2.imread( 'C:\\Users\\Valerie\\Desktop\\MicroExpress\\CASME2\\Cropped\\Cropped\\sub02\\EP01_11f\\reg_img46.jpg' ) 10 | imgg = cv2.cvtColor( img, cv2.COLOR_BGR2GRAY ) 11 | 12 | ptList = [ Point(1,1), Point(20,50), Point(5, 15) ] 13 | 14 | def getRegion( img, pt ): 15 | return img[ pt.y - 1 : pt.y + 2, pt.x - 1 : pt.x + 2 ] 16 | 17 | a = map( lambda x : getRegion( imgg, x ) , ptList ) 18 | 19 | def getGradient( pt, img ): 20 | img = np.array([]) 21 | dx = img 22 | delF = [ (img[ pt.y, pt.x + 1] - img[pt.y, pt.x - 1 ] )/2, 23 | (img[ pt.y + 1 , pt.x] - img[pt.y - 1, pt.x ] )/2 ] 24 | print delF 25 | mag = math.sqrt( delF[0] ** 2 + delF[1] ** 2 ) 26 | dir 
= PASM.angleV( delF, [ 0, 0] ) 27 | return mag, dir 28 | 29 | def applyFilter( mat, filter ): 30 | return cv2.filter2D( mat, cv2.CV_8U, filter ) 31 | 32 | map( lambda x : getGradient( x, imgg ), ptList) 33 | -------------------------------------------------------------------------------- /MicroExpressionDetector/tests/script_tests/active_shape_models/parallel_pool_rotation_test.py: -------------------------------------------------------------------------------- 1 | #from pathos.multiprocessing import ProcessingPool as Pool 2 | #import pathos.multiprocessing as mp 3 | 4 | from pathos.multiprocessing import ProcessPool as Pool 5 | from ActiveShapeModelsBetter import ASMB, Point, Shape 6 | import dill 7 | 8 | 9 | if __name__ == "__main__": 10 | asm = ASMB( [0,1],10 ) 11 | asm.addShape(Shape([ Point( 100,200), Point(200,440), Point( 400,300)] )) 12 | p = Pool() 13 | p.map(Point.rotate, asm.allShapes, [[-1,1],[1,-1]]) -------------------------------------------------------------------------------- /MicroExpressionDetector/tests/script_tests/active_shape_models/shapeNormTest1.py: -------------------------------------------------------------------------------- 1 | from ActiveShapeModels import ASM, Point, Shape 2 | import matplotlib.pyplot as plt 3 | import seaborn as sns 4 | import math 5 | import numpy as np 6 | import os 7 | def drawScaleSegment( axis, shape ): 8 | axis.plot( [shape.shapePoints[0].x, shape.shapePoints[1].x], # plot p0 to p1 segment 9 | [shape.shapePoints[0].y, shape.shapePoints[1].y], 10 | color= 'r', ls = '-') 11 | 12 | def drawLineUP( axis, shape, cm ): 13 | axis.plot( [ cm.x, shape.shapePoints[0].x], 14 | [ cm.y, shape.shapePoints[0].y], color='b') 15 | 16 | def drawShape( axis, shape ): 17 | shape.draw( sns.xkcd_palette( ["light blue"] ), 0, axis) 18 | 19 | def drawCentroid( axis, cm ): 20 | axis.scatter( cm.x, cm.y, c='r') 21 | 22 | def plotAll( axis, shape ): 23 | cm = ASM.centroid( shape ) 24 | drawShape( axis, shape) 25 | drawCentroid( axis, cm 
) 26 | drawLineUP( axis, shape, cm ) 27 | drawScaleSegment( axis, shape ) 28 | 29 | DIR = "C:\\Users\\Valerie\\Desktop\\stars" 30 | OUTPUT = os.path.join( DIR, "output20") 31 | ## 20 point shape 32 | #s1 = Shape( [ Point(857, -129), Point(89,-409), Point(-404,254), Point( 96,957), Point(877,712) ]) 33 | files = next(os.walk(OUTPUT))[2] 34 | f = files[0] 35 | with open( os.path.join( OUTPUT, f), "r" ) as infile: 36 | 37 | allLines = infile.readlines() 38 | if len(allLines) > 0: 39 | cleanLines = [ x.strip().split('\t') for x in allLines] 40 | ptList = [ Point( x[0], x[1]) for x in cleanLines ] 41 | s1 = Shape( ptList ) 42 | 43 | f, ((ax1,ax2),(ax3,ax4)) = plt.subplots(2,2) 44 | 45 | ## Original shape (input) 46 | cmShape = ASM.centroid( s1 ) 47 | plotAll( ax1, s1 ) 48 | 49 | ############## Calc transformations ################### 50 | ## Translate 51 | t = [[ -cmShape.x ], [ -cmShape.y ]] 52 | for pt in s1.shapePoints: 53 | pt.translate( t ) 54 | s1.update() 55 | plotAll( ax2, s1) 56 | 57 | 58 | 59 | ## Scale 60 | d1 = s1.shapePoints[0].dist( s1.shapePoints[1] ) 61 | s = float(1)/float(d1) 62 | 63 | ## Rotation 64 | p0 = [ s1.shapePoints[0].x, s1.shapePoints[0].y ] 65 | axisVector = [ 0, 1] 66 | thetaP = ASM.angleV( p0, axisVector ) 67 | thetaRot = 2*math.pi - thetaP 68 | rot = [[ math.cos( thetaRot ), -math.sin( thetaRot ) ], 69 | [ math.sin( thetaRot ), math.cos( thetaRot ) ] ] 70 | 71 | 72 | 73 | 74 | 75 | 76 | ## What order to calculate and apply the transformations? 
77 | 78 | 79 | 80 | 81 | for pt in s1.shapePoints: 82 | pt.rotate( rot ) 83 | s1.update() 84 | 85 | plotAll( ax3, s1 ) 86 | 87 | for pt in s1.shapePoints: 88 | pt.scale( s ) 89 | s1.update() 90 | plotAll( ax4, s1) 91 | 92 | 93 | plt.show() 94 | 95 | -------------------------------------------------------------------------------- /MicroExpressionDetector/tests/script_tests/active_shape_models/shapeNormalizationTest.py: -------------------------------------------------------------------------------- 1 | from ActiveShapeModels import ASM, Point, Shape 2 | import matplotlib.pyplot as plt 3 | import seaborn as sns 4 | import math 5 | import numpy as np 6 | 7 | s1 = Shape( [ Point(200,300), Point(100, 200), Point(300, 50 ) ] ) 8 | s2 = Shape( [ Point(150,250), Point(50, 100 ), Point(250, 0) ] ) 9 | 10 | 11 | 12 | f, ((ax1,ax2),(ax3,ax4)) = plt.subplots(2,2, sharex =True, sharey = True) 13 | s1.draw( sns.xkcd_palette( ["light blue" ]), ax1) 14 | s2.draw( sns.xkcd_palette( ["light blue"] ), ax2) 15 | 16 | 17 | cmShape = ASM.centroid( s1) 18 | 19 | cmMeanShape = ASM.centroid( s2 ) 20 | 21 | 22 | ax1.scatter( cmShape.x, cmShape.y, c='r') 23 | ax2.scatter( cmMeanShape.x, cmMeanShape.y, c='r') 24 | ax1.plot( [s1.shapePoints[0].x, s1.shapePoints[1].x], 25 | [s1.shapePoints[0].y, s1.shapePoints[1].y], 26 | color= 'r', ls = '-') 27 | 28 | ax2.plot( [s2.shapePoints[0].x, s2.shapePoints[1].x], 29 | [s2.shapePoints[0].y, s2.shapePoints[1].y], 30 | color= 'r', lw = 1, ls = '-') 31 | 32 | 33 | t = [[ cmShape.x - cmMeanShape.x ], [cmShape.y - cmMeanShape.y ]] 34 | 35 | ## Scale 36 | d1 = s1.shapePoints[0].dist( s1.shapePoints[1] ) 37 | d2 = s2.shapePoints[0].dist( s2.shapePoints[1] ) 38 | s = d1/d2 39 | 40 | ## Rotation 41 | #http://stackoverflow.com/questions/2827393/angles-between-two-n-dimensional-vectors-in-python 42 | p0 = [ s1.shapePoints[0].x, s1.shapePoints[0].y ] 43 | m0 = [ s2.shapePoints[0].x, s2.shapePoints[0].y ] 44 | axisVector = [ 1, 0 ] 45 | 46 | thetaP = ASM.angleV( 
p0, axisVector ) 47 | thetaM = ASM.angleV( m0, axisVector ) 48 | 49 | thetaRot = thetaP - thetaM 50 | 51 | rot = [[ math.cos( thetaRot ), -math.sin( thetaRot ) ], 52 | [ math.sin( thetaRot ), math.cos( thetaRot ) ] ] 53 | 54 | d= { 'rot' : np.multiply( s , rot) , 't': t} 55 | s2.applyTrans( d ) 56 | s2.draw( sns.xkcd_palette( ["light blue"] ), ax4) 57 | 58 | cmMeanShape1 = ASM.centroid( s2 ) 59 | ax4.scatter( cmMeanShape1.x, cmMeanShape1.y, c='r') 60 | ax4.plot( [s2.shapePoints[0].x, s2.shapePoints[1].x], 61 | [s2.shapePoints[0].y, s2.shapePoints[1].y], 62 | color= 'r', lw = 1, ls = '-') 63 | 64 | 65 | 66 | ### Checks 67 | print d1 68 | print d2 69 | print s2.shapePoints[0].dist( s2.shapePoints[1] ) #should be == d1 70 | 71 | print thetaP 72 | print thetaM 73 | print ASM.angleV([ s2.shapePoints[0].x, s2.shapePoints[0].y ], axisVector ) 74 | 75 | plt.show() 76 | -------------------------------------------------------------------------------- /MicroExpressionDetector/tests/script_tests/active_shape_models/shapeNormalizationTestToSelf.py: -------------------------------------------------------------------------------- 1 | from ActiveShapeModels import ASM, Point, Shape 2 | import matplotlib.pyplot as plt 3 | import seaborn as sns 4 | import math 5 | import numpy as np 6 | 7 | #s1 = Shape( [ Point(200,300), Point(100, 200), Point(300, 50 ) ] ) 8 | #s2 = Shape( [ Point(150,250), Point(50, 100 ), Point(250, 0) ] ) 9 | 10 | 11 | 12 | s1 = Shape( [ Point(857, -129), Point(89,-409), Point(-404,254), Point( 96,957), Point(877,712) ]) 13 | 14 | f, ((ax1,ax2),(ax3,ax4)) = plt.subplots(2,2) 15 | 16 | s1.draw( sns.xkcd_palette( ["light blue" ]), 0, ax1) 17 | #s2.draw( sns.xkcd_palette( ["light blue"] ), ax2) 18 | 19 | cmShape = ASM.centroid( s1 ) 20 | #cmMeanShape = ASM.centroid( s2 ) 21 | 22 | 23 | ax1.scatter( cmShape.x, cmShape.y, c='r') 24 | #ax2.scatter( cmMeanShape.x, cmMeanShape.y, c='r') 25 | ax1.plot( [s1.shapePoints[0].x, s1.shapePoints[1].x], 26 | 
[s1.shapePoints[0].y, s1.shapePoints[1].y], 27 | color= 'r', ls = '-') 28 | 29 | #ax2.plot( [s2.shapePoints[0].x, s2.shapePoints[1].x], 30 | # [s2.shapePoints[0].y, s2.shapePoints[1].y], 31 | # color= 'r', lw = 1, ls = '-') 32 | 33 | 34 | t = [[ -cmShape.x ], [ -cmShape.y ]] 35 | 36 | ## Scale 37 | d1 = s1.shapePoints[0].dist( s1.shapePoints[1] ) 38 | #d2 = s2.shapePoints[0].dist( s2.shapePoints[1] ) 39 | s = 1/d1 40 | 41 | ## Rotation 42 | #http://stackoverflow.com/questions/2827393/angles-between-two-n-dimensional-vectors-in-python 43 | p0 = [ s1.shapePoints[0].x, s1.shapePoints[0].y ] 44 | #m0 = [ s2.shapePoints[0].x, s2.shapePoints[0].y ] 45 | axisVector = [ 1, 0 ] 46 | 47 | thetaP = ASM.angleV( p0, axisVector ) 48 | #thetaM = ASM.angleV( m0, axisVector ) 49 | 50 | thetaRot = math.pi / 2 - thetaP 51 | 52 | rot = [[ math.cos( thetaRot ), -math.sin( thetaRot ) ], 53 | [ math.sin( thetaRot ), math.cos( thetaRot ) ] ] 54 | 55 | #d= { 'rot' : np.multiply( s , rot) , 't': t} 56 | #s1.applyTrans( d )A 57 | 58 | 59 | for pt in s1.shapePoints: 60 | pt.translate( t ) 61 | s1.update() 62 | s1.draw( sns.xkcd_palette( ["light blue"] ), 0, ax2) 63 | 64 | for pt in s1.shapePoints: 65 | pt.rotate( rot ) 66 | s1.update() 67 | s1.draw( sns.xkcd_palette( ["light blue"] ), 0, ax3) 68 | 69 | 70 | 71 | 72 | cmMeanShape1 = ASM.centroid( s1 ) 73 | ax2.scatter( cmMeanShape1.x, cmMeanShape1.y, c='r') 74 | ax2.plot( [s1.shapePoints[0].x, s1.shapePoints[1].x], 75 | [s1.shapePoints[0].y, s1.shapePoints[1].y], 76 | color= 'r', lw = 1, ls = '-') 77 | 78 | plt.show() 79 | 80 | ### Checks 81 | print d1 82 | print d2 83 | print s2.shapePoints[0].dist( s2.shapePoints[1] ) #should be == d1 84 | 85 | print thetaP 86 | print thetaM 87 | print ASM.angleV([ s2.shapePoints[0].x, s2.shapePoints[0].y ], axisVector ) 88 | 89 | plt.show() 90 | -------------------------------------------------------------------------------- /MicroExpressionDetector/tests/script_tests/feature_extraction/gabor_dim_OO.py: 
## http://stackoverflow.com/questions/28995146/matlab-ind2sub-equivalent-in-python
def getOrientationScaleIx( ix, no, ns ):
    """Map a flat Gabor filter index to its (orientation, scale) pair."""
    return ind2sub( ix, no, ns )

def ind2sub( ix, nrows, ncols ):
    """Convert flat index *ix* into (row, col) for an nrows x ncols grid.

    BUG FIX: uses floor division (via divmod) so the row stays an int under
    Python 3 -- the original `ix / ncols` true-divides there; behavior is
    unchanged under Python 2.  `nrows` is unused but kept for signature
    compatibility with existing callers.
    """
    row, col = divmod( ix, ncols )
    return (row, col)
import cv2
import imageProcessingHelp as iph

MAX_FRAMES = 10000  # safety cap on frames pulled from one video


def processImage( videoFile ):
    """Detect the face in each frame of *videoFile*, build a scale pyramid of
    the face region, extract MSER region properties per pyramid level, and
    drop regions that reappear at the next-coarser scale (duplicates).

    Returns (imageSet, fileHelp): imageSet maps frame key -> {'info', 'regions'};
    fileHelp is currently unused and returned empty.

    BUG FIXES vs. original: the constant was defined as MAx_FRAMES but used
    as MAX_FRAMES (NameError); a stray ')' after CascadeClassifier(...) was a
    SyntaxError; the pyramid was built from the undefined name `imageIn`
    while the cropped face `roi_gray` was computed and never used; `nframes`
    was never incremented, so the frame cap had no effect.  The cascade is
    also loaded once instead of once per frame.
    """
    nframes = 0
    cap = cv2.VideoCapture( videoFile )

    imageSet = {}
    fileHelp = {}
    key = 0

    # NOTE(review): CASCADE, NUMLEVELS, SIGMAPYR and TAPSPYR are not defined
    # in this module -- they must come from the importing scope; confirm.
    face_cascade = cv2.CascadeClassifier( CASCADE )

    while nframes < MAX_FRAMES:
        ret, img = cap.read()
        if not ret:
            break
        nframes += 1
        gray = cv2.cvtColor( img, cv2.COLOR_BGR2GRAY )

        faces = face_cascade.detectMultiScale( gray, 1.3, 5 )
        x, y, w, h = faces[0]  # assume the first detection is the face

        info = {}
        d = {}
        j = 0
        roi_gray = gray[y:y+h, x:x+w]

        pyr, reg = iph.scalePyramid( roi_gray, NUMLEVELS, SIGMAPYR, TAPSPYR )
        ## i iterates levels
        for i in range( len( pyr ) ):
            ## r are msers detected in image
            for r in reg[i]:
                ## Affine Normalization: express centroid/area at base scale
                props = iph.regionProps( pyr[i], r, (key, i, j) )
                props['ncentroid'] = ( props['centroid'][0] * 2 ** i,
                                       props['centroid'][1] * 2 ** i )
                props['narea'] = props['area'] * 4 ** i
                props['level'] = i
                d[j] = props
                # j flattens all regions to be considered together
                j += 1

        info['TotalRegions'] = len(d)

        # ELIMINATE DUPLICATE REGIONS
        elim = []
        for r0, v0 in d.iteritems():
            ## Finer scale
            cx0 = v0['ncentroid'][0]
            cy0 = v0['ncentroid'][1]
            a0 = v0['narea']
            l0 = v0['level']
            ma0 = v0['mal']
            for r1, v1 in d.iteritems():
                ## Coarser scale
                cx1 = v1['ncentroid'][0]
                cy1 = v1['ncentroid'][1]
                a1 = v1['narea']
                l1 = v1['level']

                ## Only compare to the one coarser scale directly below
                if l1 - l0 != 1:
                    continue

                # Centroid within 4 pix
                if abs( cx0 - cx1 ) < 4 and abs( cy0 - cy1 ) < 4:
                    # Close areas (within 20%)
                    if abs( a0 - a1 ) / max( a0, a1 ) < 0.2:
                        elim.append( r0 )
                        break

            # Small minor axes at the finer levels are noise
            if l0 >= 1:
                if ma0 < 25:
                    elim.append( r0 )

        for el in elim:
            if el in d:  # elim may hold duplicates; pop each key once
                d.pop( el )

        imageSet.update( { key : { 'info' : info, 'regions' : d } } )
        key += 1
    return imageSet, fileHelp
def hinge( v, n ):
    """Return the (lower, upper) integer bounds of a length-*n* window
    centered on coordinate *v*: round(v) +/- floor(n/2)."""
    center = round( v )
    half = math.floor( n / 2 )
    return int( center - half ), int( center + half )
def coordOffset( pt, n ):
    """Return (dx, dy): the displacement of *pt* from the center of an
    n x n window (n odd), in (col, row) order.

    BUG FIX: the y offset divided by 1 instead of 2 -- `(n-1)/1` -- so the
    row displacement was measured from the window edge rather than its
    center; both axes now subtract the half-width (n-1)//2 (floor division
    also keeps the result an int under Python 3).
    """
    y = pt.y  # row
    x = pt.x  # col
    half = (n - 1) // 2
    return x - half, y - half
def genTemplateDict( ):
    """Build {landmark index -> 5x5 template patch} by slicing the reference
    face image around every point of the ground-truth shape."""
    return { idx : slice( ebenFace, 5, pt )
             for idx, pt in enumerate( ebenShape.shapePoints ) }
def plotTemplates( ):
    """Render each template patch without axis ticks and save it to the
    matching/templates output directory, one PNG per landmark index."""
    for idx, patch in genTemplateDict().iteritems():
        plt.imshow( patch )
        axes = plt.gca().axes
        axes.xaxis.set_ticks([])
        axes.yaxis.set_ticks([])
        plt.savefig( "C:\\Users\\Valerie\\Desktop\\output\\matching\\templates\\%d.png" % idx, bbox_inches = 0)
def drawGroundTruth( ebenFace ):
    """Show the reference face with the annotated shape drawn on top
    (bold outline plus individual landmark points), cropped to the face
    region; blocks until the window is closed."""
    plt.imshow( ebenFace )
    outline = DrawFace( ebenShape, plt )
    outline.drawBold()
    DrawFace( ebenShape, plt ).drawPoints()
    plt.xlim( 200, 525 )
    plt.ylim( 525, 225 )  # inverted y: image coordinates
    plt.show()
active_shape_models.ActiveShapeModel import ActiveShapeModel 6 | from active_shape_models.ApplyASM import ApplyASM 7 | from shapes.Vector import Vector 8 | from matplotlib import pyplot as plt 9 | from matplotlib import gridspec 10 | import numpy as np 11 | import math 12 | from image_processing.TemplateMatcher import TemplateMatcher 13 | 14 | i = 20 15 | tr = 500 16 | out = "C:\\Users\\Valerie\\Desktop\\output\\ASMTraining-MessingAround\\20-500-1" 17 | 18 | fh = FileHelper( i, tr, out, False, False ) 19 | 20 | ebenFace = fh.readInImage() 21 | ebenPoints = fh.readInOneDude( '000_1_1.pts') 22 | ebenShape = ActiveShape( ebenPoints ) 23 | 24 | ## draw indices 25 | DrawFace( ebenShape, plt).labelIndices() 26 | plt.imshow( ebenFace ) 27 | plt.set_cmap( "gray" ) 28 | plt.gca().axes.xaxis.set_ticks([]) 29 | plt.gca().axes.yaxis.set_ticks([]) 30 | plt.show() 31 | 32 | 33 | fh = FileHelper( i, tr, out, True, False ) 34 | 35 | ebenFace = fh.readInImage() 36 | ebenPoints = fh.readInOneDude( '000_1_1.pts') 37 | ebenShape = ActiveShape( ebenPoints 38 | 39 | ## draw indices 40 | DrawFace( ebenShape, plt).labelIndices() 41 | plt.imshow( ebenFace ) 42 | plt.set_cmap( "gray" ) 43 | plt.set_cmap( "gray" ) 44 | plt.gca().axes.xaxis.set_ticks([]) 45 | plt.gca().axes.yaxis.set_ticks([]) 46 | 47 | plt.show() 48 | 49 | 50 | 51 | 52 | 53 | #### MATCHING PROCESS 54 | 55 | # Read in image 56 | I = cv2.imread( "C:\Users\Valerie\Desktop\MicroExpress\CASME2\CASME2_RAW\CASME2-RAW\sub01\EP02_01f\img1.jpg") 57 | I = cv2.cvtColor( I, cv2.COLOR_BGR2GRAY) 58 | 59 | # Align shape 60 | asm = ActiveShapeModel( [36,31] ) 61 | asm = fh.readInASM( asm ) 62 | asm.PCA() 63 | appASM = ApplyASM( asm, i, tr, out, I, "SSD", 5,5 ) 64 | m, tdict = appASM.initialPosition( ) 65 | 66 | TM = TemplateMatcher( 'SSD', 5, True ) 67 | TM.performMatching(I, m) -------------------------------------------------------------------------------- 
def hinge( v, n ):
    """Return the (lower, upper) integer bounds of a length-*n* window
    centered on coordinate *v*: round(v) +/- floor(n/2)."""
    center = round( v )
    half = math.floor( n / 2 )
    return int( center - half ), int( center + half )


def slice( mat, n, pt ):
    """Extract an n x n window of *mat* centered on *pt* (pt.x = column,
    pt.y = row), zero-padding any part of the window that falls outside
    the matrix so the result is always n x n.

    NOTE: shadows the builtin `slice`; the name is kept for compatibility
    with existing callers.
    """
    lc, uc = hinge( pt.x, n )
    lr, ur = hinge( pt.y, n )
    lastRow, lastCol = np.shape( mat )
    lastRow -= 1
    lastCol -= 1

    # Clamp the window to the matrix, recording the padding for each side
    clampLC, clampUC = lc, uc
    clampLR, clampUR = lr, ur
    padBelow = padAbove = padLeft = padRight = 0

    if lc < 0:
        clampLC = 0
        padLeft = -lc
    if uc > lastCol:
        clampUC = lastCol
        padRight = uc - clampUC
    if lr < 0:
        clampLR = 0
        padBelow = -lr
    if ur > lastRow:
        clampUR = lastRow
        padAbove = ur - clampUR

    window = mat[ clampLR : clampUR + 1, clampLC : clampUC + 1 ]
    return np.pad( window, (( padBelow, padAbove ), ( padLeft, padRight )), mode = 'constant' )
def coordOffset( pt, n ):
    """Return (dx, dy): the displacement of *pt* from the center of an
    n x n window (n odd), in (col, row) order.

    BUG FIX: the y offset divided by 1 instead of 2 -- `(n-1)/1` -- so the
    row displacement was measured from the window edge rather than its
    center; both axes now subtract the half-width (n-1)//2 (floor division
    also keeps the result an int under Python 3).
    """
    y = pt.y  # row
    x = pt.x  # col
    half = (n - 1) // 2
    return x - half, y - half
def processRegionSSD( r ):
    """For one 25x25 search region *r*, find the window position whose best
    template SSD score is lowest, and return that position.

    Returns the element of matIxs(25) (a Vector) naming the best-matching
    5x5 window center within the region.

    NOTE(review): tMatch (which template won at each window) is computed
    but never used -- possibly intended to be returned as well; confirm.
    """
    # All (row, col) positions inside the 25x25 search region
    ixs = matIxs( 25 )
    ## 5 x 5 windows for region r
    ws = map( lambda x : slice( r, 5, x ), ixs )

    # ts[w][t]: SSD of template t against window w
    ts = map( lambda w : map( lambda t : SSD( t, w ), genTemplateArr() ) , ws)
    # Per window: index of the best template, and its (lowest) SSD score
    tMatch = map( lambda x : np.argmin( x, axis = 0), ts )
    lMatch = map( lambda x : np.min( x, axis = 0) ,ts )
    # Window whose best-template score is the global minimum
    ix = np.argmin( lMatch )
    return ixs[ix]
def plotRegionResponse( resp ):
    """Save one diagnostic figure per matched region in *resp*.

    Each figure shows: the search region with the matched point marked
    (left, spanning both rows), the template at the region's own index
    (top right), and the template that actually won the match (bottom
    right).  Figures go to the matching/matches output directory, one PNG
    per region index.  *resp* maps region index -> {'pt', 'tIx', ...}.
    """
    ts = genTemplateArr()
    rs = genRegionArr()
    for rix, r in resp.iteritems():
        fig = plt.figure()
        gs = gridspec.GridSpec( 2,2 )
        ax1 = fig.add_subplot( gs[ : , 0 ] )   # region: full left column
        ax2 = fig.add_subplot( gs[ 0, 1 ])     # template at this index
        ax3 = fig.add_subplot( gs[ 1, 1 ]) # sharex = ax1, sharey = ax1)

        ax1.imshow( rs[ rix ] )
        ax1.scatter( r[ 'pt' ][0], r[ 'pt'][1] )
        # Fix axes to the 25x25 region, y inverted (image coordinates)
        ax1.set_xlim( -1,25 )
        ax1.set_ylim( 25, -1 )
        ax2.imshow( ts[ rix ] )
        ax3.imshow( ts[ r[ 'tIx' ] ] )         # the winning template
        plt.savefig( "C:\\Users\\Valerie\\Desktop\\output\\matching\\matches\\%d.png" % rix )
        plt.close()
ebenFace) 301 | plt.imshow( ebenFace ) 302 | DrawFace( ebenShape, plt).drawBold() 303 | DrawFace( ebenShape, plt).labelIndices() 304 | plt.xlim( 300, 425) 305 | plt.ylim( 430, 380 ) 306 | #plt.show() 307 | 308 | 309 | w = [[120,130,140],[115,120,130],[100,115,120]] #template 310 | f = [[110,120,135],[120,115,95],[100,105,90]] #image frame 311 | f2 = [[90,120,230],[120,75,95],[100,11,90]] #image frame 312 | 313 | black = [[0,0,0],[0,0,0],[0,0,0]] 314 | white = [[255,255,255],[255,255,255],[255,255,255]] 315 | big = np.reshape( range( 25 ), (5,5 ) ) 316 | ## max SSD = 255 ^ 2 * number of tiles ( w/o sub mean) 317 | 318 | 319 | 320 | 321 | 322 | 323 | 324 | 325 | 326 | addFace(ebenFace) 327 | DrawFace( m, plt ).drawBold() 328 | DrawFace( m, plt ).labelIndices() 329 | 330 | 331 | 332 | ### Slicing testing 333 | 334 | def sliceTest( big ) : 335 | nr, nc = np.shape( big ) 336 | for i in range( nr ): 337 | for j in range( nc ): 338 | print i, j 339 | print slice( big, 3, Vector( j, i ) ) 340 | print slice( big, 5, Vector(j, i ) ) 341 | 342 | -------------------------------------------------------------------------------- /MicroExpressionDetector/tests/script_tests/pipeline/CASME_video_structure.py: -------------------------------------------------------------------------------- 1 | """ 2 | Gets tallies on how many videos, etc... 
are in the directory tree 3 | """ 4 | 5 | 6 | import os 7 | DATA = "C:\Users\Valerie\Desktop\MicroExpress\CASME2\Cropped\Cropped" 8 | DATA = "C:\Users\Valerie\Desktop\cropped2" 9 | 10 | ### NAVIGATING THE IMAGE DATA PATHS (want to be recursive) 11 | ts = 0 12 | v = 0 13 | tv = 0 14 | f = 0 15 | tf = 0 16 | 17 | accum = {} 18 | for sub in os.listdir( DATA ): 19 | 20 | ts += 1 21 | v = 0 22 | for vid in os.listdir( os.path.join( DATA, sub ) ): 23 | v += 1 24 | tv += 1 25 | f = 0 26 | for frame in os.listdir( os.path.join( DATA, sub, vid ) ): 27 | f += 1 28 | tf += 1 29 | accum.update( {(sub, vid) : f} ) 30 | accum.update( {sub : v} ) 31 | print ts 32 | print tv 33 | print tf 34 | 35 | 36 | 37 | -------------------------------------------------------------------------------- /MicroExpressionDetector/tests/script_tests/pipeline/img_eye_detection_debug.py: -------------------------------------------------------------------------------- 1 | import cv2 2 | from matplotlib import pyplot as plt 3 | 4 | def preview( img ): 5 | plt.imshow( img, cmap = 'gray' ) 6 | plt.show() 7 | 8 | print sub 9 | print vid 10 | print frame 11 | 12 | 13 | 14 | 15 | 16 | I = cv2.imread( os.path.join( DATA, sub, vid, frame ) ) 17 | I1 = cv2.cvtColor( I, cv2.COLOR_BGR2GRAY ) 18 | 19 | preview(I1) 20 | 21 | 22 | eye_cascade = cv2.CascadeClassifier('C:\\OpenCV\\data\\haarcascades\\haarcascade_eye.xml') 23 | eyes = eye_cascade.detectMultiScale( I1 ) 24 | 25 | -------------------------------------------------------------------------------- /MicroExpressionDetector/tests/script_tests/pipeline/simple_video.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import cv2 3 | 4 | VID = "C:\Users\Valerie\Documents\Research\MicroExpress\CASME2_Compressed video\CASME2_compressed\sub01\EP02_01f.avi" 5 | 6 | cap = cv2.VideoCapture(VID) 7 | 8 | while(cap.isOpened()): 9 | face_cascade = 
cv2.CascadeClassifier('C:\\OpenCV\\data\\haarcascades\\haarcascade_frontalface_default.xml') 10 | ret, frame = cap.read() 11 | 12 | if not ret: 13 | break 14 | 15 | gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY) 16 | 17 | cv2.imshow('frame',gray) 18 | 19 | #faces = face_cascade.detectMultiScale( gray, 1.3, 4) 20 | #print faces 21 | if cv2.waitKey(1) & 0xFF == ord('q'): 22 | break 23 | 24 | cap.release() 25 | -------------------------------------------------------------------------------- /MicroExpressionDetector/tests/script_tests/shapes/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vsimonis/MicroExpressionDetector/fd6498aa679a31338c324a5419152bde1dd9427c/MicroExpressionDetector/tests/script_tests/shapes/__init__.py -------------------------------------------------------------------------------- /MicroExpressionDetector/tests/script_tests/shapes/rotation_angle_test.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import math 3 | 4 | def unitV( v ): 5 | return v / np.linalg.norm( v ) 6 | 7 | def angleVcp( v1, v2 ): 8 | v1_u = unitV( v1 ) 9 | v2_u = unitV(v2) 10 | angle = np.arccos(np.dot(v1_u, v2_u)) 11 | if np.isnan(angle): 12 | if (v1_u == v2_u).all(): 13 | return 0.0 14 | else: 15 | return np.pi 16 | return angle 17 | 18 | def angleVtan( v1, v2 ): 19 | 20 | return math.atan2( v1[0], v1[1] ) - math.atan2( v2[0], v2[1] ) 21 | """" 22 | q2 | q1 23 | --------- 24 | q3 | q4 25 | """ 26 | 27 | q1 = ( 3, 4 ) 28 | q2 = ( -3, 4 ) 29 | q3 = ( -3, -4 ) 30 | q4 = ( 3, -4 ) 31 | 32 | ref = ( 0, 1 ) 33 | 34 | ### Cross product method 35 | print "q1: %f" % math.degrees( angleVcp( q1, ref )) 36 | print "q2: %f" % math.degrees( angleVcp( q2, ref )) 37 | print "q3: %f" % math.degrees( angleVcp( q3, ref )) 38 | print "q4: %f" % math.degrees( angleVcp( q4, ref )) 39 | 40 | 41 | ### tan method 42 | print "q1: %f" % math.degrees( angleVtan( 
q1, ref )) 43 | print "q2: %f" % math.degrees( angleVtan( q2, ref )) 44 | print "q3: %f" % math.degrees( angleVtan( q3, ref )) 45 | print "q4: %f" % math.degrees( angleVtan( q4, ref )) -------------------------------------------------------------------------------- /MicroExpressionDetector/tests/script_tests/shapes/shape_aligment_test.py: -------------------------------------------------------------------------------- 1 | import math 2 | import numpy as np 3 | from active_shape_models.ShapeAligner import ShapeAligner 4 | from active_shape_models.ActiveShapeModel import ActiveShapeModel 5 | from shapes.ActiveShape import ActiveShape 6 | 7 | r = math.pi / 4 8 | s = 1.2 9 | t = [[-2],[4] ] 10 | 11 | rMat = [ [ s * math.cos( r ), - s* math.sin( r ) ] , 12 | [ s * math.sin( r ), s * math.cos( r ) ]] 13 | 14 | def p( x, y ): 15 | return [[x],[y]] 16 | 17 | p1 = p( 1,1 ) 18 | p2 = p( 3,4 ) 19 | p3 = p( 4,2 ) 20 | 21 | 22 | def trans( p, rMat, t ): 23 | return np.dot( rMat, p ) + t 24 | 25 | 26 | n1 = trans( p1, rMat, t) 27 | n2 = trans( p2, rMat, t) 28 | n3 = trans( p3, rMat, t) 29 | 30 | 31 | AS1 = ActiveShape( [ (1,1), (3,4), (4,2) ] ) 32 | AS2 = ActiveShape( [(-2, 5.697), (-2.849, 9.940), (-0.303, 9.091 ) ]) 33 | ASM = ActiveShapeModel( [0,1] ) 34 | SA = ShapeAligner( ASM, 100, "" ) 35 | 36 | tdict = SA.calcAlignTransBtwn( AS2, AS1, np.ones( 3 ) ) 37 | 38 | --------------------------------------------------------------------------------