├── MatlabModelForProjection ├── 3DChessboard.mat ├── Demo.m ├── colourImage.cpp ├── colourImage.mexw64 ├── generate3DChessboard.m ├── generate3DChessboard_withWhite.m ├── points2Image.m └── 说明.txt ├── PoseEstimateOnRaspberryPi ├── CaptureDemoVideo.py ├── Video.py ├── Video.pyc ├── calibFile │ ├── calib.npz │ ├── calibImages │ │ ├── 1466257516.jpg │ │ ├── 1466257664.jpg │ │ ├── 1466257682.jpg │ │ ├── 1466257710.jpg │ │ ├── 1466257725.jpg │ │ ├── 1466257735.jpg │ │ ├── 1466257817.jpg │ │ ├── 1466257874.jpg │ │ ├── 1466257937.jpg │ │ ├── 1466257951.jpg │ │ ├── 1466257962.jpg │ │ ├── 1466257970.jpg │ │ ├── 1466258006.jpg │ │ ├── 1466258021.jpg │ │ ├── 1466258033.jpg │ │ └── 640_480 │ │ │ ├── 640_4800.jpg │ │ │ ├── 640_4801.jpg │ │ │ ├── 640_48010.jpg │ │ │ ├── 640_48011.jpg │ │ │ ├── 640_48012.jpg │ │ │ ├── 640_48013.jpg │ │ │ ├── 640_48014.jpg │ │ │ ├── 640_4802.jpg │ │ │ ├── 640_4803.jpg │ │ │ ├── 640_4804.jpg │ │ │ ├── 640_4805.jpg │ │ │ ├── 640_4806.jpg │ │ │ ├── 640_4807.jpg │ │ │ ├── 640_4808.jpg │ │ │ └── 640_4809.jpg │ └── checkerboardPattern.pdf ├── demoVideo │ ├── 1466260688.avi │ ├── Demo.avi │ └── dayDemo.avi ├── detectChessboardRegion │ ├── detectPaper.py │ ├── edged.jpg │ ├── mask.jpg │ ├── nobackground.jpg │ ├── origin.jpg │ └── outline.jpg ├── experiment_data │ └── test.txt ├── icons │ ├── camera.png │ ├── estimate.png │ ├── printer.png │ ├── quit.png │ ├── record.png │ ├── start.png │ ├── stop.png │ └── windowIcon.png ├── inputDialog.py ├── inputDialog.pyc ├── main.py ├── playDemoVideo.py ├── playDemoVideo.pyc ├── poseEstimate.py ├── poseEstimate.pyc ├── recordVideo.py ├── recordVideo.pyc ├── settingDialog.py ├── settingDialog.pyc └── 说明.txt ├── README.md ├── calib.npz ├── camcalib.py ├── chessboard ├── chessboard_gray0.jpg ├── chessboard_gray1.jpg ├── chessboard_gray10.jpg ├── chessboard_gray11.jpg ├── chessboard_gray12.jpg ├── chessboard_gray13.jpg ├── chessboard_gray14.jpg ├── chessboard_gray15.jpg ├── chessboard_gray16.jpg ├── chessboard_gray17.jpg ├── chessboard_gray18.jpg ├── chessboard_gray19.jpg ├── chessboard_gray2.jpg ├── chessboard_gray20.jpg ├── chessboard_gray3.jpg ├── chessboard_gray4.jpg ├── chessboard_gray5.jpg ├── chessboard_gray6.jpg ├── chessboard_gray7.jpg ├── chessboard_gray8.jpg └── chessboard_gray9.jpg ├── img ├── GUI.png ├── chessboard_1.jpg ├── chessboard_2.jpg ├── device.jpg └── work.jpg └── solpnp.py /MatlabModelForProjection/3DChessboard.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/MatlabModelForProjection/3DChessboard.mat -------------------------------------------------------------------------------- /MatlabModelForProjection/Demo.m: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/MatlabModelForProjection/Demo.m -------------------------------------------------------------------------------- /MatlabModelForProjection/colourImage.cpp: -------------------------------------------------------------------------------- 1 | /*COLOURIMAGE A mex implementation to quickly colour an image given a set 2 | *of 2D Points. 
Designed to be called through points2Image.m 3 | * 4 | * Required Inputs: 5 | * locs- nx2 set of 2d points, (x,y) 6 | * colours- nxm set of intensity values 7 | * disk- rxr array giving opacity of sprite to assign to each point loc 8 | * imageSize- 1x2 vector giving the height and width of the output image 9 | * [h,w] 10 | * 11 | * Outputs: 12 | * image- hxwxm output image 13 | * 14 | * References- 15 | * This code was used in generating the results for the journal paper 16 | * Multi-modal sensor calibration using a gradient orientation measure 17 | * http://www.zjtaylor.com/welcome/download_pdf?pdf=JFR2013.pdf as well 18 | * as several of my other publications 19 | * 20 | * This code was written by Zachary Taylor 21 | * zacharyjeremytaylor@gmail.com 22 | * http://www.zjtaylor.com 23 | */ 24 | 25 | #include "mex.h" 26 | #include "matrix.h" 27 | 28 | void mexFunction(int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[]) { 29 | 30 | //check inputs 31 | if (nrhs != 4 || (nlhs != 1 && nlhs != 0)) 32 | mexErrMsgIdAndTxt("COLOURIMAGE:BadNArgs","Need 4 inputs and 1 output."); 33 | 34 | //check input types 35 | if(mxGetClassID(prhs[0]) != mxDOUBLE_CLASS) 36 | mexErrMsgIdAndTxt("COLOURIMAGE:BadArg","locs must be of type double"); 37 | if(mxGetClassID(prhs[1]) != mxDOUBLE_CLASS) 38 | mexErrMsgIdAndTxt("COLOURIMAGE:BadArg","colours must be of type double"); 39 | if(mxGetClassID(prhs[2]) != mxDOUBLE_CLASS) 40 | mexErrMsgIdAndTxt("COLOURIMAGE:BadArg","disk must be of type double"); 41 | if(mxGetClassID (prhs[3]) != mxUINT32_CLASS) 42 | mexErrMsgIdAndTxt("COLOURIMAGE:BadArg","imageSize must be of type uint32"); 43 | 44 | //check number of dimensions 45 | for(int i = 0; i < 4; i++){ 46 | if(mxGetNumberOfDimensions(prhs[i]) != 2) 47 | mexErrMsgIdAndTxt("COLOURIMAGE:BadArg","All arguments must be 2D"); 48 | } 49 | 50 | //check sizes 51 | if(mxGetDimensions(prhs[0])[1] != 2) 52 | mexErrMsgIdAndTxt("COLOURIMAGE:BadArg","locs must have 2 columns [x,y]"); 53 | if(mxGetDimensions(prhs[0])[0] != mxGetDimensions(prhs[1])[0]) 54 | mexErrMsgIdAndTxt("COLOURIMAGE:BadArg","locs and colours must be the same length"); 55 | if((mxGetDimensions(prhs[2])[0] != mxGetDimensions(prhs[2])[1])) 56 | mexErrMsgIdAndTxt("COLOURIMAGE:BadArg","disk must be a square matrix"); 57 | if((mxGetDimensions(prhs[3])[0] != 1) || (mxGetDimensions(prhs[3])[1]) != 2) 58 | mexErrMsgIdAndTxt("COLOURIMAGE:BadArg","imageSize must be 1x2"); 59 | 60 | //get value of remaining input variables 61 | double * locs = mxGetPr(prhs[0]); 62 | double * colours = mxGetPr(prhs[1]); 63 | double * disk = mxGetPr(prhs[2]); 64 | 65 | //get required variable sizes 66 | size_t length = mxGetDimensions(prhs[0])[0]; 67 | size_t imageHeight = ((uint32_T *) mxGetData(prhs[3]))[0]; 68 | size_t imageWidth = ((uint32_T *) mxGetData(prhs[3]))[1]; 69 | size_t imageDepth = mxGetDimensions(prhs[1])[1]; 70 | int pointRadius = (mxGetDimensions(prhs[2])[0]-1)/2; 71 | 72 | //check size 73 | if((imageHeight == 0) || (imageWidth == 0)) 74 | mexErrMsgIdAndTxt("COLOURIMAGE:BadArg","imageSize cannot be zero"); 75 | 76 | //setup output 77 | const mwSize outSize[] = {imageHeight, imageWidth, imageDepth}; 78 | plhs[0] = mxCreateNumericArray(3, outSize, mxDOUBLE_CLASS, mxREAL); 79 | double *image = mxGetPr(plhs[0]); 80 | 81 | //colour image 82 | for(int i = 0; i < length; i++){ 83 | for(int iy = -pointRadius; iy < pointRadius; iy++){ 84 | for(int ix = -pointRadius; ix < pointRadius; ix++){ 85 | //check if point inside image 86 | bool valid = (locs[i]+ix >= 0) && (locs[i]+ix < 
imageWidth) && (locs[i+length]+iy >= 0) && (locs[i+length]+iy < imageHeight); 87 | 88 | if(valid){ 89 | for(int id = 0; id < imageDepth; id++){ 90 | //get opacity of point 91 | double opac = disk[iy+pointRadius + (2*pointRadius+1)*(ix+pointRadius)]; 92 | //get index of point 93 | int idx = locs[i+length]+iy + (locs[i]+ix)*imageHeight + id*imageHeight*imageWidth; 94 | //add point to image 95 | image[idx] = (1-opac)*image[idx] + opac*colours[i + length*id]; 96 | } 97 | } 98 | } 99 | } 100 | } 101 | } -------------------------------------------------------------------------------- /MatlabModelForProjection/colourImage.mexw64: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/MatlabModelForProjection/colourImage.mexw64 -------------------------------------------------------------------------------- /MatlabModelForProjection/generate3DChessboard.m: -------------------------------------------------------------------------------- 1 | function [points] = generate3DChessboard (step, cell_y, cell_x, cell_size) 2 | % step=0.5; 3 | % cell_size = 50; 4 | % cell_y = 7; 5 | % cell_x = 10; 6 | 7 | x_num = cell_x * cell_size; 8 | y_num = cell_y * cell_size; 9 | 10 | x_row = zeros(1, x_num); 11 | index1 = 1; 12 | for i = 0: x_num-1 13 | x_row(index1) = i*step; 14 | index1 = index1 + 1; 15 | end 16 | x = repmat(x_row, y_num, 1); 17 | 18 | y_col = zeros(y_num,1); 19 | index2 = 1; 20 | for i = 0 : y_num-1 21 | y_col(index2) = i*step; 22 | index2 = index2 + 1; 23 | end 24 | y = repmat(y_col, 1, x_num); 25 | z = zeros(y_num, x_num); 26 | 27 | color1=1; 28 | color2=color1; 29 | color=zeros(cell_y * cell_size,cell_x * cell_size); 30 | for i=0:(cell_y-1) 31 | color2=color1; 32 | for j=0:(cell_x-1) 33 | if color2==1 34 | color(i*cell_size+1:(i+1)*cell_size-1,j*cell_size+1:(j+1)*cell_size-1)=color2; 35 | end 36 | color2=~color2; 37 | end 38 | color1=~color1; 39 | end 40 | c = repmat(color(:),1, 3); 41 | points = [x(:),y(:),z(:),c]; 42 | save('.\3DChessboard.mat', 'points'); 43 | %figure; 44 | %scatter3(points(:,1),points(:,2),points(:,3),20,points(:,4:6),'fill'); -------------------------------------------------------------------------------- /MatlabModelForProjection/generate3DChessboard_withWhite.m: -------------------------------------------------------------------------------- 1 | function [points] = generate3DChessboard_withWhite (step, cell_y, cell_x, cell_size) 2 | % step=0.5; 3 | % cell_size = 50; 4 | % cell_y = 7; 5 | % cell_x = 10; 6 | 7 | x_num = (cell_x+4) * cell_size; 8 | y_num = (cell_y+4) * cell_size; 9 | 10 | x_row = zeros(1, x_num); 11 | index1 = 1; 12 | for i = 0: x_num-1 13 | x_row(index1) = i*step; 14 | index1 = index1 + 1; 15 | end 16 | x = repmat(x_row, y_num, 1); 17 | 18 | y_col = zeros(y_num,1); 19 | index2 = 1; 20 | for i = 0 : y_num-1 21 | y_col(index2) = i*step; 22 | index2 = index2 + 1; 23 | end 24 | y = repmat(y_col, 1, x_num); 25 | z = zeros(y_num, x_num); 26 | 27 | color1=1; 28 | color2=color1; 29 | color=zeros(cell_y * cell_size,cell_x * cell_size); 30 | for i=0:(cell_y-1) 31 | color2=color1; 32 | for j=0:(cell_x-1) 33 | if color2==1 34 | color(i*cell_size+1:(i+1)*cell_size-1,j*cell_size+1:(j+1)*cell_size-1)=color2; 35 | end 36 | color2=~color2; 37 | end 38 | color1=~color1; 39 | end 40 | color_back = zeros((cell_y+4) * cell_size, (cell_x+4) * cell_size); 41 | 
color_back(2*cell_size:2*cell_size+cell_y*cell_size-1,2*cell_size:2*cell_size+cell_x*cell_size-1)=color; 42 | 43 | c = repmat(color_back(:),1, 3); 44 | points = [x(:),y(:),z(:),1-c]; 45 | save('.\3DChessboard.mat', 'points'); 46 | figure; 47 | scatter3(points(:,1),points(:,2),points(:,3),20,points(:,4:6),'fill'); -------------------------------------------------------------------------------- /MatlabModelForProjection/points2Image.m: -------------------------------------------------------------------------------- 1 | function [ image ] = points2Image( points, imageSize, cam, tform, D, pointRadius, opacity, useMex ) 2 | %POINTS2IMAGE Uses a camera model to project 3D points onto a plane to 3 | % Required Inputs: 4 | % points- nxm set of 3d points, the 1st 3 columns are the point locations 5 | % (x,y,z) and the remaining coloumns the intensity values (i1,i2,etc), if 6 | % no intensity values are given, all points will have an intensity of 1 7 | % imageSize- 1x2 vector giving the height and width of the output image 8 | % [h,w] 9 | % cam- either a 3x4 camera projection matrix or a scalar focal length 10 | % 11 | % Optional Inputs: (will give default values on empty array []) 12 | % tform- 4x4 transform to apply to points before projecting them (default 13 | % identity matrix) 14 | % pointRadius- radius of point in pixels (default 1) 15 | % opacity- scalar with range 0 to 1, specifies opacity of points (default 16 | % 1) 17 | % useMex- use a mex file to speed up process (default true) 18 | % 19 | % Outputs: 20 | % image- hxwx(m-3) output image of given size with (m-3) colour bands 21 | 22 | %% check inputs 23 | 24 | validateattributes(points, {'numeric'},{'2d'}); 25 | if(size(points,2) < 3) 26 | error('points must have atleast 3 columns, currently has %i',size(points,2)); 27 | end 28 | validateattributes(imageSize, {'numeric'},{'size',[1,2],'positive','integer'}); 29 | 30 | if(size(cam,2) == 3) 31 | cam(end,4) = 0; 32 | end 33 | validateattributes(cam, {'numeric'},{'size',[3,4]}); 34 | 35 | if(nargin < 4) 36 | tform = []; 37 | end 38 | if(isempty(tform)) 39 | tform = eye(4); 40 | else 41 | validateattributes(tform, {'numeric'},{'size',[4,4]}); 42 | end 43 | 44 | if(nargin < 5) 45 | D = []; 46 | end 47 | if(isempty(D)) 48 | D = [0,0,0,0,0]; 49 | else 50 | validateattributes(D, {'numeric'},{'nrows',1}); 51 | end 52 | if(size(D,2) > 5) 53 | error('distortion vector D, must have 5 or less columns currently has %i',size(D,2)); 54 | end 55 | D = double(D); 56 | 57 | if(nargin < 6) 58 | pointRadius = []; 59 | end 60 | if(isempty(pointRadius)) 61 | pointRadius = 1; 62 | else 63 | validateattributes(pointRadius, {'numeric'},{'scalar','positive','integer'}); 64 | end 65 | 66 | if(nargin < 7) 67 | opacity = []; 68 | end 69 | if(isempty(opacity)) 70 | opacity = 1; 71 | else 72 | validateattributes(opacity, {'numeric'},{'scalar','positive'}); 73 | if((opacity > 1) || (opacity < 0)) 74 | error('Opacity must be in range 0 to 1'); 75 | end 76 | end 77 | 78 | if(nargin < 8) 79 | useMex = []; 80 | end 81 | if(isempty(useMex)) 82 | useMex = true; 83 | else 84 | validateattributes(useMex, {'logical'},{'scalar'}); 85 | end 86 | 87 | %convert elements which require precision to doubles 88 | points = double(points); 89 | tform = double(tform); 90 | cam = double(cam); 91 | 92 | %% run image generation 93 | 94 | %create filter 95 | disk = fspecial('disk',pointRadius); 96 | disk = opacity.*disk./max(disk(:)); 97 | 98 | %split distortion into radial and tangential 99 | if(size(D,2) < 5) 100 | D(1,5) = 0; 101 | end 102 | 
k = [D(1),D(2),D(5)]; 103 | p = [D(3),D(4)]; 104 | 105 | %split points into locations and colour 106 | locs = [points(:,1:3), ones(size(points,1),1)]; 107 | if(size(points,2) > 3) 108 | colours = points(:,4:end); 109 | else 110 | colours = ones(size(points,1),1); 111 | end 112 | 113 | %move camera position 114 | locs = (tform*locs')'; 115 | 116 | %sort points by distance from camera 117 | dist = sum(locs(:,1:3).^2,2); 118 | [~,idx] = sort(dist,'descend'); 119 | locs = locs(idx,:); 120 | colours = colours(idx,:); 121 | 122 | %% Distort 123 | %reject points behind camera 124 | valid = locs(:,3) > 0; 125 | locs = locs(valid,:); 126 | colours = colours(valid,:); 127 | 128 | %project onto a plane using normalized image coordinates 129 | x = locs(:,1)./locs(:,3); 130 | y = locs(:,2)./locs(:,3); 131 | 132 | %find radial distance 133 | r2 = x.^2 + y.^2; 134 | 135 | %find tangential distortion 136 | xTD = 2*p(1)*x.*y + p(2).*(r2 + 2*x.^2); 137 | yTD = p(1)*(r2 + 2*y.^2) + 2*p(2)*x.*y; 138 | 139 | %find radial distortion 140 | xRD = x.*(1 + k(1)*r2 + k(2)*r2.^2 + k(3)*r2.^3); 141 | yRD = y.*(1 + k(1)*r2 + k(2)*r2.^2 + k(3)*r2.^3); 142 | 143 | %combine distorted points 144 | x = xRD + xTD; 145 | y = yRD + yTD; 146 | 147 | %project distorted points back into 3D 148 | locs = [x,y,ones(size(x,1),1)].*repmat(locs(:,3),1,3); 149 | locs = [locs, ones(size(locs,1),1)]; 150 | %% 151 | %project points into 2D 152 | locs = (cam*locs')'; 153 | keep = locs(:,3) > 0; 154 | locs = locs(keep,:); 155 | colours = colours(keep,:); 156 | locs = locs(:,1:2)./repmat(locs(:,3),1,2); 157 | locs = round(locs); 158 | 159 | %remove unseen points 160 | keep = (locs(:,1) >= -size(disk,1)) & ... 161 | (locs(:,1) < (imageSize(2) + size(disk,1))) & ... 162 | (locs(:,2) >= -size(disk,2)) & ... 163 | (locs(:,2) < (imageSize(1) + size(disk,2))); 164 | 165 | locs = locs(keep,:); 166 | colours = colours(keep,:); 167 | % disp(size(locs)) 168 | % disp(size(colours)) 169 | %form image 170 | if(useMex) 171 | %compile if required 172 | if(exist('colourImage') ~= 3) 173 | mex colourImage.cpp; 174 | end 175 | image = colourImage(locs, colours, disk, uint32(imageSize)); 176 | else 177 | %run slow non-mex version of code 178 | image = zeros([imageSize,size(colours,2)]); 179 | for i = 1:size(locs,1) 180 | for iy = -pointRadius:pointRadius 181 | for ix = -pointRadius:pointRadius 182 | valid = (locs(i,1)+ix >= 0) & ... 183 | (locs(i,1)+ix < imageSize(2)) & ... 184 | (locs(i,2)+iy >= 0) & ... 
185 | (locs(i,2)+iy < imageSize(1)); 186 | 187 | if(valid) 188 | opac = disk(iy+pointRadius+1,ix+pointRadius+1); 189 | col = (1-opac)*image(locs(i,2)+iy+1, locs(i,1)+ix+1,:); 190 | image(locs(i,2)+iy+1, locs(i,1)+ix+1,:) = col(:)' + opac*colours(i,:); 191 | end 192 | end 193 | end 194 | end 195 | end 196 | 197 | end -------------------------------------------------------------------------------- /MatlabModelForProjection/说明.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/MatlabModelForProjection/说明.txt -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/CaptureDemoVideo.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import cv2 3 | 4 | cap = cv2.VideoCapture(0) 5 | 6 | # Define the codec and create VideoWriter object 7 | fourcc = cv2.VideoWriter_fourcc(*'XVID') 8 | out = cv2.VideoWriter('Demo.avi',fourcc, 20.0, (640,480)) 9 | 10 | while(cap.isOpened()): 11 | ret, frame = cap.read() 12 | if ret==True: 13 | out.write(frame) 14 | 15 | cv2.imshow('frame',frame) 16 | if cv2.waitKey(1) & 0xFF == ord('q'): 17 | break 18 | else: 19 | break 20 | 21 | # Release everything if job is finished 22 | cap.release() 23 | out.release() 24 | cv2.destroyAllWindows() -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/Video.py: -------------------------------------------------------------------------------- 1 | import cv2 2 | import numpy as np 3 | from PyQt4 import QtGui 4 | from imutils.video import VideoStream 5 | import imutils 6 | import time 7 | class Video(): 8 | def __init__(self): 9 | self.vs = VideoStream(usePiCamera=1 > 0).start() 10 | time.sleep(2.0) 11 | self.currentFrame = np.array([]) 12 | self.raw_img = np.array([]) 13 | 14 | 15 | def captureRawFrame(self): 16 | """ 17 | capture frame and reverse RBG BGR and return opencv image 18 | """ 19 | rawFrame = self.vs.read() 20 | rawFrame = imutils.resize(rawFrame, width=640) 21 | self.raw_img = rawFrame 22 | #return rawFrame 23 | 24 | def convertFrame(self): 25 | """ 26 | converts frame to format suitable for QtGui 27 | """ 28 | try: 29 | self.currentFrame = cv2.cvtColor(self.raw_img, cv2.COLOR_BGR2RGB) 30 | height, width = self.currentFrame.shape[:2] 31 | img = QtGui.QImage(self.currentFrame, 32 | width, 33 | height, 34 | QtGui.QImage.Format_RGB888) 35 | img = QtGui.QPixmap.fromImage(img) 36 | #self.previousFrame = self.currentFrame 37 | img = img.scaledToHeight(480) 38 | img = img.scaledToWidth(360) 39 | return img 40 | except: 41 | return None 42 | -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/Video.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/Video.pyc -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/calibFile/calib.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/calibFile/calib.npz 
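calib.npz above is the calibration archive that inputDialog.py writes with np.savez (keys cmx, dist, rvecs, tvecs) and that poseEstimate.py later reads back for solvePnP. A minimal sketch for inspecting it, assuming the script is run from the PoseEstimateOnRaspberryPi directory so the relative path resolves:

# Inspect the calibration archive produced by inputDialog.py.
# Path is an assumption: run from PoseEstimateOnRaspberryPi/.
import numpy as np

calib = np.load('./calibFile/calib.npz')
cmx = calib['cmx']    # 3x3 camera (intrinsic) matrix
dist = calib['dist']  # distortion coefficients [k1, k2, p1, p2, k3]

print('camera matrix:')
print(cmx)
print('distortion coefficients: %s' % dist.ravel())
print('number of calibration views: %d' % len(calib['rvecs']))

poseEstimate.py only uses the cmx and dist entries; rvecs and tvecs are the per-view board poses returned by cv2.calibrateCamera and are kept mainly for reference.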
-------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/calibFile/calibImages/1466257516.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/calibFile/calibImages/1466257516.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/calibFile/calibImages/1466257664.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/calibFile/calibImages/1466257664.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/calibFile/calibImages/1466257682.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/calibFile/calibImages/1466257682.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/calibFile/calibImages/1466257710.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/calibFile/calibImages/1466257710.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/calibFile/calibImages/1466257725.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/calibFile/calibImages/1466257725.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/calibFile/calibImages/1466257735.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/calibFile/calibImages/1466257735.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/calibFile/calibImages/1466257817.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/calibFile/calibImages/1466257817.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/calibFile/calibImages/1466257874.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/calibFile/calibImages/1466257874.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/calibFile/calibImages/1466257937.jpg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/calibFile/calibImages/1466257937.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/calibFile/calibImages/1466257951.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/calibFile/calibImages/1466257951.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/calibFile/calibImages/1466257962.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/calibFile/calibImages/1466257962.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/calibFile/calibImages/1466257970.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/calibFile/calibImages/1466257970.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/calibFile/calibImages/1466258006.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/calibFile/calibImages/1466258006.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/calibFile/calibImages/1466258021.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/calibFile/calibImages/1466258021.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/calibFile/calibImages/1466258033.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/calibFile/calibImages/1466258033.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/calibFile/calibImages/640_480/640_4800.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/calibFile/calibImages/640_480/640_4800.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/calibFile/calibImages/640_480/640_4801.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/calibFile/calibImages/640_480/640_4801.jpg -------------------------------------------------------------------------------- 
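The timestamped JPEGs and the 640_480/ set in this folder are the chessboard views used for calibration. A standalone sketch of the same pipeline that the calibrateCamera class in inputDialog.py runs over such a folder is given below; the image path and the board geometry (a 10x9-square board with 12 mm squares, i.e. 9x8 interior corners, matching the GUI defaults) are assumptions for illustration:

# Minimal standalone version of the calibration loop in inputDialog.py.
# Assumptions: run from PoseEstimateOnRaspberryPi/, board has 9x8 interior
# corners and 12 mm squares (the GUI defaults).
import glob
import numpy as np
import cv2

pattern = (9, 8)   # interior corners = squares - 1 per side
square = 12.0      # square edge length in mm

# Board-frame coordinates of the corners (z = 0 plane), reused for every view.
objp = np.zeros((pattern[0] * pattern[1], 3), np.float32)
objp[:, :2] = np.mgrid[0:pattern[0], 0:pattern[1]].T.reshape(-1, 2) * square

criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 30, 0.001)
objpoints, imgpoints = [], []
for fname in glob.glob('./calibFile/calibImages/640_480/*.jpg'):
    grey = cv2.cvtColor(cv2.imread(fname), cv2.COLOR_BGR2GRAY)
    found, corners = cv2.findChessboardCorners(grey, pattern, None)
    if found:
        cv2.cornerSubPix(grey, corners, (11, 11), (-1, -1), criteria)
        imgpoints.append(corners)
        objpoints.append(objp)

ret, cmx, dist, rvecs, tvecs = cv2.calibrateCamera(
    objpoints, imgpoints, grey.shape[::-1], None, None)
np.savez('./calibFile/calib.npz', cmx=cmx, dist=dist, rvecs=rvecs, tvecs=tvecs)
print('RMS reprojection error: %.4f' % ret)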
/PoseEstimateOnRaspberryPi/calibFile/calibImages/640_480/640_48010.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/calibFile/calibImages/640_480/640_48010.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/calibFile/calibImages/640_480/640_48011.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/calibFile/calibImages/640_480/640_48011.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/calibFile/calibImages/640_480/640_48012.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/calibFile/calibImages/640_480/640_48012.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/calibFile/calibImages/640_480/640_48013.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/calibFile/calibImages/640_480/640_48013.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/calibFile/calibImages/640_480/640_48014.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/calibFile/calibImages/640_480/640_48014.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/calibFile/calibImages/640_480/640_4802.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/calibFile/calibImages/640_480/640_4802.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/calibFile/calibImages/640_480/640_4803.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/calibFile/calibImages/640_480/640_4803.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/calibFile/calibImages/640_480/640_4804.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/calibFile/calibImages/640_480/640_4804.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/calibFile/calibImages/640_480/640_4805.jpg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/calibFile/calibImages/640_480/640_4805.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/calibFile/calibImages/640_480/640_4806.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/calibFile/calibImages/640_480/640_4806.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/calibFile/calibImages/640_480/640_4807.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/calibFile/calibImages/640_480/640_4807.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/calibFile/calibImages/640_480/640_4808.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/calibFile/calibImages/640_480/640_4808.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/calibFile/calibImages/640_480/640_4809.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/calibFile/calibImages/640_480/640_4809.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/calibFile/checkerboardPattern.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/calibFile/checkerboardPattern.pdf -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/demoVideo/1466260688.avi: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/demoVideo/1466260688.avi -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/demoVideo/Demo.avi: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/demoVideo/Demo.avi -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/demoVideo/dayDemo.avi: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/demoVideo/dayDemo.avi -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/detectChessboardRegion/detectPaper.py: 
-------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Sun Jun 12 09:12:34 2016 4 | 5 | @author: li_ch 6 | 7 | detect paper in an image 8 | """ 9 | # import the necessary packages 10 | 11 | import numpy as np 12 | 13 | import cv2 14 | 15 | 16 | image = cv2.imread('H:/blackEdge/1465780496_white.jpg') 17 | 18 | # convert the image to grayscale, blur it, and find edges 19 | # in the image 20 | gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY) 21 | gray = cv2.GaussianBlur(gray, (5, 5), 0) 22 | edged = cv2.Canny(gray, 75, 200) 23 | 24 | # show the original image and the edge detected image 25 | print "STEP 1: Edge Detection" 26 | cv2.imshow("Image", image) 27 | cv2.imshow("Edged", edged) 28 | cv2.waitKey(0) 29 | cv2.destroyAllWindows() 30 | #cv2.imwrite('H:/origin.jpg',image) 31 | #cv2.imwrite('H:/edged.jpg',edged) 32 | # find the contours in the edged image, keeping only the 33 | # largest ones, and initialize the screen contour 34 | (_, cnts, _) = cv2.findContours(edged.copy(), cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE) 35 | cnts = sorted(cnts, key = cv2.contourArea, reverse = True)[:5] 36 | 37 | # loop over the contours 38 | for c in cnts: 39 | # approximate the contour 40 | peri = cv2.arcLength(c, True) 41 | approx = cv2.approxPolyDP(c, 0.02 * peri, True) 42 | 43 | # if our approximated contour has four points, then we 44 | # can assume that we have found our screen 45 | if len(approx) == 4: 46 | screenCnt = approx 47 | break 48 | 49 | # show the contour (outline) of the piece of paper 50 | print "STEP 2: Find contours of paper" 51 | cv2.drawContours(image, [screenCnt], -1, (0, 255, 0), 2) 52 | cv2.imshow("Outline", image) 53 | cv2.waitKey(0) 54 | cv2.destroyAllWindows() 55 | #cv2.imwrite('H:/outline.jpg',image) 56 | 57 | # show the mask of the piece of paper 58 | mask = np.zeros(gray.shape,np.uint8) 59 | cv2.drawContours(mask,[screenCnt],0,255,-1) 60 | cv2.imshow('mask',mask),cv2.waitKey(0),cv2.destroyAllWindows() 61 | #cv2.imwrite('H:/mask.jpg',mask) 62 | mask = mask/255 63 | result = image.copy() 64 | result[:,:,0] = result[:,:,0]*mask 65 | result[:,:,1] = result[:,:,1]*mask 66 | result[:,:,2] = result[:,:,2]*mask 67 | cv2.imshow('without background',result),cv2.waitKey(0),cv2.destroyAllWindows() 68 | cv2.imwrite('H:/white.jpg',result) -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/detectChessboardRegion/edged.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/detectChessboardRegion/edged.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/detectChessboardRegion/mask.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/detectChessboardRegion/mask.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/detectChessboardRegion/nobackground.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/detectChessboardRegion/nobackground.jpg 
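detectPaper.py above isolates the sheet of paper carrying the chessboard and writes out the edge map, the contour outline, the mask, and the background-free image listed in the neighbouring entries. How the mask feeds the rest of the pipeline is not spelled out in this folder, so the following is only a sketch under the assumption that the masked region is used to restrict corner search to the paper; the variables gray, mask and image continue from the script above, and the 9x8 pattern size mirrors the GUI defaults:

# Assumed follow-on step (not part of detectPaper.py): look for the
# chessboard only inside the detected paper region.
pattern = (9, 8)  # interior corners; assumption matching the 10x9-square default
roi = gray * mask.astype(gray.dtype)   # mask is 0/1 after the division by 255
found, corners = cv2.findChessboardCorners(roi, pattern, None)
if found:
    cv2.drawChessboardCorners(image, pattern, corners, found)
    cv2.imshow('corners in paper region', image)
    cv2.waitKey(0)
    cv2.destroyAllWindows()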
-------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/detectChessboardRegion/origin.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/detectChessboardRegion/origin.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/detectChessboardRegion/outline.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/detectChessboardRegion/outline.jpg -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/experiment_data/test.txt: -------------------------------------------------------------------------------- 1 | 0.1364 -0.0138 -0.0062 -53.7190 -26.9249 214.7653 0.169267185153 2 | 0.1343 -0.0145 -0.0047, -53.6357 -27.0765 214.9984,0.144827237548 3 | -0.1580 0.0186 -0.0084, -53.6543 -30.2860 191.5224,0.244263675031 4 | -0.1583 0.0230 -0.0088, -53.6693 -30.1254 191.7525,0.235874817847 5 | 0.2663 -0.0214 0.0110, -55.1959 -38.8334 210.3663,0.156394249292 6 | 0.2691 -0.0220 0.0116, -55.1775 -38.8431 210.0449,0.130041065761 7 | -0.0661 -0.0987 0.0122, -54.3342 -40.6183 187.7648,0.135347900334 8 | -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/icons/camera.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/icons/camera.png -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/icons/estimate.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/icons/estimate.png -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/icons/printer.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/icons/printer.png -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/icons/quit.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/icons/quit.png -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/icons/record.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/icons/record.png -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/icons/start.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/icons/start.png -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/icons/stop.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/icons/stop.png -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/icons/windowIcon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/icons/windowIcon.png -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/inputDialog.py: -------------------------------------------------------------------------------- 1 | 2 | # -*- coding: utf-8 -*- 3 | from PyQt4.QtCore import * 4 | from PyQt4.QtGui import * 5 | import sys 6 | import numpy as np 7 | import cv2 8 | import glob 9 | import pdb 10 | 11 | QTextCodec.setCodecForTr(QTextCodec.codecForName("utf8")) 12 | 13 | class calibrateCamera(): 14 | def __init__(self, img_list, height, width, cell_size): 15 | # Step 1: Data prep 16 | self.img_list = img_list 17 | # self.img_list = glob.glob('./chessboard/chessboard_gray*.jpg') 18 | self.img_list_detected = [] 19 | self.cell_height = height-1 20 | self.cell_width = width-1 21 | self.cell_size = cell_size 22 | # set opts 23 | self.objp = np.zeros((self.cell_width*self.cell_height,3), np.float32) 24 | self.objp[:,:2] = np.mgrid[0:self.cell_height,0:self.cell_width].T.reshape(-1,2) 25 | self.objp = self.objp * self.cell_size 26 | self.size = (self.cell_height, self.cell_width) 27 | 28 | # Arrays to store object points and image points from all the images. 29 | self.objpoints = [] # 3d point in real world space 30 | self.imgpoints = [] # 2d points in image plane. 
31 | 32 | def calibration(self): 33 | 34 | for fname in self.img_list: 35 | img = cv2.imread(fname) 36 | grey = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) 37 | 38 | ret, corners = cv2.findChessboardCorners(grey, self.size, None) 39 | cv2.drawChessboardCorners(img, self.size, corners,ret) 40 | 41 | # if found, show imgs 42 | if ret: 43 | criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 30, 0.001) 44 | cv2.cornerSubPix(grey,corners,(11,11),(-1,-1),criteria) 45 | self.imgpoints.append(corners) 46 | self.objpoints.append(self.objp) 47 | self.img_list_detected.append(fname) 48 | print fname 49 | 50 | cv2.imshow('img',img) 51 | cv2.waitKey(500) 52 | 53 | cv2.destroyAllWindows() 54 | 55 | 56 | # Step 2: Calibration 57 | # shape[::-1]: (480,640) => (640,480) 58 | ret, cmx, dist, rvecs, tvecs = cv2.calibrateCamera( 59 | self.objpoints, self.imgpoints, grey.shape[::-1],None,None) 60 | print cmx 61 | print dist 62 | # save calibration result 63 | np.savez('./calibFile/calib.npz', cmx=cmx, dist=dist, rvecs=rvecs, tvecs=tvecs) 64 | 65 | class InputDlg(QDialog): 66 | def __init__(self ,parent=None): 67 | super(InputDlg ,self).__init__(parent) 68 | self.setWindowIcon(QIcon('./icons/windowIcon.png')) 69 | self.height = 10 70 | self.width = 9 71 | self.size = 12 72 | label1=QLabel( self.tr("图片路径")) 73 | label2=QLabel( self.tr("棋盘格长(个)")) 74 | label3=QLabel( self.tr("棋盘格宽(个)")) 75 | label4=QLabel( self .tr("单格边长(毫米)")) 76 | 77 | self.heightLabel=QLabel("10") 78 | self.heightLabel.setFrameStyle(QFrame.Panel|QFrame.Sunken ) 79 | self.widthLabel=QLabel("9") 80 | self.widthLabel.setFrameStyle(QFrame.Panel|QFrame.Sunken ) 81 | self.sizeLabel=QLabel("12") 82 | self.sizeLabel.setFrameStyle(QFrame.Panel|QFrame.Sunken ) 83 | 84 | picButton=QPushButton( u"选择图片文件") 85 | heightButton=QPushButton( u"修改") 86 | widthButton=QPushButton( u"修改") 87 | sizeButton=QPushButton( u"修改" ) 88 | calibButton = QPushButton(u"开始标定") 89 | 90 | self.connect(picButton,SIGNAL( "clicked()"),self.slotPic) 91 | self.connect(heightButton,SIGNAL( "clicked()"),self.slotHeight) 92 | self.connect(widthButton,SIGNAL( "clicked()"),self.slotWidth) 93 | self.connect(sizeButton,SIGNAL( "clicked()"),self.slotSize) 94 | self.connect(calibButton, SIGNAL("clicked()"), self.calib) 95 | 96 | layout=QGridLayout() 97 | layout.addWidget(label1,0,0, 1,0) 98 | layout.addWidget(label2,2,0) 99 | layout.addWidget(label3,3,0) 100 | layout.addWidget(label4,4,0) 101 | layout.addWidget(self.heightLabel, 2, 1) 102 | layout.addWidget(self.widthLabel, 3, 1) 103 | layout.addWidget(self.sizeLabel, 4, 1) 104 | layout.addWidget (heightButton,2,2) 105 | layout.addWidget (widthButton,3,2) 106 | layout.addWidget (sizeButton,4,2) 107 | layout.addWidget(picButton, 0, 1, 1, 2) 108 | layout.addWidget(calibButton, 5,1, 5,2) 109 | 110 | self. setLayout( layout) 111 | 112 | self.setWindowTitle(self.tr("标定相机")) 113 | 114 | def slotPic(self): 115 | img_QStringList = QFileDialog.getOpenFileNames(self, 116 | "多文件选择", 117 | "./", 118 | "All Files (*);;Text Files (*.jpg)") 119 | self.img_list = str(img_QStringList.join("")).split("") 120 | 121 | def slotHeight(self): 122 | h, ok = QInputDialog.getInteger(self, self.tr("棋盘格长"), 123 | self.tr("请输入棋盘格长(个):"), 124 | int(self.heightLabel.text()), 0, 150) 125 | self.height = int(h) 126 | if ok: 127 | self.heightLabel.setText(str(h)) 128 | 129 | def slotWidth(self): 130 | w,ok=QInputDialog. getInteger(self,self.tr("棋盘格宽"), 131 | self.tr("请输入棋盘格宽(个):"), 132 | int(self. widthLabel. 
text()),0,150) 133 | 134 | self.width = int(w) 135 | if ok: 136 | self.widthLabel.setText(str(w)) 137 | 138 | def slotSize(self): 139 | s,ok =QInputDialog.getDouble(self ,self.tr("棋盘格边长"), 140 | self.tr("请输入棋盘格边长(毫米):"), 141 | float(self.sizeLabel.text()) ,0 ,150.00) 142 | self.size = float(s) 143 | if ok: 144 | self.sizeLabel.setText(str(s)) 145 | 146 | def calib(self): 147 | calib = calibrateCamera(self.img_list, self.height, self.width, self.size) 148 | calib.calibration() 149 | -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/inputDialog.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/inputDialog.pyc -------------------------------------------------------------------------------- /PoseEstimateOnRaspberryPi/main.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import sys 3 | import numpy as np 4 | import cv2 5 | import time 6 | import os 7 | 8 | from PyQt4.QtGui import * 9 | from PyQt4.QtCore import * 10 | 11 | from Video import Video 12 | from playDemoVideo import playDemoVideo 13 | from recordVideo import recordVideo 14 | from poseEstimate import poseEstimate 15 | from inputDialog import InputDlg 16 | from settingDialog import settingDlg 17 | 18 | QTextCodec.setCodecForTr(QTextCodec.codecForName("utf8")) 19 | 20 | class MainWidget(QMainWindow): 21 | def __init__(self ,parent=None): 22 | super(MainWidget ,self).__init__(parent) 23 | self.raw_img = np.array([]) 24 | self.play_flag = 0 25 | self.demo_path = './demoVideo/Demo.avi' 26 | # 设置主窗口 27 | self.setWindowIcon(QIcon('./icons/windowIcon.png')) 28 | self.showFullScreen() 29 | self.setWindowTitle(self.tr("位姿估计")) 30 | self.imageLabel=QLabel() 31 | #self.img = QPixmap.fromImage(QImage("640_4800.jpg")) 32 | #te.setPixmap(self.img) 33 | self.imageLabel.setMaximumSize(480, 360) 34 | self.imageLabel.setAlignment(Qt.AlignCenter) 35 | self.setCentralWidget(self.imageLabel) 36 | 37 | # 设置工具栏 38 | self.toolbar() 39 | 40 | # 设置菜单栏 41 | self.menu() 42 | 43 | # 停靠窗口1 44 | dock1= QDockWidget(self.tr("进行位姿测量的帧"),self) 45 | dock1.setFeatures(QDockWidget.DockWidgetMovable) 46 | dock1.setAllowedAreas(Qt.LeftDockWidgetArea|Qt. RightDockWidgetArea) 47 | 48 | self.te1 = QLabel() 49 | dock1.setWidget(self.te1) 50 | dock1.setMaximumSize(320, 240) 51 | dock1.setMinimumSize(320, 240) 52 | self.addDockWidget(Qt.RightDockWidgetArea,dock1) 53 | 54 | # 停靠窗口2 55 | dock2=QDockWidget ( self.tr("位姿测量结果"),self) 56 | dock2.setFeatures(QDockWidget.DockWidgetFloatable|QDockWidget . 
DockWidgetClosable) 57 | self.te2=QTextEdit( self.tr("本窗口将显示位姿处理结果")) 58 | dock2.setWidget(self.te2) 59 | dock2.setMinimumSize(320, 240) 60 | self.addDockWidget(Qt.RightDockWidgetArea,dock2) 61 | 62 | 63 | def toolbar(self): 64 | # 定义开始,停止,估计位姿和退出,这四种动作 65 | quitAction = QAction(QIcon('./icons/quit.png'), u'退出', self) 66 | quitAction.triggered.connect(self.quit) 67 | 68 | startAction = QAction(QIcon('./icons/start.png'), u'开始采集', self) 69 | startAction.triggered.connect(self.start) 70 | 71 | stopAction = QAction(QIcon('./icons/stop.png'), u'停止采集', self) 72 | stopAction.triggered.connect(self.stop) 73 | 74 | estimateAction = QAction(QIcon('./icons/estimate.png'), u'估计位姿', self) 75 | estimateAction.triggered.connect(self.estimate) 76 | 77 | # 动作: 拍摄标定用的图片 78 | calibCaptureAction = QAction(QIcon('./icons/camera.png'), u'拍照', self) 79 | self.connect(calibCaptureAction, SIGNAL("triggered()"), self.calibCapture) 80 | 81 | # 动作: 录制视频 82 | recordVideoAction = QAction(QIcon('./icons/record.png'), u'录像', self) 83 | self.connect(recordVideoAction, SIGNAL("triggered()"), self.recordVideo) 84 | 85 | # 把按钮和动作连接到一起 86 | toolBar = self.addToolBar("开始") 87 | toolBar.addAction(startAction) 88 | 89 | toolBar = self.addToolBar("停止") 90 | toolBar.addAction(stopAction) 91 | 92 | toolBar = self.addToolBar("退出") 93 | toolBar.addAction(quitAction) 94 | 95 | toolBar = self.addToolBar("估计") 96 | toolBar.addAction(estimateAction) 97 | 98 | toolBar = self.addToolBar("拍照") 99 | toolBar.addAction(calibCaptureAction) 100 | 101 | toolBar = self.addToolBar("录像") 102 | toolBar.addAction(recordVideoAction) 103 | 104 | def menu(self): 105 | # 定义动作: 106 | # 动作: 标定 107 | calibAction = QAction(self.tr("标定相机"), self) 108 | calibAction.setStatusTip(self.tr("标定相机")) 109 | self.connect(calibAction, SIGNAL("triggered()"), self.calib_Cam) 110 | 111 | # 动作: 播放白天演示视频 112 | dayDemoAction = QAction(self.tr("演示1"), self) 113 | self.connect(dayDemoAction, SIGNAL("triggered()"), self.dayDemo) 114 | 115 | 116 | # 动作: 打开位姿测量设置窗口 117 | estimateSettingAction = QAction(self.tr("位姿测量设置"), self) 118 | self.connect(estimateSettingAction, SIGNAL("triggered()"), self.estimateSetting) 119 | 120 | # Action: Select demo_path 121 | demoPathSettingAction = QAction(self.tr("演示路径设置"), self) 122 | self.connect(demoPathSettingAction, SIGNAL("triggered()"), self.setDemoPath) 123 | 124 | 125 | menubar = self.menuBar() 126 | calibMenu = menubar.addMenu(u'&标定') 127 | demoMenu = menubar.addMenu(u'&演示视频') 128 | settingMenu = menubar.addMenu(u'&设置') 129 | 130 | calibMenu.addAction(calibAction) 131 | demoMenu.addAction(dayDemoAction) 132 | demoMenu.addAction(demoPathSettingAction) 133 | settingMenu.addAction( estimateSettingAction) 134 | 135 | def start(self): 136 | self.play_flag = 1 137 | self.video = Video() 138 | self._timer1 = QTimer(self) 139 | try: 140 | self._timer1.timeout.connect(self.play) 141 | finally: 142 | self._timer1.start(0) 143 | self.update() 144 | 145 | def play(self): 146 | try: 147 | self.video.captureRawFrame() 148 | self.imageLabel.setPixmap(self.video.convertFrame()) 149 | 150 | except TypeError: 151 | print "No frame" 152 | 153 | def playAndRecord(self): 154 | try: 155 | self.rec_video.captureRawFrame() 156 | self.rec_video.record() 157 | self.imageLabel.setPixmap(self.rec_video.convertFrame()) 158 | except TypeError: 159 | print "No frame" 160 | 161 | def stop(self): 162 | if self.play_flag ==1: 163 | self._timer1.stop() 164 | self.video.vs.stop() 165 | elif self.play_flag == 2: 166 | self._timer1.stop() 167 | self.video.capture.release() 
168 | elif self.play_flag == 3: 169 | self._timer1.stop() 170 | self.rec_video.vs.stop() 171 | self.rec_video.writer.release() 172 | self.play_flag = 0 173 | 174 | 175 | def quit(self): 176 | try: 177 | self._timer1.stop() 178 | finally: 179 | sys.exit(0) 180 | 181 | def estimate(self): 182 | img = self.video.raw_img 183 | try: 184 | chessboard_h = self.settingDialog.height 185 | chessboard_w = self.settingDialog.width 186 | chessboard_size = self.settingDialog.size 187 | cmxDir = self.settingDialog.cmxDir 188 | except: 189 | self.settingDialog = settingDlg() 190 | self.settingDialog.show() 191 | pos = poseEstimate(img,chessboard_h, chessboard_w, chessboard_size, cmxDir) 192 | img = pos.solvePnP() 193 | 194 | # Save result to txt file in experiment_data 195 | self.saveData('test',pos.rvec_str,pos.tvec_str, pos.err_str) 196 | 197 | try: 198 | img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) 199 | height, width = img.shape[:2] 200 | img = QImage(img, 201 | width, 202 | height, 203 | QImage.Format_RGB888) 204 | img = QPixmap.fromImage(img) 205 | except: 206 | return None 207 | img = img.scaledToHeight(320) 208 | img = img.scaledToWidth(240) 209 | self.te1.setPixmap(img) 210 | if pos.ret ==1: 211 | self.te2.setText(self.tr("旋转向量为:")) 212 | #self.te2.append('') 213 | self.te2.append(pos.rvec_str) 214 | 215 | self.te2.append(self.tr("平移向量为:")) 216 | #self.te2.append('') 217 | self.te2.append(pos.tvec_str) 218 | self.te2.append('') 219 | self.te2.append(self.tr("重投影误差是:")+pos.err_str) 220 | else: 221 | self.te2.setText(self.tr("No Chessboard!")) 222 | 223 | def calib_Cam(self): 224 | ''' 225 | 打开标定对话框,并输入相关参数进行标定,标定结果被保存在./calibFile/calib.npz 226 | ''' 227 | self.inputDialog = InputDlg() 228 | self.inputDialog.show() 229 | 230 | def calibCapture(self): 231 | ''' 232 | 每隔3秒拍摄一张照片,并保存在一个单独的文件夹中,一共拍摄15张 233 | 与此同时用户应该手持标定板,不断摆出不同的位置和角度让相机拍摄。 234 | 最后程序还应该输出一个包含了图片路径的list,用于calib_Cam的标定 235 | ''' 236 | img = self.video.raw_img 237 | cv2.imwrite('./calibFile/calibImages/'+str(int(time.time()))+'.jpg', img) 238 | 239 | def recordVideo(self): 240 | self.play_flag = 3 241 | self.rec_video = recordVideo() 242 | self.rec_video.initRecord() 243 | self._timer1 = QTimer(self) 244 | try: 245 | self._timer1.timeout.connect(self.playAndRecord) 246 | finally: 247 | self._timer1.start(100) 248 | self.update() 249 | 250 | 251 | def dayDemo(self): 252 | ''' 253 | 播放提前录制好的白天标定板不断运动的图像 254 | ''' 255 | self.startDemo(self.demo_path) 256 | 257 | def nightDemo(self): 258 | ''' 259 | 播放提前录制好的夜间标定板不断运动的视频 260 | ''' 261 | pass 262 | 263 | def startDemo(self,file): 264 | self.play_flag = 2 265 | self.video = playDemoVideo(file) 266 | self._timer1 = QTimer(self) 267 | try: 268 | self._timer1.timeout.connect(self.play) 269 | finally: 270 | self._timer1.start(100) 271 | self.update() 272 | 273 | def saveData(self,experiment_name,rvec, tvec, error): 274 | f = open('./experiment_data/'+str(experiment_name)+'.txt','a') 275 | f.writelines(rvec+","+tvec+","+error+'\n') 276 | f.close() 277 | 278 | def estimateSetting(self): 279 | self.settingDialog=settingDlg() 280 | self.settingDialog.show() 281 | 282 | def setDemoPath(self): 283 | demo_QStringList = QFileDialog.getOpenFileNames(self, 284 | "多文件选择", 285 | "./demoVideo", 286 | "All Files (*);;AVI Files (*.avi)") 287 | self.demo_path = str(demo_QStringList.join("")).split("")[0] 288 | 289 | 290 | app=QApplication (sys.argv) 291 | main=MainWidget() 292 | main. 
show()
app.exec_()
--------------------------------------------------------------------------------
/PoseEstimateOnRaspberryPi/playDemoVideo.py:
--------------------------------------------------------------------------------
import cv2
import numpy as np
from PyQt4 import QtGui

class playDemoVideo():
    def __init__(self, file):
        self.capture = cv2.VideoCapture(file)

    def captureRawFrame(self):
        """
        Capture a frame from the video file and keep it as an OpenCV (BGR) image.
        """
        #ret = self.capture.set(3, 640)
        #ret = self.capture.set(4, 480)
        ret, rawFrame = self.capture.read()
        if ret:
            self.raw_img = rawFrame
            #return rawFrame

    def convertFrame(self):
        """
        Convert the current frame to a format suitable for QtGui.
        """
        try:
            self.currentFrame = cv2.cvtColor(self.raw_img, cv2.COLOR_BGR2RGB)
            height, width = self.currentFrame.shape[:2]
            img = QtGui.QImage(self.currentFrame,
                               width,
                               height,
                               QtGui.QImage.Format_RGB888)
            img = QtGui.QPixmap.fromImage(img)
            img = img.scaledToHeight(480)
            img = img.scaledToWidth(360)
            #self.previousFrame = self.currentFrame
            return img
        except Exception:
            return None
--------------------------------------------------------------------------------
/PoseEstimateOnRaspberryPi/playDemoVideo.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/playDemoVideo.pyc
--------------------------------------------------------------------------------
/PoseEstimateOnRaspberryPi/poseEstimate.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
import cv2
import numpy as np
import time

class poseEstimate():
    def __init__(self, frame, height, width, size, cmxDir):
        self.frame = frame
        self.rvec_str = ''
        self.tvec_str = ''
        self.height = height - 1   # inner corners = squares - 1
        self.width = width - 1
        self.size = size           # square edge length, in millimetres
        self.cmxDir = cmxDir       # path to the .npz file holding the intrinsics
        self.err_str = ''

    def solvePnP(self):
        """
        Given the camera calibration parameters, estimate the rotation and
        translation vectors of the camera relative to the chessboard
        coordinate frame (the extrinsic parameters).
        """
        frame = self.frame
        font = cv2.FONT_HERSHEY_SIMPLEX
        objp = np.zeros((self.width * self.height, 3), np.float32)
        objp[:, :2] = np.mgrid[0:self.height, 0:self.width].T.reshape(-1, 2)
        objp = objp * self.size
        calib_data = np.load(self.cmxDir)
        cmx = calib_data['cmx']
        dist = calib_data['dist']
        #criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 30, 0.001)
        axis = self.size * np.float32([[3, 0, 0], [0, 3, 0], [0, 0, -3]]).reshape(-1, 3)
        grey = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        self.ret, corners = cv2.findChessboardCorners(grey, (self.height, self.width), None)
        if self.ret:
            #corners2 = cv2.cornerSubPix(grey, corners, (11, 11), (-1, -1), criteria)
            ret, rvec, tvec = cv2.solvePnP(objp, corners, cmx, dist)
            # mean reprojection error, used as a rough quality measure
            imgpoints2, _ = cv2.projectPoints(objp, rvec, tvec, cmx, dist)
            error = cv2.norm(corners, imgpoints2, cv2.NORM_L2) / len(imgpoints2)
            self.err_str = str(error)

            axis_img, _ = cv2.projectPoints(axis, rvec, tvec, cmx, dist)
            self.draw(frame, corners, axis_img)
            self.rvec_str = self.vec2str(rvec)
            self.tvec_str = self.vec2str(tvec)
            cv2.putText(frame, 'rvec:' + self.rvec_str, (20, 425), font, 0.8, (255, 255, 255), 2)
            cv2.putText(frame, 'tvec:' + self.tvec_str, (20, 450), font, 0.8, (255, 255, 255), 2)
        else:
            cv2.putText(frame, 'No Chessboard Corners Detected!', (20, 425), font, 0.8, (255, 255, 255), 2)

        return frame

    def vec2str(self, vec):
        """
        Convert a rotation or translation vector into a string suitable for
        on-screen display.
        """
        result = ''
        for i in range(0, len(vec)):
            i_str = str(vec[i]).strip('[').strip(']')
            result = result + ' ' + i_str[:i_str.index('.') + 5]
        return result

    def draw(self, img, corners, imgpts):
        """
        Draw the three coordinate axes on the image.
        """
        corner = tuple(corners[0].ravel())
        cv2.line(img, corner, tuple(imgpts[0].ravel()), (255, 0, 0), 5)
        cv2.line(img, corner, tuple(imgpts[1].ravel()), (0, 255, 0), 5)
        cv2.line(img, corner, tuple(imgpts[2].ravel()), (0, 0, 255), 5)
        return img
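For reference, a hypothetical standalone use of the poseEstimate class above (in the application, main.py drives it through the Qt GUI); the image path and the 10 x 9 board with 12 mm squares are assumptions that mirror the defaults in settingDialog.py, not code from the project:

# Hypothetical standalone usage of the poseEstimate class (not part of the project).
import cv2
from poseEstimate import poseEstimate

frame = cv2.imread('./calibFile/calibImages/1466257516.jpg')   # any image showing the board
pe = poseEstimate(frame, 10, 9, 12, './calibFile/calib.npz')
annotated = pe.solvePnP()            # draws the axes and the rvec/tvec text onto the frame
print(pe.rvec_str + pe.tvec_str)     # board pose relative to the camera
cv2.imwrite('pose_result.jpg', annotated)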
--------------------------------------------------------------------------------
/PoseEstimateOnRaspberryPi/poseEstimate.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/poseEstimate.pyc
--------------------------------------------------------------------------------
/PoseEstimateOnRaspberryPi/recordVideo.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
"""
Created on Thu Jun 16 17:06:07 2016

@author: li_ch
"""

from __future__ import print_function
from imutils.video import VideoStream
import numpy as np
import imutils
import time
import cv2
from PyQt4 import QtGui

class recordVideo():
    def __init__(self):
        # initialize the video stream and allow the camera sensor to warm up
        self.vs = VideoStream(usePiCamera=True).start()
        time.sleep(2.0)
        self.currentFrame = np.array([])
        self.raw_img = np.array([])

        self.writer = None
        (h, w) = (None, None)

    def captureRawFrame(self):
        """
        Capture a frame from the Pi camera and keep it as an OpenCV (BGR) image;
        the same frame is also written to the video file while recording.
        """
        rawFrame = self.vs.read()
        rawFrame = imutils.resize(rawFrame, width=640)
        self.raw_img = rawFrame
        #return rawFrame

    def initRecord(self):
        if self.writer is None:
            # store the image dimensions and initialize the video writer
            #(h, w) = self.raw_img.shape[:2]
            # cv2.VideoWriter_fourcc is the OpenCV 3 API (cv2.cv.FOURCC in OpenCV 2)
            self.writer = cv2.VideoWriter('./demoVideo/' + str(int(time.time())) + '.avi',
                                          cv2.VideoWriter_fourcc(*"XVID"), 15,
                                          (640, 480), True)

    def record(self):
        # write the output frame to file
        self.writer.write(self.raw_img)

    def convertFrame(self):
        """
        Convert the current frame to a format suitable for QtGui.
        """
        try:
            self.currentFrame = cv2.cvtColor(self.raw_img, cv2.COLOR_BGR2RGB)
            height, width = self.currentFrame.shape[:2]
            img = QtGui.QImage(self.currentFrame,
                               width,
                               height,
                               QtGui.QImage.Format_RGB888)
            img = QtGui.QPixmap.fromImage(img)
            #self.previousFrame = self.currentFrame
            img = img.scaledToHeight(480)
            img = img.scaledToWidth(360)
            return img
        except Exception:
            return None
--------------------------------------------------------------------------------
/PoseEstimateOnRaspberryPi/recordVideo.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/recordVideo.pyc
--------------------------------------------------------------------------------
/PoseEstimateOnRaspberryPi/settingDialog.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
from PyQt4.QtCore import *
from PyQt4.QtGui import *

QTextCodec.setCodecForTr(QTextCodec.codecForName("utf8"))

class settingDlg(QDialog):
    def __init__(self, parent=None):
        super(settingDlg, self).__init__(parent)
        self.setWindowIcon(QIcon('./icons/windowIcon.png'))
        self.height = 10                          # board length, in squares
        self.width = 9                            # board width, in squares
        self.size = 12                            # square edge length, in mm
        self.cmxDir = './calibFile/calib.npz'     # path to the intrinsics file
        label1 = QLabel(self.tr("内参路径"))        # "intrinsics path"
        label2 = QLabel(self.tr("棋盘格长(个)"))    # "board length (squares)"
        label3 = QLabel(self.tr("棋盘格宽(个)"))    # "board width (squares)"
        label4 = QLabel(self.tr("单格边长(毫米)"))  # "square size (mm)"

        self.heightLabel = QLabel("10")
        self.heightLabel.setFrameStyle(QFrame.Panel | QFrame.Sunken)
        self.widthLabel = QLabel("9")
        self.widthLabel.setFrameStyle(QFrame.Panel | QFrame.Sunken)
        self.sizeLabel = QLabel("12")
        self.sizeLabel.setFrameStyle(QFrame.Panel | QFrame.Sunken)

        picButton = QPushButton(u"选择内参路径")    # "choose intrinsics file"
        heightButton = QPushButton(u"修改")         # "edit"
        widthButton = QPushButton(u"修改")
        sizeButton = QPushButton(u"修改")
        #calibButton = QPushButton(u"开始标定")      # "start calibration"

        self.connect(picButton, SIGNAL("clicked()"), self.slotPic)
        self.connect(heightButton, SIGNAL("clicked()"), self.slotHeight)
        self.connect(widthButton, SIGNAL("clicked()"), self.slotWidth)
        self.connect(sizeButton, SIGNAL("clicked()"), self.slotSize)
        #self.connect(calibButton, SIGNAL("clicked()"), self.calib)

        layout = QGridLayout()
        layout.addWidget(label1, 0, 0, 1, 0)
        layout.addWidget(label2, 2, 0)
        layout.addWidget(label3, 3, 0)
        layout.addWidget(label4, 4, 0)
        layout.addWidget(self.heightLabel, 2, 1)
        layout.addWidget(self.widthLabel, 3, 1)
        layout.addWidget(self.sizeLabel, 4, 1)
        layout.addWidget(heightButton, 2, 2)
        layout.addWidget(widthButton, 3, 2)
        layout.addWidget(sizeButton, 4, 2)
        layout.addWidget(picButton, 0, 1, 1, 2)
        #layout.addWidget(calibButton, 5, 1, 5, 2)

        self.setLayout(layout)

        self.setWindowTitle(self.tr("位姿测量设置"))  # "pose measurement settings"

    def slotPic(self):
        img_QStringList = QFileDialog.getOpenFileNames(self,
                                                       "多文件选择",  # "select files"
                                                       "./calibFile",
                                                       "All Files (*);;NPZ Files (*.npz)")
        if img_QStringList:
            # use the first selected file as the intrinsics path
            self.cmxDir = str(img_QStringList[0])

    def slotHeight(self):
        h, ok = QInputDialog.getInteger(self, self.tr("棋盘格长"),
                                        self.tr("请输入棋盘格长(个):"),  # "enter the board length (squares)"
                                        int(self.heightLabel.text()), 0, 150)
        if ok:
            self.height = int(h)
            self.heightLabel.setText(str(h))

    def slotWidth(self):
        w, ok = QInputDialog.getInteger(self, self.tr("棋盘格宽"),
                                        self.tr("请输入棋盘格宽(个):"),  # "enter the board width (squares)"
                                        int(self.widthLabel.text()), 0, 150)
        if ok:
            self.width = int(w)
            self.widthLabel.setText(str(w))

    def slotSize(self):
        s, ok = QInputDialog.getDouble(self, self.tr("棋盘格边长"),
                                       self.tr("请输入棋盘格边长(毫米):"),  # "enter the square size (mm)"
                                       float(self.sizeLabel.text()), 0, 150.00)
        if ok:
            self.size = float(s)
            self.sizeLabel.setText(str(s))
--------------------------------------------------------------------------------
/PoseEstimateOnRaspberryPi/settingDialog.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/settingDialog.pyc
--------------------------------------------------------------------------------
/PoseEstimateOnRaspberryPi/说明.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/PoseEstimateOnRaspberryPi/说明.txt
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# pose-estimation_python-opencv
Camera calibration and pose estimation of a chessboard using solvePnP.
This project uses opencv-python to estimate the pose of a chessboard.
Run camcalib.py first to calibrate the camera and obtain the camera matrix and distortion coefficients.
Then run solpnp.py to estimate the extrinsic matrix, i.e. the pose of the chessboard relative to the camera.

### 1. PoseEstimateOnRaspberryPi
This folder contains the program I ran on a Raspberry Pi. PyQt is used for the GUI; run main.py to start it.

Required packages include:
1. PyQt4
2. OpenCV 3
3. imutils (for the Raspberry Pi camera)
4. numpy

#### 1.1 The GUI on Raspberry Pi
(image)

#### 1.2 Pi NoIR infrared camera
(image)

### 2. MatlabModelForProjection
This folder contains a MATLAB model of the camera. You can specify the parameters of the intrinsic and extrinsic matrices and watch how the projected image of the chessboard changes.
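What the MATLAB model computes is the standard pinhole projection x ~ K [R|t] X. As a rough Python/NumPy sketch of the same idea (the intrinsic and extrinsic values below are arbitrary illustrative choices, not parameters taken from the MATLAB code):

    import cv2
    import numpy as np

    # 9x6 grid of chessboard corners with 12 mm squares (illustrative values)
    objp = np.zeros((9 * 6, 3), np.float32)
    objp[:, :2] = np.mgrid[0:9, 0:6].T.reshape(-1, 2) * 12.0

    K = np.float32([[600, 0, 320],                      # assumed intrinsic matrix
                    [0, 600, 240],
                    [0, 0, 1]])
    rvec = np.float32([0.1, 0.2, 0.0]).reshape(3, 1)    # assumed rotation (Rodrigues vector)
    tvec = np.float32([0.0, 0.0, 500.0]).reshape(3, 1)  # assumed translation, in mm

    # each 3D corner X is mapped to an image point x ~ K (R X + t)
    img_pts, _ = cv2.projectPoints(objp, rvec, tvec, K, np.zeros(5))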
#### 2.1 The model of chessboard made up of 3D points
(image)

#### 2.2 The projected 2D image with user-specified parameters
(image)
--------------------------------------------------------------------------------
/calib.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/calib.npz
--------------------------------------------------------------------------------
/camcalib.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python

# This code is based on the Python OpenCV camera calibration tutorial
# Use ROS Camera Calibration in Real Application
#
# This example shows
# - how to detect a chessboard
# - how to call camera calibration
# - how to use the camera params to undistort an image
# - how to reproject 3d points onto the image plane (draw circles)

import numpy as np
import cv2
import glob
import pdb

# Step 1: Data prep
img_list = glob.glob('./chessboard/chessboard_gray*.jpg')
img_list_detected = []

# set opts
objp = np.zeros((6*9, 3), np.float32)
objp[:, :2] = np.mgrid[0:9, 0:6].T.reshape(-1, 2)
size = (9, 6)

# Arrays to store object points and image points from all the images.
objpoints = []  # 3d points in real world space
imgpoints = []  # 2d points in image plane

for fname in img_list:
    img = cv2.imread(fname)
    grey = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)

    ret, corners = cv2.findChessboardCorners(grey, size, None)
    cv2.drawChessboardCorners(img, (9, 6), corners, ret)

    # if found, refine the corners and keep the image
    if ret:
        criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 30, 0.001)
        cv2.cornerSubPix(grey, corners, (11, 11), (-1, -1), criteria)
        imgpoints.append(corners)
        objpoints.append(objp)
        img_list_detected.append(fname)
        print fname

    cv2.imshow('img', img)
    cv2.waitKey(500)

cv2.destroyAllWindows()


# Step 2: Calibration
# shape[::-1]: (480,640) => (640,480)
ret, cmx, dist, rvecs, tvecs = cv2.calibrateCamera(
    objpoints, imgpoints, grey.shape[::-1], None, None)

# save calibration result
np.savez('calib.npz', cmx=cmx, dist=dist, rvecs=rvecs, tvecs=tvecs)


# Step 3: Validation - undistort an image
img = cv2.imread('./chessboard/chessboard_gray0.jpg')
img_size = grey.shape[::-1]
newcmx, roi = cv2.getOptimalNewCameraMatrix(cmx, dist, img_size, 0, img_size)

dst = cv2.undistort(img, cmx, dist, None, newcmx)
cv2.imshow('original', img)
cv2.imshow('undistort', dst)
cv2.waitKey(500)
cv2.destroyAllWindows()
# pdb.set_trace()

# Step 4: Reproject points
img = cv2.imread(img_list_detected[0])
imgpts2, _ = cv2.projectPoints(objp, rvecs[0], tvecs[0], cmx, dist)
for pt in imgpts2:
    center = (pt[0, 0], pt[0, 1])
    cv2.circle(img, center, 5, (0, 0, 255), 2)
cv2.imshow('reproject', img)
cv2.waitKey()
cv2.destroyAllWindows()
--------------------------------------------------------------------------------
/chessboard/chessboard_gray0.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/chessboard/chessboard_gray0.jpg -------------------------------------------------------------------------------- /chessboard/chessboard_gray1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/chessboard/chessboard_gray1.jpg -------------------------------------------------------------------------------- /chessboard/chessboard_gray10.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/chessboard/chessboard_gray10.jpg -------------------------------------------------------------------------------- /chessboard/chessboard_gray11.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/chessboard/chessboard_gray11.jpg -------------------------------------------------------------------------------- /chessboard/chessboard_gray12.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/chessboard/chessboard_gray12.jpg -------------------------------------------------------------------------------- /chessboard/chessboard_gray13.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/chessboard/chessboard_gray13.jpg -------------------------------------------------------------------------------- /chessboard/chessboard_gray14.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/chessboard/chessboard_gray14.jpg -------------------------------------------------------------------------------- /chessboard/chessboard_gray15.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/chessboard/chessboard_gray15.jpg -------------------------------------------------------------------------------- /chessboard/chessboard_gray16.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/chessboard/chessboard_gray16.jpg -------------------------------------------------------------------------------- /chessboard/chessboard_gray17.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/chessboard/chessboard_gray17.jpg -------------------------------------------------------------------------------- /chessboard/chessboard_gray18.jpg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/chessboard/chessboard_gray18.jpg -------------------------------------------------------------------------------- /chessboard/chessboard_gray19.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/chessboard/chessboard_gray19.jpg -------------------------------------------------------------------------------- /chessboard/chessboard_gray2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/chessboard/chessboard_gray2.jpg -------------------------------------------------------------------------------- /chessboard/chessboard_gray20.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/chessboard/chessboard_gray20.jpg -------------------------------------------------------------------------------- /chessboard/chessboard_gray3.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/chessboard/chessboard_gray3.jpg -------------------------------------------------------------------------------- /chessboard/chessboard_gray4.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/chessboard/chessboard_gray4.jpg -------------------------------------------------------------------------------- /chessboard/chessboard_gray5.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/chessboard/chessboard_gray5.jpg -------------------------------------------------------------------------------- /chessboard/chessboard_gray6.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/chessboard/chessboard_gray6.jpg -------------------------------------------------------------------------------- /chessboard/chessboard_gray7.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/chessboard/chessboard_gray7.jpg -------------------------------------------------------------------------------- /chessboard/chessboard_gray8.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/chessboard/chessboard_gray8.jpg -------------------------------------------------------------------------------- /chessboard/chessboard_gray9.jpg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/chessboard/chessboard_gray9.jpg
--------------------------------------------------------------------------------
/img/GUI.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/img/GUI.png
--------------------------------------------------------------------------------
/img/chessboard_1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/img/chessboard_1.jpg
--------------------------------------------------------------------------------
/img/chessboard_2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/img/chessboard_2.jpg
--------------------------------------------------------------------------------
/img/device.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/img/device.jpg
--------------------------------------------------------------------------------
/img/work.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cyrilli/pose-estimation_python-opencv/f6e4125dc78344e5f448cfa720fc2d6bc1c4de69/img/work.jpg
--------------------------------------------------------------------------------
/solpnp.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python

# Show how to use solvePnP
# - run camcalib.py first to get the camera calibration data

import numpy as np
import cv2


# load previously saved calibration data
calib_data = np.load('calib.npz')
cmx = calib_data['cmx']
dist = calib_data['dist']

def draw(img, corners, imgpts):
    """Draw the three coordinate axes."""
    corner = tuple(corners[0].ravel())
    cv2.line(img, corner, tuple(imgpts[0].ravel()), (255, 0, 0), 5)
    cv2.line(img, corner, tuple(imgpts[1].ravel()), (0, 255, 0), 5)
    cv2.line(img, corner, tuple(imgpts[2].ravel()), (0, 0, 255), 5)
    return img

axis = np.float32([[3, 0, 0], [0, 3, 0], [0, 0, -3]]).reshape(-1, 3)

# image points
img = cv2.imread('./chessboard/chessboard_gray1.jpg')
grey = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
ret, corners = cv2.findChessboardCorners(grey, (9, 6), None)

# 3d points
objp = np.zeros((6*9, 3), np.float32)
objp[:, :2] = np.mgrid[0:9, 0:6].T.reshape(-1, 2)

# compute the transform
# - solvePnP requires the camera calibration
# - the same info is also returned by calibrateCamera
ret, rvec, tvec = cv2.solvePnP(objp, corners, cmx, dist)

# project the axes onto the image plane
axis_img, _ = cv2.projectPoints(axis, rvec, tvec, cmx, dist)

# draw the axes
draw(img, corners, axis_img)
cv2.imshow('img', img)
cv2.waitKey()
cv2.destroyAllWindows()
--------------------------------------------------------------------------------
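A note on interpreting the result of solpnp.py: rvec and tvec map chessboard coordinates into the camera frame. If instead the camera position expressed in the chessboard frame is wanted, it can be recovered as C = -R^T t; the following small sketch of that conversion is not part of the repository and only uses cv2.Rodrigues:

# Sketch (not part of solpnp.py): camera origin expressed in chessboard
# coordinates, computed from a solvePnP rotation/translation pair.
import cv2
import numpy as np

def camera_position(rvec, tvec):
    """Return C = -R^T * t, the camera centre in chessboard units (squares here)."""
    R, _ = cv2.Rodrigues(rvec)                       # 3x3 rotation matrix
    return -R.T.dot(np.asarray(tvec).reshape(3, 1))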