├── LICENSE ├── README.md ├── airrace.py ├── airrace_drone.py ├── airrace_test.py ├── ardrone2.py ├── ardrone2_test.py ├── cimg ├── cimg.c └── setup.py ├── cvideo ├── _readme.txt ├── convert.cpp ├── cvideo.cpp ├── cvideo.sln ├── cvideo.vcproj ├── main.cpp ├── setup.py └── test.py ├── fre_drone.py ├── green.py ├── guide ├── introduction.md ├── lesson1.md ├── lesson1.py ├── lesson2.md ├── lesson2.py ├── lesson3.md ├── lesson3.py ├── lesson4.md ├── lesson4.py ├── lesson5.md ├── lesson5.py ├── lesson6.md ├── lesson6.py ├── lesson7.md ├── lesson7.py ├── overview.md └── todo.txt ├── h264drone.py ├── launcher.py ├── line.py ├── metalog.py ├── navdata.py ├── onboard ├── parse_navdata.py └── test_navdata.c ├── pave.py ├── pave_test.py ├── paveproxy.py ├── paveproxy_test.py ├── pose.py ├── pose_test.py ├── ro.py ├── route.py ├── rr_drone.py ├── rr_drone_test.py ├── sonar.py ├── sourcelogger.py ├── striploc.py ├── striploc_test.py ├── video.py ├── viewer.py ├── viewlog.py └── wifisentinel.py /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2013 robotika.cz 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of 6 | this software and associated documentation files (the "Software"), to deal in 7 | the Software without restriction, including without limitation the rights to 8 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software is furnished to do so, 10 | subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 17 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR 18 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 19 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 20 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 21 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Heidi 2 | ===== 3 | 4 | autonomous AR Drone 2.0 controlled via Python 5 | 6 | ![Hedi Drone](http://robotika.cz/robots/heidi/heidi-box.jpg) 7 | 8 | References at 9 | http://robotika.cz/robots/heidi/ 10 | 11 | -------------------------------------------------------------------------------- /airrace.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | """ 3 | AirRace competition in Vienna. See robotchallenge.org 4 | usage: 5 | ./airrace.py [|] 6 | """ 7 | # for introduction of cv2 for Python have a look at 8 | # http://docs.opencv.org/trunk/doc/py_tutorials/py_tutorials.html 9 | import sys 10 | import cv2 11 | import math 12 | import numpy as np 13 | from pave import PaVE, isIFrame, frameNumber, timestamp 14 | from pose import Pose 15 | 16 | FRAMES_PER_INDEX = 15 # for simpler review of 15Hz images 17 | THRESHOLD_FRACTION = 0.05 18 | 19 | g_filename = None 20 | g_mser = None 21 | 22 | # Jakub's workaround for OpenCV 2.4.2 on linux 23 | def arrayTo3d( contours ): 24 | contours3d = [] 25 | for cnt in contours: 26 | cnt3d = np.zeros( (cnt.shape[0], 1, cnt.shape[1]), dtype=int ) 27 | for ii in xrange(cnt.shape[0]): 28 | cnt3d[ii] = cnt[ii] 29 | contours3d.append(cnt3d) 30 | return contours3d 31 | 32 | 33 | def saveIndexedImage( img ): 34 | if g_filename: 35 | cv2.imwrite( g_filename, img ) 36 | 37 | 38 | def processFrame( frame, debug=False ): 39 | result = [] 40 | global g_mser 41 | global THRESHOLD_FRACTION 42 | if g_mser == None: 43 | g_mser = cv2.MSER( _delta = 
10, _min_area=100, _max_area=300*50*2 ) 44 | gray = cv2.cvtColor( frame, cv2.COLOR_BGR2GRAY ) 45 | if g_mser: 46 | contours = g_mser.detect(gray, None) 47 | if cv2.__version__ == "2.4.2": 48 | contours = arrayTo3d( contours ) # Jakub's workaround for 2.4.2 on linux 49 | else: 50 | histogram = cv2.calcHist([gray],[0],None,[256],[0,256]) 51 | s = 0 52 | for i, h in enumerate(histogram): 53 | s += h 54 | if s > THRESHOLD_FRACTION * 640 * 360: 55 | break 56 | ret, binary = cv2.threshold( gray, i, 255, cv2.THRESH_BINARY ) 57 | # ret, binary = cv2.threshold( gray, 0, 255, cv2.THRESH_OTSU ) 58 | contours, hierarchy = cv2.findContours( binary, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE ) 59 | for cnt in contours: 60 | if g_mser == None: 61 | area = cv2.contourArea(cnt, oriented=True) 62 | if g_mser != None or (area > 100 and area < 100000): 63 | rect = cv2.minAreaRect(cnt) 64 | if g_mser == None or len(cnt)/float(rect[1][0]*rect[1][1]) > 0.70: 65 | result.append( rect ) 66 | if g_mser != None: 67 | result = removeDuplicities( result ) 68 | if debug: 69 | if g_mser == None: 70 | cv2.drawContours(frame, contours, -1, (0,255,0), 3) 71 | for rect in result: 72 | box = cv2.cv.BoxPoints(rect) 73 | box = np.int0(box) 74 | cv2.drawContours( frame,[box],0,(255,0,0),2) 75 | result = filterRectangles([((int(x),int(y)),(int(w),int(h)),int(a)) for ((x,y),(w,h),a) in result], minWidth=150/2) 76 | if debug: 77 | for rect in result: 78 | box = cv2.cv.BoxPoints(rect) 79 | box = np.int0(box) 80 | cv2.drawContours( frame,[box],0,(0,0,255),2) 81 | cv2.imshow('image', frame) 82 | # save image for simpler results review, angle is used as hash for search sub-sequence 83 | if g_filename: 84 | cv2.imwrite( g_filename, frame ) 85 | return result 86 | 87 | def filterRectangles( rects, minWidth=150, maxWidth=200 ): 88 | ret = [] 89 | for (x,y),(w,h),a in rects: 90 | if w < h: 91 | w,h,a = h,w,a+90 92 | if w > 3*h and w >= minWidth and w <= maxWidth: 93 | ret.append( ((x,y,),(w,h),a) ) 94 | return ret 95 
| 96 | def stripPose( rect, highResolution=True, scale=None ): 97 | "return relative pose of image rectangle" 98 | (x,y),(w,h),a = rect 99 | assert w > 3*h, (w,h) # 30cm long, 5cm wide ... i.e. should be 6 times 100 | a = -a-90 101 | if a > 90: 102 | a -= 180 103 | if a < -90: 104 | a += 180 105 | if scale == None: 106 | if w >= 5*h: # it should be 6x, but width is more precise 107 | scale = 0.3/float(w) 108 | else: 109 | scale = 0.05/float(h) 110 | if highResolution: 111 | return Pose( scale*(720/2-y), scale*(1280/2-x), math.radians( a ) ) 112 | else: 113 | # return Pose( scale*(360/2-y), scale*(640/2-x), math.radians( a ) ) 114 | return Pose( scale*(360/2-y), scale*(270-x), math.radians( a ) ) 115 | 116 | def allStripPoses( rects, highResolution=True ): 117 | # expected low resolution 640x390 118 | if len(rects) == 1: 119 | # keep the old behavior 120 | return [stripPose( rects[0], highResolution=highResolution )] 121 | maxWidth = None 122 | for (x,y),(w,h),a in rects: 123 | width = max(w,h) 124 | if maxWidth == None or width > maxWidth: 125 | maxWidth = width 126 | return [stripPose(x, highResolution=highResolution, scale=0.3/float(maxWidth)) for x in rects] 127 | 128 | def removeDuplicities( rectangles, desiredRatio=6.0 ): 129 | "for MSER remove multiple detections of the same strip" 130 | radius = 30 131 | ret = [] 132 | for (x,y),(w,h),a in rectangles: 133 | for (x2,y2),(w2,h2),a2 in ret: 134 | if abs(x-x2) < radius and abs(y-y2) < radius: 135 | ratio = max(h,w)/float(min(h,w)) 136 | ratio2 = max(h2,w2)/float(min(h2,w2)) 137 | if abs(ratio-desiredRatio) < abs(ratio2-desiredRatio): 138 | # use the bigger one 139 | ret.remove( ((x2,y2),(w2,h2),a2) ) 140 | ret.append( ((x,y),(w,h),a) ) 141 | break 142 | else: 143 | ret.append( ((x,y),(w,h),a) ) 144 | return ret 145 | 146 | 147 | def testFrame( filename ): 148 | img = cv2.imread( filename, cv2.CV_LOAD_IMAGE_COLOR ) 149 | processFrame( img, debug=True ) 150 | cv2.waitKey(0) 151 | 152 | def testVideo( filename ): 
153 | cap = cv2.VideoCapture( filename ) 154 | while(cap.isOpened()): 155 | ret, frame = cap.read() 156 | if ret: 157 | processFrame( frame, debug=True ) 158 | else: 159 | break 160 | if cv2.waitKey(1) & 0xFF == ord('q'): 161 | break 162 | cap.release() 163 | 164 | def testPaVEVideo( filename, onlyFrameNumber=None, refLog=None ): 165 | f = open( filename, "rb" ) 166 | data = f.read(10000) 167 | pave = PaVE() 168 | cap = None 169 | total = 0 170 | while len(data) > 0: 171 | pave.append( data ) 172 | header,payload = pave.extract() 173 | while payload: 174 | if isIFrame( header ) and (onlyFrameNumber == None or onlyFrameNumber==frameNumber( header )/FRAMES_PER_INDEX): 175 | tmpFile = open( "tmp.bin", "wb" ) 176 | tmpFile.write( payload ) 177 | tmpFile.flush() 178 | tmpFile.close() 179 | cap = cv2.VideoCapture( "tmp.bin" ) 180 | ret, frame = cap.read() 181 | assert ret 182 | if ret: 183 | global g_filename 184 | g_filename = "tmp_%04d.jpg" % (frameNumber( header )/FRAMES_PER_INDEX) 185 | result = processFrame( frame, debug=True ) 186 | if refLog != None: 187 | print refLog.readline().strip() 188 | (oldFrameNumber, oldTimestamp), oldResult = eval(refLog.readline().strip()) 189 | assert oldFrameNumber == frameNumber(header), (oldFrameNumber, frameNumber(header)) 190 | assert oldTimestamp == timestamp(header), (oldTimestamp, timestamp(header)) 191 | print ((frameNumber(header), timestamp(header)), result) 192 | # assert oldResult == result, oldResult # potential difference linux/windows 193 | else: 194 | print frameNumber( header )/FRAMES_PER_INDEX, result 195 | if onlyFrameNumber: 196 | cv2.waitKey(0) 197 | return 198 | 199 | header,payload = pave.extract() 200 | if cv2.waitKey(1) & 0xFF == ord('q'): 201 | break 202 | data = f.read(10000) 203 | if refLog != None: # complete termination line 204 | print refLog.readline().strip() 205 | 206 | 207 | def isParrotVideo( filename ): 208 | return open( filename ).read(4) == "PaVE" 209 | 210 | def main( argv, imgFce=None ): 211 
| if imgFce != None: 212 | global processFrame 213 | processFrame = imgFce 214 | if len(argv) < 2: 215 | print __doc__ 216 | sys.exit(2) 217 | filename = argv[1] 218 | if filename.endswith(".jpg"): 219 | testFrame( filename ) 220 | else: 221 | if isParrotVideo( filename ): 222 | if len(argv) > 2: 223 | if "src_cv2_" in argv[2]: 224 | testPaVEVideo( filename, refLog=open(argv[2]) ) 225 | else: 226 | testPaVEVideo( filename, onlyFrameNumber=int(eval(argv[2])) ) 227 | else: 228 | testPaVEVideo( filename ) 229 | else: 230 | testVideo( filename ) 231 | cv2.destroyAllWindows() 232 | 233 | if __name__ == "__main__": 234 | main( sys.argv ) 235 | 236 | -------------------------------------------------------------------------------- /airrace_drone.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | """ 3 | AirRace competition in Vienna. See robotchallenge.org 4 | usage: 5 | ./airrace.py 6 | """ 7 | import sys 8 | import datetime 9 | import multiprocessing 10 | import cv2 11 | import math 12 | from pave import PaVE, isIFrame, frameNumber, timestamp, correctTimePeriod 13 | from airrace import processFrame, allStripPoses 14 | from sourcelogger import SourceLogger 15 | from ardrone2 import ARDrone2, ManualControlException, manualControl, normalizeAnglePIPI, distance 16 | import viewlog 17 | from line import Line 18 | from pose import Pose 19 | from striploc import StripsLocalisation, REF_CIRCLE_RADIUS 20 | from striploc import PATH_STRAIGHT, PATH_TURN_LEFT, PATH_TURN_RIGHT 21 | 22 | 23 | MAX_ALLOWED_SPEED = 0.8 24 | STRIPS_FAILURE_SPEED = 0.5 # ??? speed when localisation is not updated from last image ... maybe two images?? 
25 | TRANSITION_SPEED = 0.4 26 | NUM_FAST_STRIPS = 4 # now the same number for straight and turn segments -> TODO split 27 | MAX_ALLOWED_VIDEO_DELAY = 2.0 # in seconds, then it will wait (desiredSpeed = 0.0) 28 | 29 | def timeName( prefix, ext ): 30 | dt = datetime.datetime.now() 31 | filename = prefix + dt.strftime("%y%m%d_%H%M%S.") + ext 32 | return filename 33 | 34 | g_pave = None 35 | 36 | def wrapper( packet ): 37 | global g_pave 38 | if g_pave == None: 39 | g_pave = PaVE() 40 | g_pave.append( packet ) 41 | header,payload = g_pave.extract() 42 | while payload: 43 | if isIFrame( header ): 44 | tmpFile = open( "tmp.bin", "wb" ) 45 | tmpFile.write( payload ) 46 | tmpFile.flush() 47 | tmpFile.close() 48 | cap = cv2.VideoCapture( "tmp.bin" ) 49 | ret, frame = cap.read() 50 | assert ret 51 | if ret: 52 | return (frameNumber( header ), timestamp(header)), processFrame( frame, debug=False ) 53 | header,payload = g_pave.extract() 54 | 55 | g_queueResults = multiprocessing.Queue() 56 | 57 | def getOrNone(): 58 | if g_queueResults.empty(): 59 | return None 60 | return g_queueResults.get() 61 | 62 | 63 | class AirRaceDrone( ARDrone2 ): 64 | def __init__( self, replayLog=None, speed = 0.2, skipConfigure=False, metaLog=None, console=None ): 65 | self.loggedVideoResult = None 66 | self.lastImageResult = None 67 | self.videoHighResolution = False 68 | ARDrone2.__init__( self, replayLog, speed, skipConfigure, metaLog, console ) 69 | if replayLog == None: 70 | name = timeName( "logs/src_cv2_", "log" ) 71 | metaLog.write("cv2: "+name+'\n' ) 72 | self.loggedVideoResult = SourceLogger( getOrNone, name ).get 73 | self.startVideo( wrapper, g_queueResults, record=True, highResolution=self.videoHighResolution ) 74 | else: 75 | assert metaLog 76 | self.loggedVideoResult = SourceLogger( None, metaLog.getLog("cv2:") ).get 77 | self.startVideo( record=True, highResolution=self.videoHighResolution ) 78 | 79 | def update( self, cmd="AT*COMWDG=%i,\r" ): 80 | ARDrone2.update( self, cmd ) 81 | 
if self.loggedVideoResult != None: 82 | self.lastImageResult = self.loggedVideoResult() 83 | 84 | 85 | def competeAirRace( drone, desiredHeight = 1.7 ): 86 | loops = [] 87 | drone.speed = 0.1 88 | maxVideoDelay = 0.0 89 | maxControlGap = 0.0 90 | loc = StripsLocalisation() 91 | 92 | remainingFastStrips = 0 93 | desiredSpeed = TRANSITION_SPEED 94 | updateFailedCount = 0 95 | 96 | try: 97 | drone.wait(1.0) 98 | drone.setVideoChannel( front=False ) 99 | drone.takeoff() 100 | poseHistory = [] 101 | startTime = drone.time 102 | while drone.time < startTime + 1.0: 103 | drone.update("AT*PCMD=%i,0,0,0,0,0\r") # drone.hover(1.0) 104 | poseHistory.append( (drone.time, (drone.coord[0], drone.coord[1], drone.heading), (drone.angleFB, drone.angleLR)) ) 105 | magnetoOnStart = drone.magneto[:3] 106 | print "NAVI-ON" 107 | pathType = PATH_TURN_LEFT 108 | virtualRefCircle = None 109 | startTime = drone.time 110 | sx,sy,sz,sa = 0,0,0,0 111 | lastUpdate = None 112 | while drone.time < startTime + 600.0: 113 | sz = max( -0.2, min( 0.2, desiredHeight - drone.coord[2] )) 114 | sx = max( 0, min( drone.speed, desiredSpeed - drone.vx )) 115 | 116 | if drone.lastImageResult: 117 | lastUpdate = drone.time 118 | assert len( drone.lastImageResult ) == 2 and len( drone.lastImageResult[0] ) == 2, drone.lastImageResult 119 | (frameNumber, timestamp), rects = drone.lastImageResult 120 | viewlog.dumpCamera( "tmp_%04d.jpg" % (frameNumber/15,), 0 ) 121 | 122 | # keep history small 123 | videoTime = correctTimePeriod( timestamp/1000., ref=drone.time ) 124 | videoDelay = drone.time - videoTime 125 | if videoDelay > 1.0: 126 | print "!DANGER! - video delay", videoDelay 127 | maxVideoDelay = max( videoDelay, maxVideoDelay ) 128 | toDel = 0 129 | for oldTime, oldPose, oldAngles in poseHistory: 130 | toDel += 1 131 | if oldTime >= videoTime: 132 | break 133 | poseHistory = poseHistory[:toDel] 134 | 135 | tiltCompensation = Pose(desiredHeight*oldAngles[0], desiredHeight*oldAngles[1], 0) # TODO real height? 
136 | print "FRAME", frameNumber/15, "[%.1f %.1f]" % (math.degrees(oldAngles[0]), math.degrees(oldAngles[1])), 137 | loc.updateFrame( Pose( *oldPose ).add(tiltCompensation), allStripPoses( rects, highResolution=drone.videoHighResolution ) ) 138 | if loc.pathType != pathType: 139 | print "TRANS", pathType, "->", loc.pathType 140 | if pathType == PATH_TURN_LEFT and loc.pathType == PATH_STRAIGHT: 141 | if len(loops) > 0: 142 | print "Loop %d, time %d" % (len(loops), drone.time-loops[-1]) 143 | print "-----------------------------------------------" 144 | loops.append( drone.time ) 145 | if drone.magneto[:3] == magnetoOnStart: 146 | print "!!!!!!!! COMPASS FAILURE !!!!!!!!" 147 | pathType = loc.pathType 148 | print "----" 149 | remainingFastStrips = NUM_FAST_STRIPS 150 | 151 | if loc.crossing: 152 | print "X", True, remainingFastStrips 153 | else: 154 | print pathType, loc.pathUpdated, remainingFastStrips 155 | if not loc.pathUpdated: 156 | updateFailedCount += 1 157 | if updateFailedCount > 1: 158 | print "UPDATE FAILED", updateFailedCount 159 | else: 160 | updateFailedCount = 0 161 | 162 | if remainingFastStrips > 0: 163 | remainingFastStrips -= 1 164 | desiredSpeed = MAX_ALLOWED_SPEED 165 | if not loc.pathUpdated and not loc.crossing: 166 | desiredSpeed = STRIPS_FAILURE_SPEED 167 | else: 168 | desiredSpeed = TRANSITION_SPEED 169 | if videoDelay > MAX_ALLOWED_VIDEO_DELAY: 170 | desiredSpeed = 0.0 171 | 172 | if drone.battery < 10: 173 | print "BATTERY LOW!", drone.battery 174 | 175 | # height debugging 176 | #print "HEIGHT\t%d\t%d\t%.2f\t%d\t%d\t%d\t%d\t%d\t%d" % tuple([max([0]+[w for ((x,y),(w,h),a) in rects])] + list(drone.altitudeData[:4]) + list(drone.pressure) ) 177 | 178 | for sp in allStripPoses( rects, highResolution=drone.videoHighResolution ): 179 | sPose = Pose( *oldPose ).add(tiltCompensation).add( sp ) 180 | viewlog.dumpBeacon( sPose.coord(), index=3 ) 181 | viewlog.dumpObstacles( [[(sPose.x-0.15*math.cos(sPose.heading), 
sPose.y-0.15*math.sin(sPose.heading)), 182 | (sPose.x+0.15*math.cos(sPose.heading), sPose.y+0.15*math.sin(sPose.heading))]] ) 183 | 184 | refCircle,refLine = loc.getRefCircleLine( Pose(drone.coord[0], drone.coord[1], drone.heading) ) 185 | if refCircle == None and refLine == None and virtualRefCircle != None: 186 | refCircle = virtualRefCircle 187 | # error definition ... if you substract that you get desired position or angle 188 | # error is taken from the path point of view, x-path direction, y-positive left, angle-anticlockwise 189 | errY, errA = 0.0, 0.0 190 | assert refCircle == None or refLine == None # they cannot be both active at the same time 191 | if refCircle: 192 | if pathType == PATH_TURN_LEFT: 193 | errY = refCircle[1] - math.hypot( drone.coord[0]-refCircle[0][0], drone.coord[1]-refCircle[0][1] ) 194 | errA = normalizeAnglePIPI( - math.atan2( refCircle[0][1] - drone.coord[1], refCircle[0][0] - drone.coord[0] ) 195 | - math.radians(-90) + drone.heading ) 196 | if pathType == PATH_TURN_RIGHT: 197 | errY = math.hypot( drone.coord[0]-refCircle[0][0], drone.coord[1]-refCircle[0][1] ) - refCircle[1] 198 | errA = normalizeAnglePIPI( - math.atan2( refCircle[0][1] - drone.coord[1], refCircle[0][0] - drone.coord[0] ) 199 | - math.radians(90) + drone.heading ) 200 | if refLine: 201 | errY = refLine.signedDistance( drone.coord ) 202 | errA = normalizeAnglePIPI( drone.heading - refLine.angle ) 203 | 204 | # get the height first 205 | if drone.coord[2] < desiredHeight - 0.1 and drone.time-startTime < 5.0: 206 | sx = 0.0 207 | if refCircle == None and refLine == None and virtualRefCircle == None: 208 | sx = 0.0 # wait for Z-up 209 | if drone.coord[2] > desiredHeight - 0.1: 210 | print "USING VIRTUAL LEFT TURN CIRCLE!" 
211 | circCenter = Pose( drone.coord[0], drone.coord[1], drone.heading ).add( Pose(0.0, REF_CIRCLE_RADIUS, 0 )).coord() 212 | viewlog.dumpBeacon( circCenter, index=0 ) 213 | virtualRefCircle = circCenter, REF_CIRCLE_RADIUS 214 | 215 | # error correction 216 | # the goal is to have errY and errA zero in 1 second -> errY defines desired speed at given distance from path 217 | sy = max( -0.2, min( 0.2, -errY-drone.vy ))/2.0 218 | 219 | # there is no drone.va (i.e. derivative of heading) available at the moment ... 220 | sa = max( -0.1, min( 0.1, -errA/2.0 ))*1.35*(desiredSpeed/0.4) # originally set for 0.4=OK 221 | 222 | # print "%0.2f\t%d\t%0.2f\t%0.2f\t%0.2f" % (errY, int(math.degrees(errA)), drone.vy, sy, sa) 223 | prevTime = drone.time 224 | drone.moveXYZA( sx, sy, sz, sa ) 225 | maxControlGap = max( drone.time - prevTime, maxControlGap ) 226 | poseHistory.append( (drone.time, (drone.coord[0], drone.coord[1], drone.heading), (drone.angleFB, drone.angleLR)) ) 227 | print "NAVI-OFF", drone.time - startTime 228 | drone.hover(0.5) 229 | drone.land() 230 | drone.setVideoChannel( front=True ) 231 | except ManualControlException, e: 232 | print "ManualControlException" 233 | if drone.ctrlState == 3: # CTRL_FLYING=3 ... i.e. 
stop the current motion 234 | drone.hover(0.1) 235 | drone.land() 236 | drone.wait(1.0) 237 | drone.stopVideo() 238 | print "MaxVideoDelay", maxVideoDelay 239 | print "MaxControlGap", maxControlGap 240 | print "Loops", len(loops)-1, [int(now-prev) for prev,now in zip(loops[:-1],loops[1:])] 241 | print "Battery", drone.battery 242 | 243 | 244 | if __name__ == "__main__": 245 | import launcher 246 | launcher.launch( sys.argv, AirRaceDrone, competeAirRace ) 247 | 248 | -------------------------------------------------------------------------------- /airrace_test.py: -------------------------------------------------------------------------------- 1 | from airrace import * 2 | import unittest 3 | 4 | 5 | class AirRaceTest( unittest.TestCase ): 6 | def testFilterRectanbles( self ): 7 | self.assertEqual( filterRectangles([]), [] ) 8 | self.assertEqual( filterRectangles( 9 | [((817, 597), (10, 12), -26), ((223, 440), (319, 60), -88)], maxWidth=500), 10 | [((223, 440), (319, 60), -88)] ) 11 | self.assertEqual( filterRectangles( [((298, 508), (58, 319), -15)], maxWidth=500 ), [((298, 508), (319, 58), 75)] ) 12 | self.assertEqual( filterRectangles( [((982, 492), (29, 17), -45), ((951, 507), (60, 84), -69)] ), [] ) 13 | 14 | def testStripPose( self ): 15 | s = 0.3/319. 16 | self.assertEqual( repr(stripPose( ((223, 440), (319, 60), -88) )), repr(Pose(s*(720/2-440), s*(-223+1280/2), math.radians(-2))) ) 17 | self.assertEqual( repr(stripPose( ((298, 508), (319, 58), 75) )), repr(Pose(s*(720/2-508), s*(-298+1280/2), math.radians(15))) ) 18 | s = 0.3/float(319/2) 19 | self.assertEqual( repr(stripPose( ((298/2, 508/2), (319/2, 58/2), 75), highResolution=False )), repr(Pose(s*((720/2-508)/2), s*((-298+1280/2)/2), math.radians(15))) ) 20 | 21 | def testZScalingBug( self ): 22 | sWidth = 0.3/189. 23 | s = 0.05/53. 
24 | self.assertEqual( repr(stripPose( ((898, 257), (189, 53), -58) )), repr(Pose(s*(720/2-257), s*(-898+1280/2), math.radians(-32))) ) 25 | self.assertEqual( repr(allStripPoses( [((898, 257), (189, 53), -58)] )[0]), repr(Pose(s*(720/2-257), s*(-898+1280/2), math.radians(-32))) ) # not visible :( 26 | 27 | def testAllStripPoses( self ): 28 | # video_rec_140325_200709.bin 435/15 29 | asp = allStripPoses( [((318, 201), (134, 22), 82), ((293, 43), (90, 18), 82)], highResolution=False ) 30 | self.assertEqual( len(asp), 2 ) 31 | self.assertAlmostEqual( abs(asp[1].sub( asp[0] ).y), 0.01, 2 ) # straight line - should be less than 0.25 32 | self.assertAlmostEqual( abs(asp[1].sub( asp[0] ).x), 0.36, 2 ) # ... too far individual was 0.51 33 | self.assertEqual( allStripPoses([]), [] ) 34 | 35 | def testRemoveDuplicities( self ): 36 | self.assertEqual( removeDuplicities([]), [] ) 37 | self.assertEqual( removeDuplicities( [((400, 162), (128, 20), -72), ((400, 161), (131, 24), -72)] ), [((400, 162), (128, 20), -72)] ) 38 | self.assertEqual( removeDuplicities( [((209, 292), (133, 30), -87), ((201, 136), (95, 20), 77), ((198, 123), (143, 27), 78)] ), 39 | [((209, 292), (133, 30), -87), ((198, 123), (143, 27), 78)] ) 40 | # MSER in [((599, 241), (89, 20), -18), ((353, 183), (78, 18), 90), ((352, 161), (128,23), 88)], frame 61? 
41 | self.assertEqual( removeDuplicities([((599, 241), (89, 20), -18), ((353, 183), (78, 18), 90), ((352, 161), (128,23), 88)]), 42 | [((599, 241), (89, 20), -18), ((352, 161), (128,23), 88)] ) 43 | 44 | def testRemoveDuplicitiesMistakes( self ): 45 | # video_rec_140317_130554.bin : frame 14 - status quo 46 | #self.assertEqual( removeDuplicities( [((25, 169), (50, 109), 0), ((35, 139), (279, 71), -90), ((64, 338), (77, 22), -45), 47 | # ((65, 338), (82, 26), -45), ((52, 340), (111, 46), -24), ((214, 47), (54, 23), -78), ((216, 37), (76, 25), -78), 48 | # ((216, 37), (81, 28), -79), ((216, 48), (104, 39), -77), ((155, 196), (160, 26), -63), ((141, 188), (163, 62), -63)] ), 49 | # [((25, 169), (50, 109), 0), ((35, 139), (279, 71), -90), ((52, 340), (111, 46), -24), ((216, 48), (104, 39), -77), ((141, 188), (163, 62), -63)] ) 50 | # filter single place (wrong example, on the border of image frame) 51 | #self.assertEqual( removeDuplicities( [((214, 47), (54, 23), -78), ((216, 37), (76, 25), -78), 52 | # ((216, 37), (81, 28), -79), ((216, 48), (104, 39), -77),] ), [((216, 48), (104, 39), -77)] ) 53 | # well this is better example, with whole strip - WRONG SELECTION! 
54 | self.assertEqual( removeDuplicities( [((155, 196), (160, 26), -63), ((141, 188), (163, 62), -63)] ), [((155, 196), (160, 26), -63)] ) 55 | 56 | def testRemoveLargeStrips( self ): 57 | # it looks like that big strips should be removed 58 | self.assertEqual( filterRectangles([((452, 143), (289, 68), 88)]), [] ) 59 | 60 | if __name__ == "__main__": 61 | unittest.main() 62 | 63 | -------------------------------------------------------------------------------- /ardrone2_test.py: -------------------------------------------------------------------------------- 1 | from ardrone2 import * 2 | import unittest 3 | import math 4 | 5 | class Ardrone2Test( unittest.TestCase ): 6 | def tupleAlmostEqual( self, a, b ): 7 | for x,y in zip(a,b): 8 | self.assertAlmostEqual(x,y, delta=0.00001, msg=str((a,b))) 9 | def testRelCoord( self ): 10 | self.assertEqual( relCoord( 0, 0, 0 ), (0,0) ) 11 | self.assertEqual( relCoord( 0.5, 0.2, 0 ), (-0.5,-0.2) ) 12 | self.tupleAlmostEqual( relCoord( 1, 0, math.radians(90) ), (0,1) ) 13 | self.tupleAlmostEqual( 14 | relCoord( 0.15139901571579156, 0.17409094203596967, math.radians(101.528)), 15 | (-0.14032242535469552, 0.18313638360865858) ) 16 | 17 | self.tupleAlmostEqual( 18 | relCoord( 0.15139901571579156, 0.17409094203596967, math.radians(90)), 19 | (-0.17409094203596967, 0.15139901571579156) ) 20 | 21 | def testSonarScan( self ): 22 | s = SonarScan() 23 | self.assert_( s.maxGap( 1.0 ) == None ) # no info 24 | 25 | s.arr = [(0, 1.), (math.radians(90), 2.0)] 26 | self.assertAlmostEqual( s.maxGap( 1.5 ), math.radians(90) ) # the best known empty direction (not enough data) 27 | self.assert_( s.maxGap( 0.5 ) == None ) # no obstacles 28 | self.assert_( s.maxGap( 2.5 ) == None ) # no free space 29 | 30 | s.arr = [(math.radians(x),y) for (x,y) in [(0,1),(90,2),(180,1),(270,1)]] 31 | self.assertAlmostEqual( s.maxGap( 1.5 ), math.radians(90) ) # the best known empty direction (not enough data) 32 | 33 | s.arr = [(math.radians(x),y) for (x,y) in 
[(0,1),(45,2),(90,2),(180,1),(270,1)]] 34 | self.assertAlmostEqual( s.maxGap( 1.5 ), math.radians((45+90)/2.) ) # the best known empty direction (not enough data) 35 | 36 | s.arr = [(math.radians(x),y) for (x,y) in [(0,2),(10,2),(90,1),(180,1),(270,1),(350,2)]] 37 | self.assertAlmostEqual( s.maxGap( 1.5 ), math.radians(0) ) # singularity 38 | 39 | s.arr = [(math.radians(x),y) for (x,y) in [(0,1),(10,2),(20,1),(180,1),(270,2),(350,2)]] 40 | self.assertAlmostEqual( s.maxGap( 1.5 ), math.radians((270+350)/2.-360) ) # 2nd gap 41 | 42 | self.assert_( s.maxGap( 1.5, widthLimit=math.radians(90))==None ) 43 | 44 | if __name__ == "__main__": 45 | unittest.main() 46 | 47 | -------------------------------------------------------------------------------- /cimg/cimg.c: -------------------------------------------------------------------------------- 1 | /* C-image processing with numpy arrays 2 | * base on 3 | * http://wiki.scipy.org/Cookbook/C_Extensions/NumPy_arrays */ 4 | 5 | #include "Python.h" 6 | #include "numpy/arrayobject.h" 7 | #include 8 | 9 | /* #### Globals #################################### */ 10 | 11 | static PyObject *green(PyObject *self, PyObject *args); 12 | static PyObject *avoidGreen(PyObject *self, PyObject *args); 13 | 14 | /* .... C vector utility functions ..................*/ 15 | int not_image(PyArrayObject *vec); 16 | 17 | 18 | /* ==== Set up the methods table ====================== */ 19 | static PyMethodDef cimgMethods[] = { 20 | {"green", green, METH_VARARGS}, 21 | {"avoidGreen", avoidGreen, METH_VARARGS}, 22 | {NULL, NULL} /* Sentinel - marks the end of this structure */ 23 | }; 24 | 25 | /* ==== Initialize the C_test functions ====================== */ 26 | // Module name must be _C_arraytest in compile and linked 27 | void initcimg() { 28 | (void) Py_InitModule("cimg", cimgMethods); 29 | import_array(); // Must be present for NumPy. Called first after above line. 
30 | } 31 | 32 | static PyObject *green(PyObject *self, PyObject *args) 33 | { 34 | PyArrayObject *vecio; // The python objects to be extracted from the args 35 | unsigned char *ptr; // The C vectors to be created to point to the 36 | // python vectors, cin and cout point to the row 37 | // of vecin and vecout, respectively 38 | int i,n; 39 | double frac; 40 | unsigned char r,g,b; 41 | int count = 0; 42 | 43 | /* Parse tuples separately since args will differ between C fcns */ 44 | if (!PyArg_ParseTuple(args, "O!d", &PyArray_Type, &vecio, &frac)) return NULL; 45 | if (NULL == vecio) return NULL; 46 | 47 | if (not_image(vecio)) return NULL; 48 | 49 | ptr = (unsigned char*)vecio->data; 50 | 51 | /* Get vector dimension. */ 52 | n = vecio->dimensions[0] * vecio->dimensions[1]; 53 | 54 | /* Operate on the vectors */ 55 | for( i = 0; i < n; i++ ) 56 | { 57 | r = ptr[2]; 58 | g = ptr[1]; 59 | b = ptr[0]; 60 | if( g > frac * r && g > frac * b ) 61 | { 62 | ptr[0] = 0; 63 | ptr[1] = 0xFF; 64 | ptr[2] = 0; 65 | count++; 66 | } 67 | ptr += 3; 68 | } 69 | 70 | return Py_BuildValue("i", count); 71 | } 72 | 73 | 74 | static PyObject *avoidGreen(PyObject *self, PyObject *args) 75 | { 76 | PyArrayObject *vecio; // The python objects to be extracted from the args 77 | unsigned char *ptr; // The C vectors to be created to point to the 78 | // python vectors, cin and cout point to the row 79 | // of vecin and vecout, respectively 80 | int fromX, toX, fromY, toY, limit; 81 | double frac; 82 | int x, y; 83 | unsigned char r,g,b; 84 | int count = 0; 85 | 86 | /* Parse tuples separately since args will differ between C fcns */ 87 | if (!PyArg_ParseTuple(args, "O!iiiiid", &PyArray_Type, &vecio, &fromX, &toX, &fromY, &toY, &limit, &frac)) return NULL; 88 | if (NULL == vecio) return NULL; 89 | 90 | if (not_image(vecio)) return NULL; 91 | 92 | if( fromX < toX ) 93 | { 94 | for( x = fromX; x < toX; x++ ) 95 | { 96 | for( y = fromY; y < toY; y++ ) 97 | { 98 | ptr = (unsigned char*)vecio->data 
+ 3*(x + y*vecio->dimensions[1]); 99 | r = ptr[2]; 100 | g = ptr[1]; 101 | b = ptr[0]; 102 | if( g > frac * r && g > frac * b ) 103 | { 104 | ptr[0] = 0; 105 | ptr[1] = 0xFF; 106 | ptr[2] = 0; 107 | count++; 108 | } 109 | else 110 | { 111 | ptr[0] = 0xFF; 112 | ptr[1] = 0xFF; 113 | ptr[2] = 0xFF; 114 | } 115 | } 116 | if( count > limit ) 117 | break; 118 | } 119 | } 120 | else 121 | { 122 | for( x = fromX; x > toX; x-- ) 123 | { 124 | for( y = fromY; y < toY; y++ ) 125 | { 126 | ptr = (unsigned char*)vecio->data + 3*(x + y*vecio->dimensions[1]); 127 | r = ptr[2]; 128 | g = ptr[1]; 129 | b = ptr[0]; 130 | if( g > frac * r && g > frac * b ) 131 | { 132 | ptr[0] = 0; 133 | ptr[1] = 0xFF; 134 | ptr[2] = 0; 135 | count++; 136 | } 137 | else 138 | { 139 | ptr[0] = 0xFF; 140 | ptr[1] = 0xFF; 141 | ptr[2] = 0xFF; 142 | } 143 | } 144 | if( count > limit ) 145 | break; 146 | } 147 | } 148 | 149 | return Py_BuildValue("i", x); 150 | } 151 | 152 | 153 | int not_image(PyArrayObject *vec) { 154 | if (vec->descr->type_num != NPY_UINT8 || vec->nd != 3) { 155 | PyErr_SetString(PyExc_ValueError, 156 | "In not_image: array must be of type uint8 and 3 dimensional (n)."); 157 | return 1; } 158 | return 0; 159 | } 160 | 161 | -------------------------------------------------------------------------------- /cimg/setup.py: -------------------------------------------------------------------------------- 1 | from distutils.core import setup, Extension 2 | 3 | module1 = Extension('cimg', 4 | sources = ['cimg.c'], 5 | include_dirs = [r'c:\Python27\Lib\site-packages\numpy\core\include'] 6 | ) 7 | 8 | setup (name = 'CImgPackage', 9 | version = '0.1', 10 | description = 'C-image helper function', 11 | ext_modules = [module1]) 12 | 13 | -------------------------------------------------------------------------------- /cvideo/_readme.txt: -------------------------------------------------------------------------------- 1 | This is helper for reading videos frame by frame with possibility to skip 
// convert.cpp — standalone tool: reads raw H.264 frames from
// frames/frameNNNN.bin and muxes them into an AVI container via libav.

#include   // NOTE(review): header name lost in text extraction — presumably <stdio.h>; confirm against the repository

//#ifndef INT64_C
//#define INT64_C(c) (c ## LL)
//#define UINT64_C(c) (c ## ULL)
//#endif

//#define _M
#define _M printf( "%s(%d) : MARKER\n", __FILE__, __LINE__ )  // debug progress marker

extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
};


// Muxer state shared by create()/write_frame()/destroy().
AVFormatContext *fc = 0;
int vi = -1, waitkey = 1;  // vi = video stream index; waitkey = drop frames until first key frame

// < 0 = error
// 0 = I-Frame
// 1 = P-Frame
// 2 = B-Frame
// 3 = S-Frame
int getVopType( const void *p, int len )
{
    if ( !p || 6 >= len )
        return -1;

    unsigned char *b = (unsigned char*)p;

    // Verify NAL marker (start code 00 00 01, possibly 00 00 00 01)
    if ( b[ 0 ] || b[ 1 ] || 0x01 != b[ 2 ] )
    {   b++;
        if ( b[ 0 ] || b[ 1 ] || 0x01 != b[ 2 ] )
            return -1;
    } // end if

    b += 3;

    // Verify VOP id (MPEG-4 path)
    if ( 0xb6 == *b )
    {   b++;
        return ( *b & 0xc0 ) >> 6;
    } // end if

    // H.264 NAL byte values
    switch( *b )
    {   case 0x65 : return 0;
        case 0x61 : return 1;
        case 0x01 : return 2;
    } // end switch

    return -1;
}

// Append one encoded frame to the open container; frames are discarded
// until the first key frame has been seen.
void write_frame( const void* p, int len )
{
    if ( 0 > vi )
        return;

    AVStream *pst = fc->streams[ vi ];

    // Init packet
    AVPacket pkt;
    av_init_packet( &pkt );
    // NOTE(review): 0 >= getVopType() marks the error case (-1) as a key
    // frame too — confirm this is intended.
    pkt.flags |= ( 0 >= getVopType( p, len ) ) ? AV_PKT_FLAG_KEY : 0;
    pkt.stream_index = pst->index;
    pkt.data = (uint8_t*)p;
    pkt.size = len;

    // Wait for key frame (the else binds to the inner if: clears waitkey
    // once the first key frame arrives)
    if ( waitkey )
        if ( 0 == ( pkt.flags & AV_PKT_FLAG_KEY ) )
            return;
        else
            waitkey = 0;

    pkt.dts = AV_NOPTS_VALUE;
    pkt.pts = AV_NOPTS_VALUE;

//  av_write_frame( fc, &pkt );
    av_interleaved_write_frame( fc, &pkt );
}

// Flush the trailer, close the output file and release the container.
void destroy()
{
    waitkey = 1;
    vi = -1;

    if ( !fc )
        return;

    _M; av_write_trailer( fc );

    if ( fc->oformat && !( fc->oformat->flags & AVFMT_NOFILE ) && fc->pb )
        avio_close( fc->pb );

    // Free the stream
    _M; av_free( fc );

    fc = 0;
    _M;
}

// Return the raw NAL unit header byte (after the start code), or -1.
int get_nal_type( void *p, int len )
{
    if ( !p || 5 >= len )
        return -1;

    unsigned char *b = (unsigned char*)p;

    // Verify NAL marker
    if ( b[ 0 ] || b[ 1 ] || 0x01 != b[ 2 ] )
    {   b++;
        if ( b[ 0 ] || b[ 1 ] || 0x01 != b[ 2 ] )
            return -1;
    } // end if

    b += 3;

    return *b;
}

// Open "test.avi" with a single H.264 video stream.  Refuses to start on
// anything but a NAL whose header byte is 0x27 (SPS with nal_ref_idc=1).
int create( void *p, int len )
{
    if ( 0x27 != (0x3F & get_nal_type( p, len )) )
    {
        fprintf(stderr, "get_nal_type is not 0x27, exiting\n" );
        return -1;
    }

    destroy();

    const char *file = "test.avi";
    AVCodecID codec_id = CODEC_ID_H264;
//  CodecID codec_id = CODEC_ID_MPEG4;
    int br = 1000000;       // bit rate
    int w = 640; //1280; //480;
    int h = 368; //720; //354;
    int fps = 30; //15;

    // Create container
    _M; AVOutputFormat *of = av_guess_format( 0, file, 0 );
    fc = avformat_alloc_context();
    fc->oformat = of;
    strcpy( fc->filename, file );

    // Add video stream
    _M; AVStream *pst = av_new_stream( fc, 0 );
    vi = pst->index;

    AVCodecContext *pcc = pst->codec;
    _M; avcodec_get_context_defaults3( pcc, NULL );
    pcc->codec_type = AVMEDIA_TYPE_VIDEO;

    pcc->codec_id = codec_id;
    pcc->bit_rate = br;
    pcc->width = w;
    pcc->height = h;
    pcc->time_base.num = 1;
    pcc->time_base.den = fps;

    // Init container
    //UNRESOLVED!!! _M; av_set_parameters( fc, 0 );

    if ( !( fc->oformat->flags & AVFMT_NOFILE ) )
        avio_open( &fc->pb, fc->filename, AVIO_FLAG_WRITE );

    _M; avformat_write_header( fc, NULL );

    _M; return 1;
}

// Feed frames/frame0000.bin, frame0001.bin, ... until a file is missing.
int main( int argc, char** argv )
{
    int f = 0, sz = 0;
    char fname[ 256 ] = { 0 };
    char buf[ 128 * 1024 ];

    av_log_set_level( AV_LOG_ERROR );
    av_register_all();

    do
    {
        // Raw frames in v0.raw, v1.raw, v2.raw, ...
//      sprintf( fname, "rawvideo/v%lu.raw", f++ );
        // NOTE(review): %lu with an int argument is a format mismatch —
        // works on common 32-bit ABIs but is undefined behavior; confirm.
        sprintf( fname, "frames/frame%04lu.bin", f++ );
        printf( "%s\n", fname );

        FILE *fd = fopen( fname, "rb" );
        if ( !fd )
            sz = 0;
        else
        {
            // leave libav the required zero padding at the buffer end
            sz = fread( buf, 1, sizeof( buf ) - FF_INPUT_BUFFER_PADDING_SIZE, fd );
            if ( 0 < sz )
            {
                memset( &buf[ sz ], 0, FF_INPUT_BUFFER_PADDING_SIZE );

                if ( !fc )
                    create( buf, sz );

                if ( fc )
                    write_frame( buf, sz );

            } // end if

            fclose( fd );

        } // end else

    } while ( 0 < sz );

    destroy();
}
18 | 19 | // GLOBALS FOR NOW 20 | AVCodec *codecH264, *codecMPEG4; 21 | AVCodecContext *cH264, *cMPEG4; 22 | int got_picture; 23 | AVFrame *picture; 24 | AVFrame *pictureRGB; 25 | AVPacket avpkt; 26 | struct SwsContext *img_convert_ctx; 27 | int srcX,srcY,dstX,dstY; 28 | uint8_t*bufferBGR; 29 | 30 | static PyObject *init(PyObject *self, PyObject *args) 31 | { 32 | // if (!PyArg_ParseTuple(args, "i", &g_myInt)) return NULL; 33 | 34 | avcodec_register_all(); 35 | av_init_packet(&avpkt); 36 | 37 | codecH264 = avcodec_find_decoder(CODEC_ID_H264); 38 | codecMPEG4 = avcodec_find_decoder(CODEC_ID_MPEG4); 39 | 40 | if (!codecH264 || !codecMPEG4) 41 | { 42 | fprintf(stderr, "codec not found\n"); 43 | return Py_BuildValue( "i", 0 ); 44 | } 45 | cH264 = avcodec_alloc_context3(codecH264); 46 | cMPEG4 = avcodec_alloc_context3(codecMPEG4); 47 | picture = avcodec_alloc_frame(); 48 | pictureRGB = avcodec_alloc_frame(); 49 | 50 | srcX=dstX = 640; 51 | srcY=dstY = 368; 52 | 53 | img_convert_ctx = sws_getContext(srcX, srcY,PIX_FMT_YUV420P, dstX, dstY, PIX_FMT_BGR24, SWS_BICUBIC, NULL, NULL, NULL); 54 | if (avcodec_open2(cH264, codecH264, NULL) < 0) { 55 | fprintf(stderr, "could not open codec H264\n"); 56 | return Py_BuildValue( "i", 0 ); 57 | } 58 | if (avcodec_open2(cMPEG4, codecMPEG4, NULL) < 0) { 59 | fprintf(stderr, "could not open codec MPEG\n"); 60 | return Py_BuildValue( "i", 0 ); 61 | } 62 | 63 | bufferBGR = (uint8_t*)av_mallocz(avpicture_get_size(PIX_FMT_BGR24, dstX, dstY) * sizeof(uint8_t)*10); 64 | avpicture_fill((AVPicture *)pictureRGB,bufferBGR,PIX_FMT_RGB24, dstX, dstY); 65 | 66 | return Py_BuildValue( "i", 1 ); 67 | } 68 | 69 | static PyObject *frame(PyObject *self, PyObject *args) 70 | { 71 | PyArrayObject *numpyImg; 72 | int width, height; 73 | int type; 74 | 75 | uint8_t* buf = 0; 76 | int len; 77 | if (!PyArg_ParseTuple(args, "O!is#", &PyArray_Type, &numpyImg, &type, &buf, &len)) return NULL; 78 | if (NULL == numpyImg) return NULL; 79 | if (not_image(numpyImg)) 
return NULL; 80 | 81 | height = numpyImg->dimensions[0]; 82 | width = numpyImg->dimensions[1]; 83 | 84 | if( width != srcX || height != srcY ) 85 | { 86 | srcX = dstX = width; 87 | srcY = dstY = height; 88 | 89 | av_freep( bufferBGR ); 90 | bufferBGR = (uint8_t*)av_mallocz(avpicture_get_size(PIX_FMT_BGR24, dstX, dstY) * sizeof(uint8_t)*10); 91 | avpicture_fill((AVPicture *)pictureRGB,bufferBGR,PIX_FMT_RGB24, dstX, dstY); 92 | 93 | sws_freeContext(img_convert_ctx); 94 | img_convert_ctx = sws_getContext(srcX, srcY,PIX_FMT_YUV420P, dstX, dstY, PIX_FMT_BGR24, SWS_BICUBIC, NULL, NULL, NULL); 95 | } 96 | 97 | avpkt.size = len; 98 | avpkt.data = buf; 99 | avpkt.flags = 0; 100 | if (type == 1) avpkt.flags=AV_PKT_FLAG_KEY; 101 | 102 | AVCodecContext *cSelected; // not allocated, just temporary pointer 103 | 104 | // if( pave.video_codec == 4 ) 105 | cSelected = cH264; 106 | //else 107 | // cSelected = cMPEG4; 108 | avcodec_decode_video2(cSelected, picture, &got_picture, &avpkt); 109 | 110 | if (got_picture > 0) 111 | { 112 | sws_scale(img_convert_ctx,picture->data, picture->linesize, 0, srcY-8,pictureRGB->data,pictureRGB->linesize); 113 | memcpy(numpyImg->data, pictureRGB->data[0], cSelected->width * ((cSelected->height == 368) ? 360 : cSelected->height) * sizeof(uint8_t) * 3); 114 | return Py_BuildValue( "i", 1 ); 115 | } 116 | return Py_BuildValue( "i", 0 ); 117 | } 118 | 119 | 120 | static PyMethodDef cvideoMethods[] = { 121 | {"init", init, METH_VARARGS}, 122 | {"frame", frame, METH_VARARGS}, 123 | {NULL, NULL} /* Sentinel - marks the end of this structure */ 124 | }; 125 | 126 | extern "C" void initcvideo() 127 | { 128 | (void) Py_InitModule("cvideo", cvideoMethods); 129 | import_array(); // Must be present for NumPy. Called first after above line. 
130 | } 131 | -------------------------------------------------------------------------------- /cvideo/cvideo.sln: -------------------------------------------------------------------------------- 1 |  2 | Microsoft Visual Studio Solution File, Format Version 10.00 3 | # Visual Studio 2008 4 | Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "cvideo", "cvideo.vcproj", "{FC3C2488-10C6-45F1-A04C-3B5F3F2FEE82}" 5 | EndProject 6 | Global 7 | GlobalSection(SolutionConfigurationPlatforms) = preSolution 8 | Debug|Win32 = Debug|Win32 9 | Release|Win32 = Release|Win32 10 | EndGlobalSection 11 | GlobalSection(ProjectConfigurationPlatforms) = postSolution 12 | {FC3C2488-10C6-45F1-A04C-3B5F3F2FEE82}.Debug|Win32.ActiveCfg = Debug|Win32 13 | {FC3C2488-10C6-45F1-A04C-3B5F3F2FEE82}.Debug|Win32.Build.0 = Debug|Win32 14 | {FC3C2488-10C6-45F1-A04C-3B5F3F2FEE82}.Release|Win32.ActiveCfg = Release|Win32 15 | {FC3C2488-10C6-45F1-A04C-3B5F3F2FEE82}.Release|Win32.Build.0 = Release|Win32 16 | EndGlobalSection 17 | GlobalSection(SolutionProperties) = preSolution 18 | HideSolutionNode = FALSE 19 | EndGlobalSection 20 | EndGlobal 21 | -------------------------------------------------------------------------------- /cvideo/cvideo.vcproj: -------------------------------------------------------------------------------- 1 | 2 | 11 | 12 | 15 | 16 | 17 | 18 | 19 | 26 | 29 | 32 | 35 | 38 | 41 | 53 | 56 | 59 | 62 | 70 | 73 | 76 | 79 | 82 | 85 | 88 | 91 | 92 | 100 | 103 | 106 | 109 | 112 | 115 | 126 | 129 | 132 | 135 | 144 | 147 | 150 | 153 | 156 | 159 | 162 | 165 | 166 | 167 | 168 | 169 | 170 | 175 | 178 | 179 | 180 | 185 | 186 | 191 | 192 | 193 | 194 | 195 | 196 | -------------------------------------------------------------------------------- /cvideo/main.cpp: -------------------------------------------------------------------------------- 1 | extern "C" { 2 | #include 3 | #include 4 | } 5 | 6 | // OpenCV 7 | #include 8 | #include // Video write 9 | 10 | using namespace cv; 11 | 12 | 13 | #define 
// main.cpp — offline viewer: parse a recorded PaVE stream, decode each
// frame with libav, display it with OpenCV and dump numbered JPEGs.
extern "C" {
#include   // NOTE(review): header names lost in extraction — presumably libavcodec/libswscale; confirm
#include 
}

// OpenCV
#include   // NOTE(review): header names lost in extraction
#include   // Video write

using namespace cv;


#define INBUF_SIZE 4096

typedef unsigned char uint8_t;
typedef unsigned short uint16_t;
typedef unsigned int uint32_t;

// Parrot PaVE header — must match the on-wire layout exactly (packed).
#pragma pack(push,1)
typedef struct {
    uint8_t signature[4];            /* "PaVE" - used to identify the start of frame */
    uint8_t version;                 /* Version code */
    uint8_t video_codec;             /* Codec of the following frame */
    uint16_t header_size;            /* Size of the parrot_video_encapsulation_t */
    uint32_t payload_size;           /* Amount of data following this PaVE */
    uint16_t encoded_stream_width;   /* ex: 640 */
    uint16_t encoded_stream_height;  /* ex: 368 */
    uint16_t display_width;          /* ex: 640 */
    uint16_t display_height;         /* ex: 360 */
    uint32_t frame_number;           /* Frame position inside the current stream */
    uint32_t timestamp;              /* In milliseconds */
    uint8_t total_chuncks;           /* Number of UDP packets containing the current decodable payload - currently unused */
    uint8_t chunck_index ;           /* Position of the packet - first chunk is #0 - currenty unused*/
    uint8_t frame_type;              /* I-frame, P-frame - parrot_video_encapsulation_frametypes_t */
    uint8_t control;                 /* Special commands like end-of-stream or advertised frames */
    uint32_t stream_byte_position_lw; /* Byte position of the current payload in the encoded stream - lower 32-bit word */
    uint32_t stream_byte_position_uw; /* Byte position of the current payload in the encoded stream - upper 32-bit word */
    uint16_t stream_id;              /* This ID indentifies packets that should be recorded together */
    uint8_t total_slices;            /* number of slices composing the current frame */
    uint8_t slice_index ;            /* position of the current slice in the frame */
    uint8_t header1_size;            /* H.264 only : size of SPS inside payload - no SPS present if value is zero */
    uint8_t header2_size;            /* H.264 only : size of PPS inside payload - no PPS present if value is zero */
    uint8_t reserved2[2];            /* Padding to align on 48 bytes */
    uint32_t advertised_size;        /* Size of frames announced as advertised frames */
    uint8_t reserved3[12];           /* Padding to align on 64 bytes */
    uint32_t md_hack_68;             /* extra word seen in logged streams */
} /*__attribute__ ((packed))*/ parrot_video_encapsulation_t;
#pragma pack(pop)

int main(int argc, char **argv)
{
    bool saveVideo = false;
    if( argc < 2 )
    {
        fprintf(stderr, "Missing filename\n");
        exit(-1);
    }
    parrot_video_encapsulation_t pave;
    int h264MoveIndex = -1;   // counts P-frames after the first I-frame

    AVCodec *codecH264, *codecMPEG4;
    AVCodecContext *cH264, *cMPEG4;
    int got_picture;
    AVFrame *picture;
    AVFrame *pictureRGB;
    AVPacket avpkt;
    struct SwsContext *img_convert_ctx;
    int srcX,srcY,dstX,dstY;

    avcodec_register_all();
    av_init_packet(&avpkt);

    codecH264 = avcodec_find_decoder(CODEC_ID_H264);
    codecMPEG4 = avcodec_find_decoder(CODEC_ID_MPEG4);

    if (!codecH264 || !codecMPEG4)
    {
        fprintf(stderr, "codec not found\n");
        exit(1);
    }
    cH264 = avcodec_alloc_context3(codecH264);
    cMPEG4 = avcodec_alloc_context3(codecMPEG4);
    picture = avcodec_alloc_frame();
    pictureRGB = avcodec_alloc_frame();

    FILE *fd = fopen( argv[1], "rb" );
    if( fd == NULL )
        return -1;

    // CODEC fourcc "H264" packed into an int
    union { int v; char c[5];} uEx ;
    uEx.c[0] = 'H';
    uEx.c[1] = '2';
    uEx.c[2] = '6';
    uEx.c[3] = '4';
    uEx.c[4]='\0';

    VideoWriter outputVideo;
    if( saveVideo )
    {
        Size S = Size( 640, 360 );
//      outputVideo.open(NAME , ex, inputVideo.get(CV_CAP_PROP_FPS),S, true);
        const string source = argv[1];                   // the source file name
        string::size_type pAt = source.find_last_of('.'); // Find extension point
//      const string NAME = string(argv[1])+string(".avi");
        const string NAME = source.substr(0, pAt) + ".avi"; // Form the new name with container
//      outputVideo.open( "output.avi" , uEx.v, 30,S, true);
        outputVideo.open( "output.avi" , -1, 30,S, true);
    }

    srcX=dstX = 640;
    srcY=dstY = 368;

    img_convert_ctx = sws_getContext(srcX, srcY,PIX_FMT_YUV420P, dstX, dstY, PIX_FMT_BGR24, SWS_BICUBIC, NULL, NULL, NULL);
    if (avcodec_open2(cH264, codecH264, NULL) < 0) {
        fprintf(stderr, "could not open codec H264\n");
        exit(1);
    }
    if (avcodec_open2(cMPEG4, codecMPEG4, NULL) < 0) {
        fprintf(stderr, "could not open codec MPEG\n");
        exit(1);
    }

    uint8_t*bufferBGR = (uint8_t*)av_mallocz(avpicture_get_size(PIX_FMT_BGR24, dstX, dstY) * sizeof(uint8_t)*10);
    avpicture_fill((AVPicture *)pictureRGB,bufferBGR,PIX_FMT_RGB24, dstX, dstY);


    IplImage *img;
    img = cvCreateImage(cvSize(640,360), IPL_DEPTH_8U, 3);  // c is not set yet!!!
    if (!img)
    {
        return -2;
    }

    // Clear the image
    cvZero(img);

    uint8_t* buf = 0;
    int len;
    int type;

    for(;;)
    {
        len = fread(&pave, 1, sizeof(parrot_video_encapsulation_t), fd); // hack header
        if( len == 0 )
            break;

        // NOTE(review): buf is leaked on the early `break` paths below and
        // when decoding fails — consider freeing before each break.
        buf = (uint8_t*)malloc( pave.payload_size );

        // every firmware has a little bit different header - it is necessary to parse header.size
        if( len < pave.header_size )
            fread(buf, 1, pave.header_size-len, fd );

        len = fread(buf, 1, pave.payload_size, fd);
        if( len < (int)pave.payload_size )
            break;


        type = pave.frame_type;

        if( pave.encoded_stream_width != srcX || pave.encoded_stream_height != srcY )
        {
            // resolution changed — rebuild image, buffer and sws context
            srcX = dstX = pave.display_width;
            srcY = dstY = pave.display_height;
            cvReleaseImage( &img );
            img = cvCreateImage(cvSize(srcX,srcY), IPL_DEPTH_8U, 3);  // c is not set yet!!!
            if( !img )
                return -2;
            cvZero(img);

            // NOTE(review): av_freep expects the ADDRESS of the pointer —
            // this call frees garbage; should be av_freep( &bufferBGR ).
            av_freep( bufferBGR );
            bufferBGR = (uint8_t*)av_mallocz(avpicture_get_size(PIX_FMT_BGR24, dstX, dstY) * sizeof(uint8_t)*10);
            avpicture_fill((AVPicture *)pictureRGB,bufferBGR,PIX_FMT_RGB24, dstX, dstY);

            sws_freeContext(img_convert_ctx);
            img_convert_ctx = sws_getContext(srcX, srcY,PIX_FMT_YUV420P, dstX, dstY, PIX_FMT_BGR24, SWS_BICUBIC, NULL, NULL, NULL);
        }

        avpkt.size = len;
        avpkt.data = buf;
        avpkt.flags = 0;
        if (type == 1) avpkt.flags=AV_PKT_FLAG_KEY;

        AVCodecContext *cSelected; // not allocated, just temporary pointer

        if( pave.video_codec == 4 )
            cSelected = cH264;
        else
            cSelected = cMPEG4;
        avcodec_decode_video2(cSelected, picture, &got_picture, &avpkt);

        if (got_picture > 0)
        {
            // srcY-8: skip the padding rows of the 368-line encoded frame
            sws_scale(img_convert_ctx,picture->data, picture->linesize, 0, srcY-8,pictureRGB->data,pictureRGB->linesize);
        }
        fprintf( stderr, "TIMESTAMP\t%d\n", pave.timestamp );

        // crop 368 -> 360 visible lines when copying into the IplImage
        memcpy(img->imageData, pictureRGB->data[0], cSelected->width * ((cSelected->height == 368) ? 360 : cSelected->height) * sizeof(uint8_t) * 3);

        if( h264MoveIndex < 0 && pave.frame_type == 1 )
            h264MoveIndex = 0;  // init after first I-frame

        if( h264MoveIndex >=0 && pave.frame_type == 3 )
            h264MoveIndex++;

        if( saveVideo )
            outputVideo.write( img );

        cvShowImage( "camera", img );
//      int key = cvWaitKey(100);
        int key = cvWaitKey(1);
        if( true ) //key == 'p' || key == 's' || (h264MoveIndex % 3 == 2) )
        {
            char namebuf[256];
//          sprintf( namebuf, "img_%d.jpg", pave.timestamp );
            sprintf( namebuf, "img_%04d.jpg", h264MoveIndex );
            cvSaveImage( namebuf, img );
        }
        if( key == 27 || key == 'q' )
            break;
        free(buf);
    }
    fclose(fd);
    cvReleaseImage(&img);

    sws_freeContext(img_convert_ctx);
    avcodec_close(cH264);
    avcodec_close(cMPEG4);
    av_free(cH264);
    av_free(cMPEG4);
    av_free(picture);

    return 0;
}
"""Replay a recorded PaVE capture frame by frame through the cvideo decoder.

usage: test.py <capture file>

Press 'q' in the image window to stop.
"""
import cvideo

import cv2
import numpy as np

import sys
sys.path.append( ".." )
from pave import PaVE, isIFrame, frameEncodedWidth, frameEncodedHeight

print cvideo.init()  # 1 = both decoders opened successfully

img = np.zeros([720,1280,3], dtype=np.uint8)
missingIFrame = True  # P-frames cannot be decoded before the first I-frame
filename = sys.argv[1]
pave = PaVE()
pave.append( open( filename, "rb" ).read() )
header,payload = pave.extract()
while len(header) > 0:
    w,h = frameEncodedWidth(header), frameEncodedHeight(header)
    if img.shape[0] != h or img.shape[1] != w:
        # resolution changed mid-stream -> reallocate the target image
        print img.shape, (w,h)
        img = np.zeros([h,w,3], dtype=np.uint8)
    missingIFrame = missingIFrame and not isIFrame(header)
    if not missingIFrame:
        assert cvideo.frame( img, isIFrame(header) and 1 or 0, payload )
        cv2.imshow('image', img)
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
    header,payload = pave.extract()
#!/usr/bin/python
"""
  Field Robot Event 2014 (ver0 - autonomous landing on heliport sign)
  usage:
       ./fre_drone.py
"""
import sys
import datetime
import multiprocessing
import cv2
import math
import numpy as np

from pave import PaVE, isIFrame, frameNumber, timestamp, correctTimePeriod, frameEncodedWidth, frameEncodedHeight

from sourcelogger import SourceLogger
from ardrone2 import ARDrone2, ManualControlException, manualControl, normalizeAnglePIPI, distance
import viewlog
from line import Line
from pose import Pose

from green import processAvoidGreen
import cvideo

#from rr_drone import downloadOldVideo
MAX_ALLOWED_SPEED = 0.8

ROW_WIDTH = 0.75 #0.4

###### -------------------- ready for major refactoring -------------------------

def timeName( prefix, ext ):
    # build a timestamped filename, e.g. logs/src_cv2_140607_153045.log
    dt = datetime.datetime.now()
    filename = prefix + dt.strftime("%y%m%d_%H%M%S.") + ext
    return filename

# decoder state shared across wrapper() calls (runs in the video subprocess)
g_pave = None
g_img = None


def wrapper( packet ):
    # Video-stream callback: feed raw PaVE packets to the decoder and run
    # the green-row detector on each decoded frame.  Returns
    # ((frameNumber, timestamp), result) for the first decodable frame.
    global g_pave
    global g_img
    if g_pave == None:
        # lazy init inside the video subprocess
        g_pave = PaVE()
        cvideo.init()
        g_img = np.zeros([360,640,3], dtype=np.uint8)
    g_pave.append( packet )
    header,payload = g_pave.extract()
    while payload:
        if isIFrame( header ):
            # only I-frames carry a reliable resolution to resize against
            w,h = frameEncodedWidth(header), frameEncodedHeight(header)
            if g_img.shape[0] != h or g_img.shape[1] != w:
                print g_img.shape, (w,h)
                g_img = np.zeros([h,w,3], dtype=np.uint8)
        ret = cvideo.frame( g_img, isIFrame(header) and 1 or 0, payload )
        frame = g_img
        assert ret
        if ret:
            result = processAvoidGreen( frame, debug=False )
            return (frameNumber( header ), timestamp(header)), result
        header,payload = g_pave.extract()

# results from the video subprocess are passed through this queue
g_queueResults = multiprocessing.Queue()

def getOrNone():
    # non-blocking read of the latest video-processing result
    if g_queueResults.empty():
        return None
    return g_queueResults.get()


###### --------------------------------------------------------------------------



# TODO for bottom cammera (!)
def project2plane( imgCoord, coord, height, heading, angleFB, angleLR ):
    # Project a front-camera pixel (1280x720) onto the ground plane.
    # Returns world (x, y) or None when the ray does not hit the ground
    # in front of the drone.
    FOW = math.radians(70)   # camera field of view
    EPS = 0.0001
    x,y = imgCoord[0]-1280/2, 720/2-imgCoord[1]
    angleLR = -angleLR # we want to compensate the turn
    x,y = x*math.cos(angleLR)-y*math.sin(angleLR), y*math.cos(angleLR)+x*math.sin(angleLR)
    h = -x/1280*FOW + heading
    tilt = y/1280*FOW + angleFB
    if tilt > -EPS:
        return None # close to 0.0 AND projection behind drone
    dist = height/math.tan(-tilt)
    return (coord[0]+math.cos(h)*dist , coord[1]+math.sin(h)*dist)



class FieldRobotDrone( ARDrone2 ):
    # ARDrone2 with the green-row video pipeline attached; the latest
    # detector output is exposed as self.lastImageResult.
    def __init__( self, replayLog=None, speed = 0.2, skipConfigure=False, metaLog=None, console=None ):
        self.loggedVideoResult = None
        self.lastImageResult = None
        self.videoHighResolution = False
        ARDrone2.__init__( self, replayLog, speed, skipConfigure, metaLog, console )
        if replayLog == None:
            # live flight: log video-processing results for later replay
            name = timeName( "logs/src_cv2_", "log" )
            metaLog.write("cv2: "+name+'\n' )
            self.loggedVideoResult = SourceLogger( getOrNone, name ).get
            self.startVideo( wrapper, g_queueResults, record=True, highResolution=self.videoHighResolution )
        else:
            # replay: read the recorded video-processing results instead
            assert metaLog
            self.loggedVideoResult = SourceLogger( None, metaLog.getLog("cv2:") ).get
            self.startVideo( record=True, highResolution=self.videoHighResolution )

    def update( self, cmd="AT*COMWDG=%i,\r" ):
        ARDrone2.update( self, cmd )
        if self.loggedVideoResult != None:
            self.lastImageResult = self.loggedVideoResult()


def stayAtPosition( drone, desiredHeight = 1.5, timeout = 10.0 ):
    # Hover at the detected roundel tag (or fly a small square search
    # pattern until one is found).  Returns the longest control-loop gap.
    maxControlGap = 0.0
    desiredSpeed = MAX_ALLOWED_SPEED
    refPoint = (0,0)
    foundTagTime = None
    searchSeq = [(0,0), (2,0), (0,-2), (-2,0), (0,2), (0,0)]
    startTime = drone.time
    sx,sy,sz,sa = 0,0,0,0
    stepTime = drone.time
    while drone.time < startTime + timeout:
        altitude = desiredHeight
        if drone.altitudeData != None:
            altVision = drone.altitudeData[0]/1000.0
            altSonar = drone.altitudeData[3]/1000.0
            altitude = (altSonar+altVision)/2.0
            # TODO selection based on history? panic when min/max too far??
            if abs(altSonar-altVision) > 0.5:
                print altSonar, altVision
                altitude = max( altSonar, altVision ) # sonar is 0.0 sometimes (no ECHO)

        sz = max( -0.2, min( 0.2, desiredHeight - altitude ))
        if altitude > 2.5:
            # wind and "out of control"
            sz = max( -0.5, min( 0.5, desiredHeight - altitude ))

        sx = 0.0 #max( 0, min( drone.speed, desiredSpeed - drone.vx ))

#        tiltCompensation = Pose(desiredHeight*oldAngles[0], desiredHeight*oldAngles[1], 0) # TODO real height?
#        print "FRAME", frameNumber/15, "[%.1f %.1f]" % (math.degrees(oldAngles[0]), math.degrees(oldAngles[1])),

#        if drone.battery < 10:
#            print "BATTERY LOW!", drone.battery


        # error definition ... if you substract that you get desired position or angle
        # error is taken from the path point of view, x-path direction, y-positive left, angle-anticlockwise
        errX, errY, errA = 0.0, 0.0, 0.0
#        if refLine:
#            errY = refLine.signedDistance( drone.coord )
#            errA = normalizeAnglePIPI( drone.heading - refLine.angle )

        if len(drone.visionTag) > 0:
            # tag visible: convert its image position to a world refPoint
            SCALE = 0.17/(2*74)
            tagX, tagY, tagDist = drone.visionTag[0][0], drone.visionTag[0][1], drone.visionTag[0][4]/100.0
            tiltCompensation = Pose(tagDist*drone.angleFB, tagDist*drone.angleLR, 0)
            pose = Pose(drone.coord[0], drone.coord[1], drone.heading).add(tiltCompensation)
            offset = Pose(tagDist*(480-tagY)*SCALE, tagDist*(tagX-640)*SCALE, 0.0)
            pose = pose.add( offset )
            refPoint = (pose.x, pose.y)
            if foundTagTime == None:
                print drone.visionTag
                print "%.2f\t%.2f\t%.2f\t%.1f\t%.1f" % (drone.time, tagDist*(tagY-480)*SCALE, tagDist*(tagX-640)*SCALE, math.degrees(drone.angleFB), math.degrees(drone.angleLR))
                print refPoint
            foundTagTime = drone.time
        else:
            if foundTagTime != None and drone.time - foundTagTime > 3.0:
                # tag lost for 3s -> restart the search square around here
                foundTagTime = None
                print "LOST TAG"
                searchSeq = [(x+drone.coord[0], y+drone.coord[1]) for (x,y) in [(0,0), (2,0), (0,-2), (-2,0), (0,2), (0,0)]]

        if foundTagTime == None and len(searchSeq) > 0 and drone.time - stepTime > 3.0:
            # advance to the next search waypoint every 3 seconds
            refPoint = searchSeq[0]
            searchSeq = searchSeq[1:]
            print "STEP", refPoint
            stepTime = drone.time

        if refPoint:
            pose = Pose(drone.coord[0], drone.coord[1], drone.heading)
            landPose = Pose( refPoint[0], refPoint[1], drone.heading ) # ignore heading for the moment
            diff = pose.sub( landPose )
            #print diff
            errX, errY = diff.x, diff.y



        # get the height first
#        if drone.coord[2] < desiredHeight - 0.1 and drone.time-startTime < 5.0:
#            sx = 0.0
        # error correction
        # the goal is to have errY and errA zero in 1 second -> errY defines desired speed at given distance from path
        sx = max( -0.2, min( 0.2, -errX-drone.vx ))/2.0
        sy = max( -0.2, min( 0.2, -errY-drone.vy ))/2.0

        # there is no drone.va (i.e. derivative of heading) available at the moment ...
        sa = max( -0.1, min( 0.1, -errA/2.0 ))*1.35*(desiredSpeed/0.4) # originally set for 0.4=OK

#        print "%0.2f\t%d\t%0.2f\t%0.2f\t%0.2f" % (errY, int(math.degrees(errA)), drone.vy, sy, sa)
        prevTime = drone.time
        drone.moveXYZA( sx, sy, sz, sa )
        maxControlGap = max( drone.time - prevTime, maxControlGap )
    return maxControlGap

def groundPose( (x,y), (xBottom,yBottom), scale ):
    # pose of the crop row relative to the bottom camera image center
    a = math.atan2( xBottom-x, yBottom-y )
    return Pose( scale*(360/2-y), scale*(640/2-x), a )


def evalRowData( rowTopBottom ):
    # Accept a detected row only when both strip edges are inside the image
    # and the row width is plausible; returns (topCenter, bottomCenter).
    (xL,xR),(xbL,xbR) = rowTopBottom
    if min(xL,xbL) > 0 and max(xR,xbR) < 640:
        widthTop, widthBottom = xR-xL, xbR-xbL
        if min(widthTop, widthBottom) > 150 and max(widthTop, widthBottom) < 350:
            return (xL+xR)/2, (xbL+xbR)/2

def followRow( drone, desiredHeight = 1.5, timeout = 10.0 ):
    # Fly along the detected crop row, steering toward a reference line
    # fitted from the bottom-camera row detection.  Returns the longest
    # control-loop gap.
    maxControlGap = 0.0
    maxVideoDelay = 0.0
    desiredSpeed = MAX_ALLOWED_SPEED
    startTime = drone.time
    sx,sy,sz,sa = 0,0,0,0
    lastUpdate = None
    refLine = None
    while drone.time < startTime + timeout:
        altitude = desiredHeight
        if drone.altitudeData != None:
            altVision = drone.altitudeData[0]/1000.0
            altSonar = drone.altitudeData[3]/1000.0
            altitude = (altSonar+altVision)/2.0
            # TODO selection based on history? panic when min/max too far??
            if abs(altSonar-altVision) > 0.5:
#                print altSonar, altVision
                altitude = max( altSonar, altVision ) # sonar is 0.0 sometimes (no ECHO)
        sz = max( -0.2, min( 0.2, desiredHeight - altitude ))
        if altitude > 2.5:
            # wind and "out of control"
            sz = max( -0.5, min( 0.5, desiredHeight - altitude ))

        if drone.lastImageResult:
            lastUpdate = drone.time
            assert len( drone.lastImageResult ) == 2 and len( drone.lastImageResult[0] ) == 2, drone.lastImageResult
            (frameNumber, timestamp), rowTopBottom = drone.lastImageResult
            viewlog.dumpVideoFrame( frameNumber, timestamp )
            # keep history small
            videoTime = correctTimePeriod( timestamp/1000., ref=drone.time )
            videoDelay = drone.time - videoTime
            if videoDelay > 1.0:
                print "!DANGER! - video delay", videoDelay
            maxVideoDelay = max( videoDelay, maxVideoDelay )
            toDel = 0
            # find the pose recorded closest before the video frame time
            for oldTime, oldPose, oldAngles in drone.poseHistory:
                toDel += 1
                if oldTime >= videoTime:
                    break
            drone.poseHistory = drone.poseHistory[:toDel]
            tiltCompensation = Pose(desiredHeight*oldAngles[0], desiredHeight*oldAngles[1], 0) # TODO real height?
            validRow = evalRowData( rowTopBottom )
            print "FRAME", frameNumber/15, "[%.1f %.1f]" % (math.degrees(oldAngles[0]), math.degrees(oldAngles[1])), validRow
            if validRow:
                sp = groundPose( *rowTopBottom, scale=ROW_WIDTH/((validRow[0]+validRow[1])/2.0))
                sPose = Pose( *oldPose ).add(tiltCompensation).add( sp )
                refLine = Line( (sPose.x,sPose.y), (sPose.x + math.cos(sPose.heading), sPose.y + math.sin(sPose.heading)) )

        errY, errA = 0.0, 0.0
        if refLine:
            errY = refLine.signedDistance( drone.coord )
            errA = normalizeAnglePIPI( drone.heading - refLine.angle )


        sx = max( 0, min( drone.speed, desiredSpeed - drone.vx ))
        sy = max( -0.2, min( 0.2, -errY-drone.vy ))/2.0
        sa = max( -0.1, min( 0.1, -errA/2.0 ))*1.35*(desiredSpeed/0.4)
        prevTime = drone.time
        drone.moveXYZA( sx, sy, sz, sa )
        drone.poseHistory.append( (drone.time, (drone.coord[0], drone.coord[1], drone.heading), (drone.angleFB, drone.angleLR)) )
        maxControlGap = max( drone.time - prevTime, maxControlGap )
    return maxControlGap


def competeFieldRobot( drone, desiredHeight = 1.5 ):
    # Mission entry point: take off, follow the row for 25 s, land.
    drone.speed = 0.1
    maxVideoDelay = 0.0
    maxControlGap = 0.0

    try:
        drone.wait(1.0)
        drone.setVideoChannel( front=False )  # use the bottom camera
        drone.takeoff()
        drone.poseHistory = []

        print "NAVI-ON"
        startTime = drone.time
#        maxControlGap = stayAtPosition( drone, desiredHeight=desiredHeight, timeout=30.0 )
        maxControlGap = followRow( drone, desiredHeight=desiredHeight, timeout=25.0 )
        lastUpdate = None
        print "NAVI-OFF", drone.time - startTime
        drone.hover(0.5)
        drone.land()
        drone.setVideoChannel( front=True )
    except ManualControlException, e:
        print "ManualControlException"
        if drone.ctrlState == 3: # CTRL_FLYING=3 ... i.e. stop the current motion
            drone.hover(0.1)
        drone.land()
    drone.wait(1.0)
    drone.stopVideo()
#    print "MaxVideoDelay", maxVideoDelay
    print "MaxControlGap", maxControlGap
    print "Battery", drone.battery


if __name__ == "__main__":
    if len(sys.argv) > 2 and sys.argv[1] == "img":
        # NOTE(review): neither imgmain nor processFrame is imported in this
        # module (green.py imports `from airrace import main as imgmain`);
        # as written this branch raises NameError — confirm intended entry.
        imgmain( sys.argv[1:], processFrame )
        sys.exit( 0 )
    import launcher
    launcher.launch( sys.argv, FieldRobotDrone, competeFieldRobot )
limit ): 47 | "numpy implementation" 48 | return cimg.avoidGreen( frame, fromX, toX, fromY, toY, limit, 1.1 ) 49 | 50 | def stripLeftRight( frame, fromY, toY, limit ): 51 | width = frame.shape[1] 52 | xL = avoidGreen( frame, width/2, 0, fromY, toY, limit=limit ) 53 | xR = avoidGreen( frame, width/2, width, fromY, toY, limit=limit ) 54 | return xL,xR 55 | 56 | 57 | def processAvoidGreen( frame, debug=False ): 58 | height, width, colors = frame.shape 59 | offset = 0 60 | stripWidth = 70 61 | limit = 120 62 | topLR = stripLeftRight( frame, offset, offset+stripWidth, limit=limit ) 63 | bottomLR = stripLeftRight( frame, height-offset-stripWidth, height-offset, limit=limit ) 64 | if debug: 65 | cv2.imshow('image', frame) 66 | saveIndexedImage( frame ) 67 | return topLR, bottomLR 68 | 69 | if __name__ == "__main__": 70 | imgmain( sys.argv, processAvoidGreen ) #processGreen ) 71 | 72 | -------------------------------------------------------------------------------- /guide/introduction.md: -------------------------------------------------------------------------------- 1 | Flying autonomous Parrot AR Drone 2 with Python 2 | 3 | Parrot AR Drone 2 is not only toy to play with your iPhone or Android 4 | phone/tablet. Thanks to public API you can also control it from your program. 5 | In this short course we will show you how to do that in Python programming 6 | language. Our goal is autonomous behavior so beside flying and programming you 7 | may also learn how to develop autonomous robot. 8 | 9 | 10 | -------------------------------------------------------------------------------- /guide/lesson1.md: -------------------------------------------------------------------------------- 1 | Flying autonomous Parrot AR Drone 2 with Python 2 | 3 | Lesson 1 - Before you takeoff 4 | 5 | In this revised lesson we will test communication with your drone via basic functions provided by Parrot ARDrone2 6 | Wi-Fi API. Power on the AR Drone 2 and connect to it. 
The drone acts as Access 7 | Point with DHCP - your notebook should get IP like 192.168.1.2. 8 | 9 | In this first lesson we will not fly. For all further lessons make 10 | sure that you have enough space available, there is no wind etc. Also always 11 | fly with indoor protective cover even if you fly autonomously outside. 12 | 13 | Code: 14 | 15 | import sys 16 | sys.path.append('..') # access to drone source without installation 17 | 18 | To simplify "installation" we added these two lines to all lessons. Then you do 19 | not need to copy any files or set PYTHONPATH. All you need is _git checkout_ 20 | and test lessons one by one in _guide directory_. 21 | 22 | from ardrone2 import ARDrone2 23 | 24 | This line will import class "ARDrone2". 25 | 26 | drone = ARDrone2() 27 | 28 | Here is hidden all creation work. "drone" is now an instance of class ARDrone2, 29 | with initialized connection, starting log files, setting default values for all 30 | internal variables. 31 | 32 | So what can we do if flying is too dangerous for the very first lesson? We can wait ;-) 33 | 34 | drone.wait(10.0) # active waiting for 10 seconds 35 | 36 | This is _active waiting_ so you talk to your drone (200 commands per second) 37 | and you collect current drone status. Here are two examples of what you can be 38 | interested in ... battery and coordinates. 39 | 40 | print drone.battery 41 | print drone.coord # X, Y, Z coordinate 42 | 43 | The battery status is reported as percentage. It is useful to print it both 44 | before and after flight. If it is too low (15%?) the drone will not take off 45 | any more and you can quickly see why. 46 | 47 | The "drone.coord" are in metres relative to start which is (0,0,0). I have not 48 | tested that but I would be curious what will happen if you move the drone 49 | within 10 seconds of waiting time. Will the coordinates change? 50 | 51 | Finally try to press any key during the test of lesson 1. 
The program should 52 | terminate and write that you did not handle _ManualControlException_ ... do not 53 | worry, we will fix it in the next lesson. 54 | 55 | -------------------------------------------------------------------------------- /guide/lesson1.py: -------------------------------------------------------------------------------- 1 | """ 2 | Lesson 1 - before you takeoff ... 3 | """ 4 | import sys 5 | import os 6 | import inspect 7 | ARDRONE2_ROOT = os.path.realpath(os.path.abspath(os.path.join(os.path.split(inspect.getfile( inspect.currentframe() ))[0],".."))) 8 | if ARDRONE2_ROOT not in sys.path: 9 | sys.path.insert(0, ARDRONE2_ROOT) # access to drone source without installation 10 | 11 | from ardrone2 import ARDrone2 12 | 13 | drone = ARDrone2() 14 | 15 | drone.wait(10.0) # active waiting for 10 seconds 16 | print drone.battery 17 | print drone.coord # X, Y, Z coordinate 18 | 19 | -------------------------------------------------------------------------------- /guide/lesson2.md: -------------------------------------------------------------------------------- 1 | Flying autonomous Parrot AR Drone 2 with Python 2 | 3 | Lesson 2 - Manual emergency stop and basic functions 4 | 5 | We have revised flying in Lesson 1. It was the simplest possible code, but 6 | let's face it, it was also a dangerous one. Why? What if anything goes wrong? 7 | What will you do? What drone will do? Now is the "flying code" moved to this 8 | lesson 2. 9 | 10 | First we will introduce "ManualControlException". As soon as you press 11 | any key the normal code execution will switch to "manual" and drone will land. 12 | 13 | Note about differences in used operating systems. In Windows you can use 14 | "msvcrt.kbhit" which is not available in Linux. Workaround is usage of extra 15 | library (PyGame), which provides simple interface for keyboard scanning. 
16 | 17 | 18 | Code: 19 | 20 | from ardrone2 import ARDrone2 21 | drone = ARDrone2() 22 | 23 | These bits we already know from the Lesson 1. So we have "drone" ready to 24 | accept commands and you can read current status in internal variables. 25 | 26 | drone.takeoff() 27 | 28 | Parrot provides automatic takeoff (you can have a look in the "ardrone2.py" 29 | source code and see that it is shortcut for repeated AT command "AT*REF=%i, 30 | 0b10001010101000000001000000000") until drone status changes to state 31 | "flying". 32 | 33 | drone.hover( 3.0 ) 34 | 35 | Hovering is a state in which the drone is keeping the same coordinate. This is 36 | a mostly safe function except moments when the drone pushes against some obstacle. 37 | Then it does not have space to compensate the tilt and you can easily 38 | break it (as I did my first drone when I was testing in a very small room). 39 | Make sure you have enough space or rather jump to Lesson 2, where you have 40 | some kind of "emergency stop". 41 | 42 | The parameter for hovering is time in seconds (we try to keep metric units if 43 | possible, i.e. seconds, meters and angles in radians). 44 | 45 | drone.land() 46 | 47 | The last step should always be to land. The command is similar to take off 48 | ("AT*REF") except it sets different flags and waits for changing state to 49 | "landed". 50 | 51 | ---------------------- 52 | 53 | Example of things !!NOT TO DO!! 54 | 55 | 2014-06-23: 56 | ... I just successfully completed lesson 1, against your advice inside my small office ;-). 57 | The drone managed to take off, hover for three seconds just below and somewhat against the ceiling, and then it landed safely. 58 | ... 59 | 60 | DO NOT DO THAT! ... you can easily break your new toy! Believe me, I have done that (two years ago)! 
61 | 62 | -------------------------------------------------------------------------------- /guide/lesson2.py: -------------------------------------------------------------------------------- 1 | """ 2 | Lesson 2 - manual emergency stop 3 | """ 4 | import sys 5 | sys.path.append('..') # access to drone source without installation 6 | from ardrone2 import ARDrone2, ManualControlException 7 | 8 | drone = ARDrone2() 9 | try: 10 | drone.takeoff() 11 | drone.hover(3.0) 12 | except ManualControlException, e: 13 | print "ManualControlException" 14 | drone.land() 15 | 16 | # vim: expandtab sw=4 ts=4 17 | 18 | -------------------------------------------------------------------------------- /guide/lesson3.md: -------------------------------------------------------------------------------- 1 | Flying autonomous Parrot AR Drone 2 with Python 2 | 3 | Lesson 3 - Logging 4 | 5 | Every flight is unique and you will never be able to exactly repeat it. How can 6 | you then develop anything or debug what just happened? The key part is log 7 | files - all commands and input data are logged so you can replay them any time 8 | later. Note that the program has to be deterministic and not dependent on other 9 | source (like system time - otherwise you would have to log it as source too). 10 | 11 | In this example we will replay existing logs. Note, that log files were 12 | generated automatically also in previous lessons, but we were not able to 13 | replay them yet. 14 | 15 | In this lesson we changed main code into function "testLesson3" and use 16 | "launcher" to parse parameters and call the main function. 17 | 18 | How to replay the flight? Or maybe even what are the command line parameters 19 | now? It is name of the program and reserved parameter for the main code (note, 20 | task number or name of configuration file, for example). 3rd parameter is then 21 | name of the "metalog file". What is that? It is file from "logs" directory 22 | starting with keyword "meta". 
There are several sources involved and if you 23 | want to be able to repeat them with identical results you have to for example 24 | also record the moment when you hit the emergency stop button/keyboard. Metalog 25 | then contains references to names of all other files. 26 | 27 | == Task 1 28 | 29 | Q: What was your battery level _before_ takeoff? 30 | 31 | A: Because print is not changing your drone behavior, you can add there: 32 | print "Battery", drone.battery 33 | Because all navdata (navigation data from the drone) are logged you can add the print(s) afterwards! 34 | 35 | 36 | 37 | == Task 2 38 | 39 | Q: What was the maximum height during your very first flight? 40 | 41 | A: Well, this will be a little bit tricky. We are logging everything even if 42 | you do not ask for it. It is something like "black box" on the airplane ... you 43 | cannot put it there after the crash. It also fulfills similar mission - if 44 | something goes wrong you can analyse it and prevent it from repetition. 45 | 46 | So back to height from your first flight. There are several sources about drone 47 | altitude (we will discuss/test it in some later lesson) and they are all merged 48 | into Z-coordinate available as 3rd variable in drone.coord tuple. You can add 49 | "print drone.coord[2]" just before you call drone.land(). 50 | 51 | Is there a problem replaying the metalog file? Actually there should be ;-). 52 | Why? Because in your first example you hover for 3 seconds while in your lesson 53 | 3 it was set to 10 seconds. You should see assert like: 54 | 55 | File "m:\git\heidi\ardrone2.py", line 147, in update 56 | assert fileCmd==cmd, "Diff from file: %s, cmd %s" % (fileCmd, cmd) 57 | AssertionError: Diff from file: AT*CONFIG_IDS=, cmd AT*FTRIM=237, 58 | 59 | This means that your code/program generates different commands then during the 60 | flight. You can experiment and change 10 to 3 to see if this assert goes away 61 | (it should). 
Or you add extra parameter "F" (as "False" for assert checking) 62 | and will ignore new commands and only process incoming sensor data. 63 | 64 | -------------------------------------------------------------------------------- /guide/lesson3.py: -------------------------------------------------------------------------------- 1 | """ 2 | Lesson 3 - logging examples 3 | """ 4 | import sys 5 | sys.path.append('..') # access to drone source without installation 6 | from ardrone2 import ARDrone2, ManualControlException 7 | 8 | def testLesson3( drone ): 9 | try: 10 | drone.takeoff() 11 | drone.hover(10.0) 12 | except ManualControlException, e: 13 | print "ManualControlException" 14 | drone.land() 15 | print "Battery", drone.battery 16 | 17 | 18 | 19 | if __name__ == "__main__": 20 | import launcher 21 | launcher.launch( sys.argv, ARDrone2, testLesson3 ) 22 | 23 | 24 | # vim: expandtab sw=4 ts=4 25 | 26 | -------------------------------------------------------------------------------- /guide/lesson4.md: -------------------------------------------------------------------------------- 1 | Flying autonomous Parrot AR Drone 2 with Python 2 | 3 | Lesson 4 - Fly forward 4 | 5 | It is time to fly! (and not only hover on one place) If you did not follow our 6 | recommendation in lesson 1 you should do it now. Find some free space! 7 | Sometimes even the takeoff is a little bit wild and if you have enough space 8 | you get also enough time to hit the emergency stop (keyboard) and land. 9 | 10 | Original Parrot API uses a bit strange coordinate system. We are used to ground 11 | vehicles, where XY was enough, so we transform it. X is pointing forward, 12 | Y left and Z up. The drone starts from (0,0,0) and you can read current 13 | position from drone.coord internal variable. Distances are in meters. 14 | 15 | There are four dimensions in which you can control the drone: forward/backward, 16 | left/right, up/down, anticlockwise/clockwise. 
These are also four parameters in 17 | drone.moveXYZA() function. Note, that you control fractions of some 18 | pre-configured values! sx=0.1 is 10% of maximal allowed tilt. If you 19 | increase the time from 2.0s say to 10s be aware the THE DRONE WILL SPEED UP! 20 | Also if you set after that sx to zero, it will still continue to fly in previous 21 | direction! Sounds scary? 22 | 23 | One remark about following two lines: 24 | if drone.ctrlState == 3: # CTRL_FLYING=3 ... i.e. stop the current motion drone. 25 | hover(0.1) 26 | 27 | The drone is in some "state" and if you hit emergency stop, while flying really 28 | fast in some direction, you first want to stop that movement. And that's what 29 | our old "hover" will do for us. 30 | 31 | Homework 1: 32 | 33 | Change the code that the drone will land as soon as it is 2 meters from 34 | starting position. 35 | 36 | -------------------------------------------------------------------------------- /guide/lesson4.py: -------------------------------------------------------------------------------- 1 | """ 2 | Lesson 4 - fly forward 3 | """ 4 | import sys 5 | sys.path.append('..') # access to drone source without installation 6 | from ardrone2 import ARDrone2, ManualControlException 7 | 8 | def testLesson4( drone ): 9 | try: 10 | drone.takeoff() 11 | startTime = drone.time 12 | while drone.time - startTime < 2.0: 13 | sx, sy, sz, sa = 0.1, 0.0, 0.0, 0.0 14 | drone.moveXYZA( sx, sy, sz, sa ) 15 | except ManualControlException, e: 16 | print "ManualControlException" 17 | if drone.ctrlState == 3: # CTRL_FLYING=3 ... i.e. 
stop the current motion 18 | drone.hover(0.1) 19 | drone.land() 20 | print "Battery", drone.battery 21 | 22 | 23 | 24 | if __name__ == "__main__": 25 | import launcher 26 | launcher.launch( sys.argv, ARDrone2, testLesson4 ) 27 | 28 | 29 | # vim: expandtab sw=4 ts=4 30 | 31 | -------------------------------------------------------------------------------- /guide/lesson5.md: -------------------------------------------------------------------------------- 1 | Flying autonomous Parrot AR Drone 2 with Python 2 | 3 | Lesson 5 - Fly at given height 4 | 5 | How was flying in previous lesson? Did you experiment with longer times? Did 6 | you crash? If yes then you are ready for next lesson - it is time to control 7 | drone height/altitude and forward speed. 8 | 9 | AR Drone 2 has several data sources to estimate current height. There is bottom 10 | looking camera, sonar and pressure sensor. Also speeds of rotors and 11 | accelerometers are probably taken into account. If you do not want to worry 12 | about details (95% of users) you can simply use Z coordinate (i.e. 13 | "drone.coord[2]"). This works fine, but you should be aware that it is not 14 | perfect. It is just estimate integrated over your flight time. Also it is like 15 | "global altitude" with reference to your start point. What would you expect if 16 | you fly on uneven terrain? 17 | 18 | [Here](http://www.youtube.com/watch?v=qhmc--pTRJM) you can see my quadcopter 19 | Heidi crashing into ground. Our garden is not absolutely flat and if you fly at 20 | 1m at some point you will hit it. 21 | 22 | Another memorable experience is from [Air Race 23 | contest](http://www.youtube.com/watch?v=brrHXWxjO9k) where Isabelle, our second 24 | drone, was supposed to autonomously fly 8 figures. It was OK at the beginning 25 | but started to drop height over time (you can move the video to 6:20 and see 26 | yourself). Why? because we blindly trusted "drone.coord[2]". The drone does not 27 | know that it still the same floor. 
Once in a while somebody opened the door and 28 | due to small breeze the pressure changed. The error grows. 29 | 30 | So what can you do? Well, then you have to take care of the details of what is in 31 | reality integrated. You know more about the environment. You know that the 32 | floor is flat or that you actually want to rather fly at 1.5m _from the ground_ 33 | and not at given level. You can directly access raw readings of sonar and 34 | camera height estimation. They are available at "drone.altitudeData". The data 35 | do not have to be always available - the drone does not have to send them with 36 | 200Hz frequency. That is why you see " if drone.altitudeData != None:" in the 37 | code. Also these are raw values, in millimeters ... we may change this in some 38 | later ARDrone2 class revision, because over time we see that these data are 39 | really useful. 40 | 41 | If you fly in reasonable conditions the estimations from sonar and from camera 42 | are very close. But as soon as you start to fly above terrain with texture 43 | (field with maize, for example) and you feel the wind breeze you can be 44 | surprised :-). That is the explanation for 45 | 46 | if abs(altSonar-altVision) > 0.5: 47 | print altSonar, altVision 48 | altitude = max( altSonar, altVision ) # sonar is 0.0 sometimes (no ECHO) 49 | 50 | Also sometimes you do not get echo from sonar and the reading will be 0. 51 | 52 | If you flew more than 5 seconds in the previous lesson you probably found out how 53 | much the drone is speeding up. It is not safe and you do not want it fast 54 | anyway. This time we have simple forward speed control: 55 | 56 | sx = max( 0, min( drone.speed, desiredSpeed - drone.vx )) 57 | 58 | TODO refactoring - drone.vx and desiredSpeed are both in metric units (meters 59 | per second), but sx and drone.speed are strange Parrot fractions of maximal 60 | allowed tilt. This code works, but the units are not matching ... 
61 | 62 | 63 | Homework 64 | 65 | Try to fly up and down the stairs. 66 | 67 | -------------------------------------------------------------------------------- /guide/lesson5.py: -------------------------------------------------------------------------------- 1 | """ 2 | Lesson 5 - fly at given height 3 | """ 4 | import sys 5 | sys.path.append('..') # access to drone source without installation 6 | from ardrone2 import ARDrone2, ManualControlException 7 | 8 | def testLesson5( drone ): 9 | desiredSpeed = 0.8 # in meters per second 10 | desiredHeight = 1.5 11 | try: 12 | drone.takeoff() 13 | startTime = drone.time 14 | while drone.time - startTime < 10.0: 15 | 16 | altitude = desiredHeight 17 | if drone.altitudeData != None: 18 | altVision = drone.altitudeData[0]/1000.0 19 | altSonar = drone.altitudeData[3]/1000.0 20 | altitude = (altSonar+altVision)/2.0 21 | if abs(altSonar-altVision) > 0.5: 22 | print altSonar, altVision 23 | altitude = max( altSonar, altVision ) # sonar is 0.0 sometimes (no ECHO) 24 | 25 | sz = max( -0.2, min( 0.2, desiredHeight - altitude )) 26 | if altitude > 2.5: 27 | # wind and "out of control" 28 | sz = max( -0.5, min( 0.5, desiredHeight - altitude )) 29 | sx = max( 0, min( drone.speed, desiredSpeed - drone.vx )) 30 | sy, sa = 0.0, 0.0 31 | drone.moveXYZA( sx, sy, sz, sa ) 32 | 33 | except ManualControlException, e: 34 | print "ManualControlException" 35 | if drone.ctrlState == 3: # CTRL_FLYING=3 ... i.e. 
stop the current motion 36 | drone.hover(0.1) 37 | drone.land() 38 | print "Battery", drone.battery 39 | 40 | 41 | 42 | if __name__ == "__main__": 43 | import launcher 44 | launcher.launch( sys.argv, ARDrone2, testLesson5 ) 45 | 46 | 47 | # vim: expandtab sw=4 ts=4 48 | 49 | -------------------------------------------------------------------------------- /guide/lesson6.md: -------------------------------------------------------------------------------- 1 | Flying autonomous Parrot AR Drone 2 with Python 2 | 3 | Lesson 6 - Video recording 4 | 5 | In this lesson you will learn how to record video via drone onboard camera(s). 6 | It will takeoff, rotate 360 degrees in place and land. 7 | 8 | You already know how to turn (last parameter in "drone.moveXYZA()" function) so 9 | new is how to start video recording and how to limit the rotation. 10 | 11 | AR Drone 2 has two cameras - one in the front and the second pointing down. The 12 | first camera is much better and it is meant mainly for recording of user 13 | videos. The down pointing camera is primarily for internal use (drone 14 | motion and height estimatimation for low altitudes). 15 | 16 | There are two *video channels*: one for video recording and one for quick video 17 | preview (with lower quality). If you are interested, you may check the source 18 | code - basically it opens new TCP connection on port 5555 (VIDEO_PORT) and/or 19 | on port 5553 (VIDEO_RECORDER_PORT) and reads video data. With these two lines 20 | you start video recording and select front camera (default). 21 | 22 | drone.startVideo( record=True, highResolution=True ) 23 | drone.setVideoChannel( front=True ) 24 | 25 | When the drone lands you should close the connection by calling 26 | 27 | drone.stopVideo() 28 | 29 | And that is all. Feel free to experiment with different resolutions and 30 | front/bottom camera selection. 
Note, that in current Parrot AR Drone 2 firmware 31 | (2.4.x) it is not possible to select one camera for preview channel and second 32 | camera for recording. 33 | 34 | And what about the 360deg scanning?! Well, you are right! What would be the 35 | simplest solution for that task? We will implement "version 0", "the simplest 36 | thing that could possibly work", and that is to send constant rotation commands 37 | until given time expires. Are you disappointed? Try it first! 38 | 39 | There are several important details you should learn from this version 0: 40 | - drone does not stay in place 41 | - even the height is varying so you maybe had to cut the fly with emergency stop 42 | - heading is a bit random (you can add print drone.heading and replay your 43 | attempt from log file) 44 | 45 | I hope that now you are a little bit convinced now that even the simplest solution 46 | was not that useless. This corresponds to [eXtreme 47 | programming](http://en.wikipedia.org/wiki/Extreme_programming) practises - test 48 | first. You get real data and much better idea what is going on. On your log files 49 | you can new repeat your test flight many times and write version 1 (homework). 50 | It shoud 51 | keep given hight (say 1.5 meter above the ground), stay in place (you can use 52 | drone.coord and reference point when you entered the scan function), plus rotate 53 | 360 degrees. 54 | 55 | The drone heading is a result of data fusion (at least compass + gyros). It 56 | takes some time before the estimate converge to some reasonable value ... so you 57 | may want first to go to 1.5 meter height and then start to trust heading 58 | readings. And a small _cherry on the top_ ... the data fusion is so good, 59 | that it took us a month (?) before we realized that compass was broken on 60 | Isabelle1 - there was probably some bad connection so sometimes we got the data sometimes 61 | not. Gyroscopes took over very soon ... tough one. Do you trust your drone 62 | compass? 
If not you can dump row compass data: "drone.magneto". The whole story 63 | is [here](http://robotika.cz/competitions/robotchallenge/2014/en#140312). 64 | 65 | 66 | Final note about line 67 | 68 | scanVersion0( drone ) 69 | 70 | Why is it this way and not 71 | 72 | drone.scanVersion0() 73 | 74 | ? This is because we are calling function "scanVersion0()" with parameter 75 | "drone" and this function is not part of ARDrone2 class. You would probably prefer 76 | second style, but in that case we would need to extend ARDrone2 class and add 77 | there new function. 78 | -------------------------------------------------------------------------------- /guide/lesson6.py: -------------------------------------------------------------------------------- 1 | """ 2 | Lesson 6 - record video from starting area 3 | """ 4 | import sys 5 | sys.path.append('..') # access to drone source without installation 6 | from ardrone2 import ARDrone2, ManualControlException 7 | 8 | def scanVersion0( drone, timeout=10.0 ): 9 | startTime = drone.time 10 | while drone.time - startTime < timeout: 11 | sx, sy, sz, sa = 0.0, 0.0, 0.0, 0.3 12 | drone.moveXYZA( sx, sy, sz, sa ) 13 | drone.hover(0.1) # stop rotation 14 | 15 | 16 | def testLesson6( drone ): 17 | try: 18 | drone.startVideo( record=True, highResolution=True ) 19 | drone.setVideoChannel( front=True ) 20 | drone.takeoff() 21 | scanVersion0( drone ) 22 | except ManualControlException, e: 23 | print "ManualControlException" 24 | if drone.ctrlState == 3: # CTRL_FLYING=3 ... i.e. 
stop the current motion 25 | drone.hover(0.1) 26 | drone.land() 27 | drone.stopVideo() 28 | print "Battery", drone.battery 29 | 30 | 31 | 32 | if __name__ == "__main__": 33 | import launcher 34 | launcher.launch( sys.argv, ARDrone2, testLesson6 ) 35 | 36 | 37 | # vim: expandtab sw=4 ts=4 38 | 39 | -------------------------------------------------------------------------------- /guide/lesson7.md: -------------------------------------------------------------------------------- 1 | Flying autonomous Parrot AR Drone 2 with Python 2 | 3 | Lesson 7 - Hovering above oriented roundel 4 | 5 | In this lesson you will learn how to use automatically detected tags in 6 | particular "oriented roundel". The sign reminds heliport and in some older 7 | version was even navigation mode FLYING_MODE_HOVER_ON_TOP_OF_ORIENTED_ROUNDEL. 8 | Unfortunately, without notice, this option was removed so in this lesson we 9 | will learn how to do it ourselves. 10 | 11 | In your drone box you should already have the sign - if not you can download it 12 | [here](http://ardrone2.parrot.com/media/cms_page_media/38/special_target_1.pdf) 13 | and print it on A4 paper. I recommend to glue it on the box or tape it on the 14 | floor because otherwise it will flew away during takeoff ;-). 15 | 16 | The code is still "WORK IN PROGRESS" and what will probably not change is the 17 | used description of detected video tags. In SDK you can find this: 18 | 19 | * xc[i], yc[i]: X and Y coordinates of detected tag or oriented roundel #i 20 | inside the picture, with (0; 0) being the top-left corner, and (1000; 1000) 21 | the right-bottom corner regardless the picture resolution or the source 22 | camera. 23 | 24 | * width[i], height[i]: Width and height of the detection bounding-box (tag or 25 | oriented roundel #i), when applicable. 26 | 27 | * dist[i]: Distance from camera to detected tag or oriented roundel #i in 28 | centimeters, when applicable. 
29 | 30 | * orientation_angle[i] : Angle of the oriented roundel #i in degrees in the 31 | screen, when applicable. 32 | 33 | In the lesson 7 code we try to keep the tag in the middle of the camera image, 34 | i.e. (500,500). Things which we have to consider is the distance in centimeters 35 | (4th parameter), drone angles and its current speed. The goal is to have zero 36 | speed drone.vx, drone.vy and the roundel in the center. 37 | 38 | Example of "work in progres" video: 39 | https://www.youtube.com/watch?&v=bGds3axS6xQ 40 | 41 | = Issues = 42 | 43 | * Sonar altitude readings sometimes fails (returns 0) and in that case only 44 | vision from bottom camera gives some hints about current distance to the 45 | ground. 46 | 47 | * During takeoff the drone can reach even several meters (depends on weather 48 | conditions) so extra MAX_ALTITUDE was added. 49 | 50 | -------------------------------------------------------------------------------- /guide/lesson7.py: -------------------------------------------------------------------------------- 1 | """ 2 | Lesson 7 - hover above oriented roundel sign 3 | """ 4 | import sys 5 | import os 6 | import inspect 7 | import math 8 | 9 | ARDRONE2_ROOT = os.path.realpath(os.path.abspath(os.path.join(os.path.split(inspect.getfile( inspect.currentframe() ))[0],".."))) 10 | if ARDRONE2_ROOT not in sys.path: 11 | sys.path.insert(0, ARDRONE2_ROOT) # access to drone source without installation 12 | 13 | from ardrone2 import ARDrone2, ManualControlException, normalizeAnglePIPI 14 | 15 | MAX_ALTITUDE = 3.0 16 | 17 | def getXYZAGoalCmd( drone, goal, goalHeading=None, maxSpeed=0.3 ): 18 | frac = 0.3 19 | dxWorld = goal[0] - drone.coord[0] 20 | dyWorld = goal[1] - drone.coord[1] 21 | c = math.cos(drone.heading) 22 | s = math.sin(drone.heading) 23 | dx = c*dxWorld + s*dyWorld # inverse rotation 24 | dy = -s*dxWorld + c*dyWorld 25 | sx = max( -maxSpeed, min( maxSpeed, frac*(dx - drone.vx) )) 26 | sy = max( -maxSpeed, min( maxSpeed, frac*(dy - 
drone.vy) )) 27 | sa = 0.0 28 | if goalHeading: 29 | sa = max( -maxSpeed, min( maxSpeed, frac*(normalizeAnglePIPI(goalHeading - drone.heading)) )) 30 | return sx, sy, 0.0, sa 31 | 32 | 33 | 34 | def hoverAboveRoundel( drone, timeout=6000.0 ): 35 | startTime = drone.time 36 | maxSpeed = 0.1 37 | maxSpeedUpDown = 0.3 38 | frac = 0.1 39 | scaleX, scaleY = 0.38, 0.38 # camera FOW at 1m above the ground 40 | detectedCount = 0 41 | goal = (0.0, 0.0) #None 42 | goalHeading = None 43 | while drone.time - startTime < timeout: 44 | sx, sy, sz, sa = 0.0, 0.0, 0.0, 0.0 45 | if drone.visionTag: 46 | # xc[i], yc[i]: X and Y coordinates of detected tag or oriented roundel #i inside the picture, 47 | # with (0; 0) being the top-left corner, and (1000; 1000) the right-bottom corner regardless 48 | # the picture resolution or the source camera 49 | x, y = drone.visionTag[0][:2] 50 | dist = drone.visionTag[0][4]/100. 51 | angle = math.radians(drone.visionTag[0][5] - 270) # we use circle for the body and line for the tail -> 270deg offset 52 | detectedCount = min(100, detectedCount + 1) 53 | # TODO verify signs and scale 54 | dx = dist*(scaleY*(500-y)/500.0 + drone.angleFB) 55 | dy = dist*(scaleX*(500-x)/500.0) # - drone.angleLR) 56 | c = math.cos(drone.heading) 57 | s = math.sin(drone.heading) 58 | goal = (drone.coord[0] + c*dx - s*dy, 59 | drone.coord[1] + s*dx + c*dy ) 60 | goalHeading = normalizeAnglePIPI( drone.heading + angle ) # TODO check sign 61 | # print drone.visionTag, "%.2f %.2f %.1f %.1f" % (dx, dy, math.degrees(drone.angleFB), math.degrees(drone.angleLR)) 62 | else: 63 | detectedCount = max(0, detectedCount - 1) 64 | 65 | if goal: 66 | sx,sy,sz,sa = getXYZAGoalCmd( drone, goal, goalHeading=goalHeading ) 67 | 68 | sz = 0.0 69 | if drone.altitudeData != None: 70 | altVision = drone.altitudeData[0]/1000.0 71 | altSonar = drone.altitudeData[3]/1000.0 72 | if drone.altitudeData[3] == 0: 73 | # no sonar echo 74 | minAlt = altVision 75 | else: 76 | minAlt = min(altSonar, 
def testLesson7( drone ):
    """Lesson 7 demo entry point: takeoff, hover above the roundel, land.

    Records low-resolution video from the bottom camera (which sees the
    roundel); on manual override (ManualControlException) any current motion
    is stopped before landing.
    """
    try:
        drone.startVideo( record=True, highResolution=False )
        drone.setVideoChannel( front=False )   # bottom camera watches the roundel
        # alternative: record front camera in high resolution instead
#        drone.startVideo( record=True, highResolution=True )
#        drone.setVideoChannel( front=True )
        drone.takeoff()
        hoverAboveRoundel( drone )
    except ManualControlException, e:
        print "ManualControlException"
        if drone.ctrlState == 3: # CTRL_FLYING=3 ... i.e. stop the current motion
            drone.hover(0.1)
        drone.land()
    drone.stopVideo()
    print "Battery", drone.battery
def timeName( prefix, ext ):
    "Build a timestamped filename: prefix + 'yymmdd_HHMMSS.' + extension."
    stamp = datetime.datetime.now().strftime("%y%m%d_%H%M%S.")
    return prefix + stamp + ext
def dummyPacketProcessor( packet ):
    """Placeholder video-packet handler: print packet size and append its
    repr to packet.log (one packet per line, replayable via replayPacketLog)."""
    print( len(packet) )   # parentheses keep this valid in both py2 and py3
    # BUGFIX: the file handle was opened on every call and never closed (leak);
    # a with-block guarantees flush+close even on write errors.
    with open("packet.log", "a") as packetLog:
        packetLog.write( repr( packet ) + '\n' )
def h264drone( replayLog, metaLog, desiredSpeed = 1.0, timeout = 5.0 ):
    """Fly forward at desiredSpeed for timeout seconds, steering altitude from
    H.264 motion-vector statistics produced by the video PacketProcessor.

    replayLog ... navdata log to replay, or None for a live flight
    metaLog   ... meta log file; in replay mode it is scanned for the matching
                  "h264:" source log, in live mode the new log name is recorded
    """
    drone = ARDrone2( replayLog, metaLog=metaLog )
    if replayLog:
        for line in metaLog: # TODO refactoring
            print "XXLINE", line.strip()
            if line.startswith("h264:"):
                # replay previously logged processing results instead of computing them
                loggedResult = SourceLogger( None, line.split()[1].strip() ).get
                break
        drone.startVideo( record=False )
    else:
        name = timeName( "logs/src_h264_", "log" )
        metaLog.write("h264: "+name+'\n' )
        loggedResult = SourceLogger( getOrNone, name ).get
        drone.startVideo( wrapper, queueResults, record=False )

    if drone.userEmergencyLanding:
        drone.reset()
    try:
        drone.wait(1.0)
        drone.takeoff( enabledCorrections = False )
        # TODO some flying
        startTime = drone.time
        vz = 0.0
        while drone.time-startTime < timeout:
            # print "SPEED", drone.vx
            # bang-bang forward speed control around desiredSpeed
            if drone.vx > desiredSpeed:
                drone.moveXYZA( 0.0, 0.0, vz, 0.0 )
            else:
                drone.moveXYZA( drone.speed, 0.0, vz, 0.0 )
            tmp = loggedResult()
            if tmp != None:
                print "QUEUE", drone.time, tmp
                frameNum, timestamp, (left, right, up, down ) = tmp
                vz = 0.0
                if left + right < MAX_COUNT: # limited max number of movements in the whole image
                    # more motion below than above -> obstacle/ground rising -> climb
                    if down > up + MIN_STEP:
                        # move up
                        vz = drone.speed
                    if up > down + MIN_STEP:
                        # move down
                        vz = -drone.speed

        drone.land()
        drone.wait(1.0)
    except ManualControlException, e:
        print "ManualControlException"
        manualControl( drone )
        if drone.ctrlState == 3: # CTRL_FLYING=3 ... i.e. stop the current motion
            drone.hover(0.1)
        drone.land()
        drone.wait(1.0)
    drone.stopVideo()
    drone.halt()
events = pygame.event.get() 21 | for event in events: 22 | if event.type == pygame.KEYDOWN: 23 | return True 24 | return False 25 | 26 | 27 | def launch(cmd_args, robotFactory, task, configFn = None, canArgs={}): 28 | ''' 29 | Launches a robot to the given file. 30 | 31 | Parameters: 32 | cmd_args ... List of parameters in the "configFile [--file logfile [F|FF]]" form. 33 | robotFactory ... Function (or class) which can be called with the following named 34 | parameters and creates an instance of the robot: 35 | robot = robotFactory(can=..., replyLog=...) 36 | task ... Factory which called as "t = task(robot, configFileName, verbose=...)" 37 | creates a runable instance. Ie. "t()" runs the show. 38 | configFn ... A method called in the robot's preconfigration mode. 39 | canArgs ... Named arguments passed down to the CAN. 40 | ''' 41 | if len(cmd_args) < 2: 42 | print __doc__ 43 | # sys.exit(2) 44 | return 45 | 46 | # TODO unified launcher, similar to Eduro 47 | if len(cmd_args) > 3 and cmd_args[3] in ['F','FL']: 48 | ardrone2.g_checkAssert = False 49 | if len(cmd_args) > 3 and cmd_args[3] == 'FL': 50 | ardrone2.g_relogCmd = True 51 | replayLog = None 52 | metaLog = None 53 | console = None 54 | if len(cmd_args) > 2: 55 | if "meta" in cmd_args[2]: 56 | metaLog = MetaLog( cmd_args[2] ) 57 | replayLog = metaLog.getLog( "navdata:" ) 58 | else: 59 | replayLog=cmd_args[2] 60 | viewlog.viewLogFile = open( "view.log", "w" ) 61 | viewlog.dumpSharpsGeometry( [(0.18, 0.0, 0.0)] ) # front sonar 62 | else: # create new metaLog 63 | metaLog = open( datetime.datetime.now().strftime("logs/meta_%y%m%d_%H%M%S.log"), "w" ) 64 | metaLog.write( str(cmd_args) + "\n" ) 65 | metaLog.flush() 66 | if sys.platform == 'linux2': 67 | pygame.init() 68 | screen = pygame.display.set_mode((100,100)) 69 | console = linuxKbHit 70 | 71 | if cmd_args[1] == "replay": 72 | for replayLog in cmd_args[2:]: 73 | drone = ARDrone2( replayLog, skipConfigure=True ) 74 | try: 75 | while True: 76 | drone.update() 
def distance( planar1, planar2 ):
    "distance between two planar points"
    x = planar1[0] - planar2[0]
    y = planar1[1] - planar2[1]
    return math.hypot(x, y)

def pointAtPolyLineDist( pts, dist ):
    """Return the planar point lying on polyline pts at arc length dist from its start.

    Edge cases:
      - empty polyline           -> None
      - single vertex            -> that vertex
      - dist at or past the end  -> the last vertex
        (BUGFIX: previously this tripped "assert(dist < distFromStart)" and
        crashed instead of clamping)
    """
    if len(pts) == 0:
        return None
    if len(pts) == 1:
        return pts[0]

    prev = pts[0]
    distFromStart = 0
    for p in pts[1:]:
        distFromStart += distance( prev, p )
        if dist < distFromStart:
            break
        prev = p
    if dist >= distFromStart:
        # requested distance reaches beyond the polyline -> clamp to its end
        return p
    segment = distance( prev, p )
    if segment < 0.000001:
        # degenerate (near-zero) segment - avoid division by ~0 below
        return p
    # interpolate on segment prev->p: resize (p - prev) to dist-distFromStart
    frac = (distFromStart - dist)/segment
    return ( p[0] + frac*(prev[0]-p[0]), p[1] + frac*(prev[1]-p[1]) )
class MetaLog:
    """Binding of multiple log files.

    A meta log is a text file listing, one per line, "<prefix> <logfile>"
    entries for every data source of a recorded session (e.g. "navdata: logs/...").
    The file handle is kept open so successive getLog() calls continue scanning
    from where the previous search stopped.
    """
    def __init__( self, filename ):
        # filename kept so relative "logs/..." entries can be resolved next to it
        self.filename = filename
        self.f = open( self.filename )

    def getLog( self, prefix ):
        """Scan forward for the first line starting with prefix and return the
        referenced log path rebased into the meta log's own directory,
        or None when no such line remains."""
        for line in self.f:
            print "LINE", line.strip()
            if line.startswith( prefix ):
                ret = line.split()[1].strip()
                # entries are expected to be recorded under "logs/"
                assert ret.startswith("logs/")
                # strip "logs" and prepend the directory of the meta log itself
                return os.path.dirname( self.filename ) + os.sep + ret[4:]
        return None # not found
#define NAVDATA_HEADER 0x55667788 14 | #define NAVDATA_BUFFER_SIZE 2048 15 | 16 | NAVDATA_DEMO_TAG = 0, 17 | NAVDATA_VISION_DETECT_TAG = 16, 18 | NAVDATA_IPHONE_ANGLES_TAG = 18, 19 | NAVDATA_CKS_TAG = 0xFFFF 20 | 21 | 22 | NAVDATA_OPTION( navdata_time_t, navdata_time , NAVDATA_TIME_TAG ) 1 23 | NAVDATA_OPTION( navdata_raw_measures_t, navdata_raw_measures , NAVDATA_RAW_MEASURES_TAG ) 2 24 | NAVDATA_OPTION( navdata_phys_measures_t, navdata_phys_measures , NAVDATA_PHYS_MEASURES_TAG ) 3 25 | NAVDATA_OPTION( navdata_gyros_offsets_t, navdata_gyros_offsets , NAVDATA_GYROS_OFFSETS_TAG ) 4 26 | NAVDATA_OPTION( navdata_euler_angles_t, navdata_euler_angles , NAVDATA_EULER_ANGLES_TAG ) 5 27 | NAVDATA_OPTION( navdata_references_t, navdata_references , NAVDATA_REFERENCES_TAG ) 6 28 | NAVDATA_OPTION( navdata_trims_t, navdata_trims , NAVDATA_TRIMS_TAG ) 7 29 | NAVDATA_OPTION( navdata_rc_references_t, navdata_rc_references , NAVDATA_RC_REFERENCES_TAG ) 8 30 | NAVDATA_OPTION( navdata_pwm_t, navdata_pwm , NAVDATA_PWM_TAG ) 9 31 | NAVDATA_OPTION( navdata_altitude_t, navdata_altitude , NAVDATA_ALTITUDE_TAG ) 10 32 | NAVDATA_OPTION( navdata_vision_raw_t, navdata_vision_raw , NAVDATA_VISION_RAW_TAG ) 11 33 | NAVDATA_OPTION( navdata_vision_of_t, navdata_vision_of , NAVDATA_VISION_OF_TAG ) 12 34 | NAVDATA_OPTION( navdata_vision_t, navdata_vision , NAVDATA_VISION_TAG ) 13 35 | NAVDATA_OPTION( navdata_vision_perf_t , navdata_vision_perf , NAVDATA_VISION_PERF_TAG ) 14 36 | NAVDATA_OPTION( navdata_trackers_send_t, navdata_trackers_send , NAVDATA_TRACKERS_SEND_TAG ) 15 37 | NAVDATA_OPTION( navdata_vision_detect_t, navdata_vision_detect , NAVDATA_VISION_DETECT_TAG ) 16 38 | NAVDATA_OPTION( navdata_watchdog_t , navdata_watchdog , NAVDATA_WATCHDOG_TAG ) 17 39 | NAVDATA_OPTION( navdata_adc_data_frame_t, navdata_adc_data_frame , NAVDATA_ADC_DATA_FRAME_TAG ) 18 40 | NAVDATA_OPTION( navdata_video_stream_t, navdata_video_stream , NAVDATA_VIDEO_STREAM_TAG ) 19 41 | NAVDATA_OPTION( navdata_games_t, 
navdata_games , NAVDATA_GAMES_TAG ) 20 42 | NAVDATA_OPTION( navdata_pressure_raw_t, navdata_pressure_raw , NAVDATA_PRESSURE_RAW_TAG ) 21 43 | NAVDATA_OPTION( navdata_magneto_t, navdata_magneto , NAVDATA_MAGNETO_TAG ) 22 44 | NAVDATA_OPTION( navdata_wind_speed_t, navdata_wind_speed , NAVDATA_WIND_TAG ) 23 45 | NAVDATA_OPTION( navdata_kalman_pressure_t,navdata_kalman_pressure , NAVDATA_KALMAN_PRESSURE_TAG ) 24 46 | NAVDATA_OPTION( navdata_hdvideo_stream_t ,navdata_hdvideo_stream , NAVDATA_HDVIDEO_STREAM_TAG ) 25 47 | NAVDATA_OPTION( navdata_wifi_t ,navdata_wifi , NAVDATA_WIFI_TAG ) 26 48 | 49 | 50 | typedef struct _navdata_magneto_t { 51 | uint16_t tag; 52 | uint16_t size; 53 | 54 | int16_t mx; 55 | int16_t my; 56 | int16_t mz; 57 | vector31_t magneto_raw; // magneto in the body frame, in mG 58 | vector31_t magneto_rectified; 59 | vector31_t magneto_offset; 60 | float32_t heading_unwrapped; 61 | float32_t heading_gyro_unwrapped; 62 | float32_t heading_fusion_unwrapped; 63 | char magneto_calibration_ok; 64 | uint32_t magneto_state; 65 | float32_t magneto_radius; 66 | float32_t error_mean; 67 | float32_t error_var; 68 | 69 | }_ATTRIBUTE_PACKED_ navdata_magneto_t; 70 | 71 | 72 | """ 73 | NAVDATA_DEMO_TAG = 0 74 | NAVDATA_TIME_TAG = 1 75 | NAVDATA_RAW_MEASURES_TAG = 2 76 | NAVDATA_PWM_TAG = 9 77 | NAVDATA_ALTITUDE_TAG = 10 78 | NAVDATA_TRACKERS_SEND_TAG = 15 79 | NAVDATA_VISION_DETECT_TAG = 16 80 | NAVDATA_IPHONE_ANGLES_TAG = 18 81 | NAVDATA_PRESSURE_RAW_TAG = 21 82 | NAVDATA_MAGNETO_TAG = 22 83 | NAVDATA_GPS_TAG = 27 84 | NAVDATA_CKS_TAG = 0xFFFF 85 | 86 | # drone_state['navdata_bootstrap'] = _[1] >> 11 & 1 # Navdata bootstrap : (0) options sent in all or demo mode, (1) no navdata options sent */ 87 | # drone_state['com_watchdog_mask'] = _[1] >> 30 & 1 # Communication Watchdog : (1) com problem, (0) Com is ok */ 88 | 89 | # drone_state['command_mask'] = _[1] >> 6 & 1 # Control command ACK : (0) None, (1) one received */ 90 | 91 | def parseTimeTag( data, offset ): 92 | 
def parseDemoTag( data, offset ):
    """Decode a NAVDATA_DEMO_TAG option payload into a dict.

    After the 4-byte option header: ctrl_state and battery (uint32),
    theta/phi/psi (float), altitude (int), vx/vy/vz (float).
    The major control state lives in the upper 16 bits of ctrl_state.
    """
    keys = ('ctrl_state', 'battery', 'theta', 'phi', 'psi',
            'altitude', 'vx', 'vy', 'vz')
    raw = struct.unpack_from("IIfffifff", data, offset+4)
    parsed = dict(zip(keys, raw))
    parsed['ctrl_state'] >>= 16   # keep only the major state
    return parsed
def parseAltitudeTag( data, offset ):
    """tag == NAVDATA_ALTITUDE_TAG

    Decode the leading fields of navdata_altitude_t (after the 4-byte
    option header):
        int32   altitude_vision
        float32 altitude_vz
        int32   altitude_ref
        int32   altitude_raw
        float32 obs_accZ
        float32 obs_alt
    Remaining fields (obs_x, obs_state, est_vb, est_state) are not parsed.
    """
    altVision, altVz, altRef, altRaw, obsAccZ, obsAlt = \
        struct.unpack_from("ifiiff", data, offset+4)
    return (altVision, altVz, altRef, altRaw, obsAccZ, obsAlt)
def parsePressureRawTag( data, offset ):
    """tag == NAVDATA_PRESSURE_RAW_TAG

    Decode navdata_pressure_raw_t (after the 4-byte option header):
        int32 up, int16 ut, int32 Temperature_meas, int32 Pression_meas
    packed without alignment ('=').
    """
    up, ut, temperatureMeas, pressionMeas = struct.unpack_from("=ihii", data, offset+4)
    return (up, ut, temperatureMeas, pressionMeas)
def parseGPSTag( data, offset ):
    # info taken from
    # https://github.com/paparazzi/paparazzi/blob/55e3d9d79119f81ed0b11a59487280becf13cf40/sw/airborne/boards/ardrone/at_com.h#L157
    """tag == NAVDATA_GPS_TAG

    navdata_gps_t starts (after the 4-byte option header) with
    float64 lat and float64 lon; only this minimal (lat, lon) pair is
    decoded here - the rest of the large packed struct (elevation, hdop,
    fused coordinates, satellite channels, NED velocities, ...) is skipped.
    """
    latLon = struct.unpack_from("=dd", data, offset+4)
    return latLon
def parseNavData( packet ):
    """Parse and verbosely print one navdata packet starting at packet[0].

    Walks the 16-byte packet header and then the sequence of (tag, size)
    options, dispatching known tags to their parse* helpers, until the
    checksum option (NAVDATA_CKS_TAG) terminates the packet.
    Returns the number of bytes consumed, so the caller can advance through
    a concatenated log of packets.
    """
    offset = 0
    header = struct.unpack_from("IIII", packet, offset)
    print "ID", header[2], hex(header[1]), "ACK", header[1] >> 6 & 1, "WDOG", header[1] >> 30 & 1
    time,alt,ctrl = None,None,None
    offset += struct.calcsize("IIII")
    while True:
        tag, size = struct.unpack_from("HH", packet, offset)
        offset += struct.calcsize("HH")
        # collect the option payload byte by byte (size includes the 4-byte tag+size)
        values = []
        print tag, size
        for i in range(size-struct.calcsize("HH")):
            values.append(struct.unpack_from("c", packet, offset)[0])
            offset += struct.calcsize("c")
        if tag == NAVDATA_CKS_TAG:
            break
        # NOTE: the parse* helpers expect the 4-byte option header, hence the
        # "ABCD" padding prepended before the collected payload.
        if tag == NAVDATA_DEMO_TAG:
            values = parseDemoTag( "ABCD" + "".join(values), 0 )
            print values
            print "CTRL STATE", values['ctrl_state']
            alt = values['altitude']
            ctrl = values['ctrl_state']
            #CTRL_DEFAULT=0, CTRL_INIT=1, CTRL_LANDED=2, CTRL_FLYING=3, CTRL_HOVERING=4, CTRL_TEST=5,
            #CTRL_TRANS_TAKEOFF=6, CTRL_TRANS_GOTOFIX=7, CTRL_TRANS_LANDING=8, CTRL_TRANS_LOOPING=9

        if tag == NAVDATA_TIME_TAG:
            time = parseTimeTag("ABCD"+"".join(values), 0)
            print "TIME\t%f\t%s" % (time, hex(struct.unpack_from("I", "".join(values))[0]))

        if tag == NAVDATA_RAW_MEASURES_TAG:
            # 52 bytes = 2*18+3*4=36+12=48 (+4header)
            raw = struct.unpack_from("=HHHhhhhhIhhhhhhhhhIih", "".join(values))
            print "RAW", "\t".join( [str(x) for x in raw] )
            print parseRawMeasuresTag( "ABCD"+"".join(values), 0 )

        if tag == NAVDATA_ALTITUDE_TAG:
            (altVision, altVz, altRef, altRaw) = struct.unpack_from("ifii", "".join(values))
            # print "ALT", (altVision, altVz, altRef, altRaw)

        if tag == NAVDATA_TRACKERS_SEND_TAG:
            print "TRACKERS", parseTrackersSendTag( "ABCD"+"".join(values), 0 )


        if tag == NAVDATA_MAGNETO_TAG:
            magneto = parseMagnetoTag("ABCD"+ "".join(values), 0 )
            values = dict(zip(['mx', 'my', 'mz',
                'magneto_raw_x', 'magneto_raw_y', 'magneto_raw_z',
                'magneto_rectified_x', 'magneto_rectified_y', 'magneto_rectified_z',
                'magneto_offset_x', 'magneto_offset_y', 'magneto_offset_z',
                'heading_unwrapped', 'heading_gyro_unwrapped', 'heading_fusion_unwrapped',
                'magneto_calibration_ok', 'magneto_state', 'magneto_radius',
                'error_mean', 'error_var'], magneto))
            # print "compass:\t%d\t%d\t%d\t%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f\t%f" % tuple([values[x] for x in ['mx', 'my', 'mz',
            #    'magneto_raw_x', 'magneto_raw_y', 'magneto_raw_z',
            #    'magneto_rectified_x', 'magneto_rectified_y', 'magneto_rectified_z',
            #    'magneto_offset_x', 'magneto_offset_y', 'magneto_offset_z',
            #    ]])
            print "compass:\t"+"\t".join( [str(x) for x in magneto] )

        if tag == NAVDATA_VISION_DETECT_TAG:
            (countTypes, x, y, height, width, dist, oriAngle, cameraSource) = parseVisionDetectTag( "ABCD" + "".join(values), 0 )
            if countTypes[0] > 0:
                print "TAG %.3f" % time, countTypes, x[0], y[0], height[0], width[0], dist[0]/100.0, oriAngle[0], cameraSource[1]
            else:
                print "TAG %.3f" % time, countTypes
        if tag == NAVDATA_PWM_TAG:
            print "PWM", parsePWMTag( "ABCD" + "".join(values), 0 )

        if tag == NAVDATA_GPS_TAG:
            print "GPS", parseGPSTag( "ABCD" + "".join(values), 0 )
    # summary line when flying (ctrl 2 == CTRL_LANDED)
    if time and ctrl != 2:
        print "ALT\t%f\t%d\t%d" % (time, ctrl, alt)
    return offset
-------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | """ 3 | Parse on-board collected binary data from /dev/ttyO1 4 | usage: 5 | ./parse_navdata.py 6 | """ 7 | # ref C-code navdata_measure_t is available at 8 | # https://github.com/paparazzi/paparazzi/blob/master/sw/airborne/boards/ardrone/navdata.h 9 | 10 | import sys 11 | import struct 12 | 13 | NAVDATA_PACKET_SIZE = 60 14 | NAVDATA_START_BYTE = 0x3A 15 | 16 | 17 | def checksum( packet ): 18 | # does not match yet :( 19 | s = 0 20 | while len(packet) > 0: 21 | s += struct.unpack(" NAVDATA_PACKET_SIZE: 28 | packet = data[:NAVDATA_PACKET_SIZE] 29 | assert packet[0] == chr(NAVDATA_START_BYTE) 30 | chSum = struct.unpack("H", packet[58:60])[0] 31 | assert checksum( packet[2:58] ) == chSum, (checksum( packet[2:58] ), chSum) 32 | header, frameNumber = struct.unpack("HH", packet[:4]) 33 | assert header == NAVDATA_START_BYTE 34 | ax,ay,az = struct.unpack("HHH", packet[4:10]) 35 | vx,vy,vz = struct.unpack("hhh", packet[10:16]) 36 | tempAcc, tempGyro = struct.unpack("HH", packet[16:20]) 37 | sonar = struct.unpack("HHHHH", packet[20:30]) 38 | time, value, ref = struct.unpack("HHH", packet[30:36]) 39 | numEcho, sumEcho, gradient = struct.unpack(" 11 | #include 12 | #include 13 | #include // for baud rates and options 14 | #include 15 | #include 16 | #include 17 | #include 18 | #include 19 | #include 20 | 21 | #include 22 | 23 | 24 | 25 | int init() 26 | { 27 | int navdata_fd = 0; 28 | 29 | // Check if the FD isn't already initialized 30 | if (navdata_fd <= 0) { 31 | navdata_fd = open("/dev/ttyO1", O_RDWR | O_NOCTTY); //O_NONBLOCK doesn't work 32 | 33 | if (navdata_fd < 0) { 34 | printf("[navdata] Unable to open navdata board connection(/dev/ttyO1)\n"); 35 | //return FALSE; 36 | } 37 | 38 | // Update the settings of the UART connection 39 | fcntl(navdata_fd, F_SETFL, 0); //read calls are non blocking 40 | //set port options 41 | struct termios options; 42 | //Get the 
/*
 * Open /dev/ttyO1 (the AR.Drone navdata board UART) at 460800 baud,
 * raw mode.  Returns the file descriptor (negative on open failure).
 */
int init()
{
    int navdata_fd = 0;

    /* Check if the FD isn't already initialized */
    if (navdata_fd <= 0) {
        navdata_fd = open("/dev/ttyO1", O_RDWR | O_NOCTTY); /* O_NONBLOCK doesn't work */

        if (navdata_fd < 0) {
            printf("[navdata] Unable to open navdata board connection(/dev/ttyO1)\n");
        }

        /* Update the settings of the UART connection */
        fcntl(navdata_fd, F_SETFL, 0); /* read calls are blocking */
        struct termios options;
        /* Get the current options for the port */
        tcgetattr(navdata_fd, &options);
        /* Set the baud rates to 460800 */
        cfsetispeed(&options, B460800);
        cfsetospeed(&options, B460800);

        options.c_cflag |= (CLOCAL | CREAD); /* enable receiver, local mode */
        options.c_iflag = 0;                 /* clear input options */
        options.c_lflag = 0;                 /* clear local options */
        options.c_oflag &= ~OPOST;           /* raw output */

        tcsetattr(navdata_fd, TCSANOW, &options);
    }

    return navdata_fd;
}


/**
 * Read exactly `count` bytes from `fd`, retrying interrupted/short reads.
 *
 * Returns the number of bytes actually read (may be < count on EOF),
 * or a negative value on a read error.  The original looped forever
 * when read() returned 0 (end of file); we stop instead.
 */
ssize_t full_read(int fd, uint8_t *buf, size_t count)
{
    size_t done = 0;

    while (done < count) {
        ssize_t n = read(fd, buf + done, count - done);
        if (n < 0) {
            if (errno == EAGAIN || errno == EWOULDBLOCK) {
                continue;          /* transient: retry */
            }
            return n;              /* real error */
        }
        if (n == 0) {
            break;                 /* EOF: avoid spinning forever */
        }
        done += n;
    }
    return (ssize_t)done;
}


int main()
{
    /* Kill the stock Parrot firmware first - it owns /dev/ttyO1. */
    system("killall -9 program.elf.respawner.sh; killall -9 program.elf");

    uint8_t buf[256];

    int fd = init();
    /* open() returns -1 on failure, never 0 - the original `fd == 0`
       check could not trigger */
    if (fd < 0) {
        fprintf(stderr, "ERROR opening device\n");
        return -1;
    }
    int i;
    FILE *out = fopen("navdata.bin", "wb");
    for (i = 0; i < 100; i++) {
        /* keep the signed return value: storing it in size_t turned a
           negative error code into a huge fwrite length */
        ssize_t num = full_read(fd, buf, sizeof(buf));
        if (num <= 0) {
            break;                 /* error or EOF - stop logging */
        }
        fwrite(buf, sizeof(uint8_t), (size_t)num, out);
    }
    fclose(out);
    return 0;
}
class PaVE:
    """Incremental parser for PaVE (Parrot Video Encapsulation) streams.

    Raw bytes are fed in via append(); extract() pops one complete
    (header, payload) pair at a time, or (b"", b"") when no complete
    packet is buffered yet.  Byte-string literals keep the behavior
    identical under Python 2 while also working under Python 3.
    """
    def __init__( self ):
        self.buf = b""

    def append( self, packet ):
        "add newly received data to the internal buffer"
        self.buf += packet

    def extract( self ):
        "return single packet (header, payload)"
        if not self.buf.startswith(b"PaVE"):
            if b"PaVE" in self.buf:
                # drop corrupted bytes in front of the next sync word
                self.buf = self.buf[ self.buf.index(b"PaVE") : ]
            else:
                # no sync word at all: keep only a possible partial
                # "PaV" prefix so garbage cannot grow without bound
                self.buf = self.buf[-3:]

        if len(self.buf) < 4+1+1+2+4:
            # at least struct of version and header and payload sizes must be ready
            return b"", b""

        if not self.buf.startswith( b"PaVE" ):
            return b"", b""

        # PaVE header fields are little-endian (AR.Drone 2.0 video stream)
        version, codec, headerSize, payloadSize = struct.unpack_from("<BBHI", self.buf, 4 )
        if len(self.buf) < headerSize + payloadSize:
            return b"", b""

        ret = self.buf[:headerSize], self.buf[headerSize:headerSize+payloadSize]
        self.buf = self.buf[headerSize + payloadSize : ]
        return ret

def isIFrame( header ):
    "return True if I-Frame"
    return struct.unpack_from("<B", header, 30)[0] == 1

def frameEncodedWidth( header ):
    "encoded frame width in pixels"
    return struct.unpack_from("<H", header, 12)[0]

def frameEncodedHeight( header ):
    "encoded frame height in pixels"
    return struct.unpack_from("<H", header, 14)[0]


def frameNumber( header ):
    "sequence number of the frame"
    return struct.unpack_from("<I", header, 20)[0]

def timestamp( header ):
    "frame timestamp in milliseconds"
    return struct.unpack_from("<I", header, 24)[0]

def correctTimePeriod( value, ref, period=2048 ):
    "align drone time with video time (timestamps wrap with the given period)"
    dt = value-ref
    while dt <= -period/2:
        dt += period
    while dt > period/2:
        dt -= period
    return ref+dt
def buildPacket( payload ):
    """Build a minimal (header, payload) PaVE packet for the tests.

    Uses byte strings and an explicit little-endian layout so the helper
    behaves identically under Python 2 (where b"..." == "...") and
    works under Python 3, where "PaVE" + struct.pack(...) would raise
    TypeError (str + bytes).
    """
    return b"PaVE" + struct.pack("<BBHI", 3, 4, 12, len(payload)), payload
append( h1 + s1 + "bad bytes" + h2 + s2 ) 49 | self.assertEqual( p.extract(), (h1,s1) ) 50 | self.assertEqual( p.extract(), (h2,s2) ) 51 | 52 | def testCorrectTimePeriod( self ): 53 | self.assertAlmostEqual( correctTimePeriod( 0.0, ref=0.0 ), 0.0, 5 ) 54 | self.assertAlmostEqual( correctTimePeriod( 7806.415, ref=1662.753082 ), 1662.753082-0.338082, 5 ) 55 | 56 | if __name__ == "__main__": 57 | unittest.main() 58 | 59 | -------------------------------------------------------------------------------- /paveproxy.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | """ 3 | Socket for filtering PaVE (Parrot Video Encapsulation) packets 4 | ./paveproxy.py 5 | """ 6 | import sys 7 | import struct 8 | import socket 9 | 10 | from threading import Thread,Event,Lock 11 | from pave import PaVE 12 | 13 | HOST = 'localhost' # access URL for cv2.VideoCapture 14 | PORT = 50007 15 | BUFFER_SIZE = 1024 16 | 17 | class PaVEProxy( Thread ): 18 | def __init__( self, readCbk ): 19 | Thread.__init__( self ) 20 | self.setDaemon(True) 21 | self.lock = Lock() 22 | self.shouldIRun = Event() 23 | self.shouldIRun.set() 24 | self.serverSocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 25 | self.socket, self.addr = None, None 26 | self.readCbk = readCbk 27 | self.pave = PaVE() 28 | self.start() 29 | 30 | def run( self ): 31 | "wait only for the first client - delayed binding" 32 | self.serverSocket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) 33 | self.serverSocket.bind((HOST, PORT)) 34 | self.serverSocket.listen(1) 35 | self.socket, self.addr = self.serverSocket.accept() 36 | while self.shouldIRun.isSet(): 37 | self.pave.append( self.readCbk( BUFFER_SIZE ) ) 38 | header,payload = self.pave.extract() 39 | if payload: 40 | self.socket.send( payload ) 41 | 42 | def term( self ): 43 | self.shouldIRun.clear() 44 | if self.socket==None: 45 | print "KILLING run()" 46 | tmpSocket = socket.socket(socket.AF_INET, 
class Pose:
    """Planar pose: position (x, y) in meters plus heading in radians."""

    def __init__( self, x=0.0, y=0.0, heading=0.0 ):
        self.x, self.y, self.heading = x, y, heading

    def __str__( self ):
        # heading is reported in whole degrees for readability
        return "(%.2f, %.2f, %d)" % (self.x, self.y, math.degrees(self.heading))

    def add( self, pose ):
        "compose: express `pose` (given relative to self) in self's parent frame"
        c, s = math.cos( self.heading ), math.sin( self.heading )
        return Pose( self.x + c * pose.x - s * pose.y,
                     self.y + s * pose.x + c * pose.y,
                     self.heading + pose.heading )

    def sub( self, pose ):
        "inverse of add(): express self relative to `pose` (not symmetric)"
        c, s = math.cos( -pose.heading ), math.sin( -pose.heading )
        dx, dy = self.x - pose.x, self.y - pose.y
        return Pose( c * dx - s * dy,
                     s * dx + c * dy,
                     self.heading - pose.heading )

    def coord( self ):
        "position only, as an (x, y) tuple"
        return (self.x, self.y)

    def __iter__( self ):
        return self.iterator()

    def iterator( self ):
        "yield x, y, heading in order (enables tuple(pose) and unpacking)"
        for value in (self.x, self.y, self.heading):
            yield value
def angleTo( f, t ):
    """Azimuth from point `f` to point `t` in radians.

    Nearly coincident points (within 1e-4 on both axes) yield 0
    instead of an arbitrary atan2 result.
    """
    dx, dy = t[0] - f[0], t[1] - f[1]
    if math.fabs( dx ) < 0.0001 and math.fabs( dy ) < 0.0001:
        return 0
    return math.atan2( dy, dx )
def loadLatLonPtsFromStr( text ):
    """Parse "lat lon" lines into a list of (lon, lat) float tuples.

    Blank lines and lines starting with '#' are skipped.  Note the
    output order is swapped to (lon, lat).  The parameter was renamed
    from `str`, which shadowed the builtin.  Raises AssertionError on
    malformed (non two-column) lines.
    """
    pts = []
    for line in text.split("\n"):
        if len(line) > 0 and line[0] != '#':
            assert( len(line.split()) == 2 )
            lat, lon = line.split()
            pts.append( (float(lon), float(lat)) )
    return pts
class DummyConvertor:
    """Identity convertor for routes whose points are already in meters.

    Drop-in replacement for Convertor when no geo projection is needed.
    """

    def geo2planar( self, pos ):
        "no-op: coordinates are returned unchanged"
        return pos

    def planar2geo( self, pos ):
        "no-op: coordinates are returned unchanged"
        return pos
[snap]+[self.conv.planar2geo(p) for p in self.pts[-index:]] 136 | else: 137 | # some end point 138 | first = [self.conv.planar2geo(p) for p in self.pts[:index+1]] 139 | second = [self.conv.planar2geo(p) for p in self.pts[index:]] 140 | return first, second 141 | 142 | def pointAtDist( self, dist ): 143 | # return point on route in given distance 144 | if len(self.pts) < 1: 145 | return None 146 | return self.conv.planar2geo( pointAtPolyLineDist( self.pts, dist ) ) 147 | 148 | def turnAngleAt( self, pos, radius = 2.0 ): 149 | first, second = self.routeSplit( pos ) 150 | assert( len(first) >= 1 ) 151 | assert( len(second) >= 1 ) 152 | if self.isLoop: 153 | geoPts = [self.conv.planar2geo(x) for x in self.pts] 154 | first = geoPts + first 155 | second = second + geoPts 156 | if len(first) == 1 or len(second) == 1: 157 | return 0.0 # start or end of route 158 | nextPos = pointAtPolyLineDist( [ self.conv.geo2planar(x) for x in second ], radius ) 159 | currPos = self.conv.geo2planar( second[0] ) 160 | first.reverse() 161 | prevPos = pointAtPolyLineDist( [ self.conv.geo2planar(x) for x in first ], radius ) 162 | toNext = math.atan2( nextPos[1]-currPos[1], nextPos[0]-currPos[0] ) 163 | toPrev = math.atan2( currPos[1]-prevPos[1], currPos[0]-prevPos[0] ) 164 | ret = toNext - toPrev 165 | if ret < -math.pi: 166 | ret += 2*math.pi 167 | if ret > math.pi: 168 | ret -= 2*math.pi 169 | return ret 170 | 171 | 172 | if __name__ == "__main__": 173 | import sys 174 | if len( sys.argv ) < 3: 175 | print __doc__ 176 | sys.exit(-1) 177 | 178 | pts = loadKML( sys.argv[1] ) 179 | saveLatLonPts( pts, sys.argv[2], note = "Converted from "+sys.argv[1] ) 180 | 181 | """ 182 | if len( sys.argv ) < 4: 183 | print __doc__ 184 | else: 185 | route = Route( loadKML( sys.argv[1] ) ) 186 | pts = loadKML( sys.argv[2] ) 187 | snap = [ route.findNearest( p ) for p in pts ] 188 | saveKML( snap, sys.argv[3], 'Test of snapped output', color='7f0000ff', width = 10 ) 189 | """ 190 | 
def approx4pts( poly ):
    """Approximate contour `poly` by a polygon, aiming for 4 vertices.

    Starts with epsilon=30; when that yields fewer than 4 points,
    retries with a finer epsilon=20, when more than 4 with a coarser
    epsilon=50.  The result may still have a vertex count other than 4.
    """
    candidate = cv2.approxPolyDP( poly, epsilon = 30, closed=True )
    count = len( candidate )
    if count == 4:
        return candidate
    # under-segmented -> finer tolerance, over-segmented -> coarser
    eps = 20 if count < 4 else 50
    return cv2.approxPolyDP( poly, epsilon = eps, closed=True )
def rect2BLBRTRTL( rect ):
    """Order 4 corner points as bottom-left, bottom-right, top-right,
    top-left (image coordinates: larger y means lower in the image).

    Returns None when `rect` does not contain exactly 4 points.
    """
    if len(rect) != 4:
        return None
    # sort by y first, then x; the first two entries are the top edge
    s = sorted( [ (x, y) for x, y in rect ], key=lambda p: (p[1], p[0]) )
    if s[0][0] < s[1][0]:
        TL, TR = s[0], s[1]
    else:
        TR, TL = s[0], s[1]
    if s[2][0] < s[3][0]:
        BL, BR = s[2], s[3]
    else:
        BR, BL = s[2], s[3]
    return BL, BR, TR, TL
def timeName( prefix, ext ):
    """Return a timestamped filename "<prefix>YYMMDD_HHMMSS.<ext>"
    built from the current local time."""
    stamp = datetime.datetime.now().strftime( "%y%m%d_%H%M%S" )
    return "%s%s.%s" % ( prefix, stamp, ext )
def project2plane( imgCoord, coord, height, heading, angleFB, angleLR ):
    """Project an image pixel onto the ground plane.

    imgCoord -- (x, y) pixel in a 1280x720 frame
    coord    -- drone (x, y) world position in meters
    height   -- altitude above the plane in meters
    heading  -- drone heading in radians
    angleFB, angleLR -- forward/backward and left/right tilt in radians

    Returns the (x, y) ground intersection, or None when the viewing
    ray does not hit the plane in front of the drone (tilt >= -EPS).
    """
    FOW = math.radians(70)  # horizontal field of view of the camera
    EPS = 0.0001
    # pixel offsets from the image center; force floats so the angle
    # scaling below is a true division -- under Python 2 the original
    # `-x/1280` floor-divided for the integer pixel coordinates coming
    # from trapezoid2line()
    x, y = float(imgCoord[0]) - 1280/2.0, 720/2.0 - float(imgCoord[1])
    angleLR = -angleLR  # we want to compensate the turn
    x, y = ( x*math.cos(angleLR) - y*math.sin(angleLR),
             y*math.cos(angleLR) + x*math.sin(angleLR) )
    h = -x/1280.0*FOW + heading
    tilt = y/1280.0*FOW + angleFB
    if tilt > -EPS:
        return None  # close to 0.0 AND projection behind drone
    dist = height/math.tan(-tilt)
    return (coord[0]+math.cos(h)*dist , coord[1]+math.sin(h)*dist)
abs(lineR.signedDistance(bl)),abs(lineL.signedDistance(br)),abs(lineL.signedDistance(tr)),abs(lineR.signedDistance(tl)) 215 | 216 | def roadWidth( roadPts ): 217 | return roadWidthArr( roadPts)[0] 218 | 219 | def chooseBestWidth( rects, coord, height, heading, angleFB, angleLR, debugImg=None, color=None ): 220 | best = None 221 | for rect in rects: 222 | navLine = trapezoid2line( rect ) 223 | if navLine: 224 | start = project2plane( navLine[0], coord, height, heading, angleFB, angleLR ) 225 | end = project2plane( navLine[1], coord, height, heading, angleFB, angleLR ) 226 | if start and end: 227 | preRefLine = Line(start, end) 228 | obst = [] 229 | for p in rect2BLBRTRTL(rect): 230 | p2d = project2plane( p, coord, height, heading, angleFB, angleLR ) 231 | if p2d: 232 | obst.append( p2d ) 233 | print "COORD", p2d 234 | if len( obst ) == 4: 235 | widthArr = roadWidthArr( obst ) 236 | print "(%.1f %.1f %.1f %.1f)" % widthArr, 237 | widthMin,widthMax = min(widthArr), max(widthArr) 238 | if widthMin >= ROAD_WIDTH_MIN and widthMax <= ROAD_WIDTH_MAX and widthMax-widthMin <= ROAD_WIDTH_VARIANCE: 239 | w = (widthArr[0]+widthArr[1])/2.0 240 | if best == None or abs(best[0]-ROAD_WIDTH) > abs(w-ROAD_WIDTH): 241 | best = w,preRefLine,rect 242 | if best == None: 243 | print " Width: None" 244 | return None 245 | print " Width %.2f" % best[0] 246 | if debugImg != None: 247 | cv2.drawContours( debugImg, [np.int0(best[2])],0,(255,0,0),2 ) 248 | navLine = trapezoid2line( best[2] ) 249 | if navLine: 250 | if color == None: 251 | color = (0,0,255) 252 | drawArrow( debugImg, navLine[0], navLine[1], color, 4) 253 | 254 | return best[1] 255 | 256 | class RobotemRovneDrone( ARDrone2 ): 257 | def __init__( self, replayLog=None, speed = 0.2, skipConfigure=False, metaLog=None, console=None ): 258 | self.loggedVideoResult = None 259 | self.lastImageResult = None 260 | self.videoHighResolution = True 261 | ARDrone2.__init__( self, replayLog, speed, skipConfigure, metaLog, console ) 262 | if 
# --- rr_drone.py (continued) ----------------------------------------------
# NOTE(review): this chunk opens mid-statement; the lines below are the tail
# of RobotemRovneDrone's video-logger setup (the enclosing "def" and the
# "if " keyword of the first condition are above the visible window).
        replayLog == None:
            # live run: write processed video results into a fresh cv2 log
            name = timeName( "logs/src_cv2_", "log" )
            metaLog.write("cv2: "+name+'\n' )
            self.loggedVideoResult = SourceLogger( getOrNone, name ).get
            self.startVideo( wrapper, g_queueResults, record=True, highResolution=self.videoHighResolution )
        else:
            # replay run: feed back results recorded in the metaLog session
            assert metaLog
            self.loggedVideoResult = SourceLogger( None, metaLog.getLog("cv2:") ).get
            self.startVideo( record=True, highResolution=self.videoHighResolution )

    def update( self, cmd="AT*COMWDG=%i,\r" ):
        # One control step; the default command only resets the communication
        # watchdog.  After the base update, refresh the cached video result
        # ((frameNumber, timestamp), rects) from the logger/replay source.
        ARDrone2.update( self, cmd )
        if self.loggedVideoResult != None:
            self.lastImageResult = self.loggedVideoResult()


def downloadOldVideo( drone, timeout ):
    "download video if it is delayed more than MAX_VIDEO_DELAY"
    # Spin drone.update() with image processing disabled until the timestamp
    # of the last received frame is close enough to "now", or until timeout.
    global g_processingEnabled
    restoreProcessing = g_processingEnabled
    print "Waiting for video to start ..."
    startTime = drone.time
    while drone.time < startTime + timeout:
        if drone.lastImageResult != None:
            (frameNumber, timestamp), rects = drone.lastImageResult
            # video timestamps are in milliseconds and wrap; realign to drone.time
            videoTime = correctTimePeriod( timestamp/1000., ref=drone.time )
            videoDelay = drone.time - videoTime
            print videoDelay
            if videoDelay < MAX_ALLOWED_VIDEO_DELAY:
                print "done"
                break
        g_processingEnabled = False
        drone.update()
    g_processingEnabled = restoreProcessing


def competeRobotemRovne( drone, desiredHeight = 1.5 ):
    # Main autonomous routine: take off, then for up to 600 s keep following
    # the reference line (refLine) detected in the bottom camera, regulating
    # altitude (sz), forward speed (sx), side speed (sy) and rotation (sa).
    drone.speed = 0.1
    maxVideoDelay = 0.0
    maxControlGap = 0.0
    desiredSpeed = MAX_ALLOWED_SPEED
    refLine = DEFAULT_REF_LINE

    try:
        drone.wait(1.0)
        drone.setVideoChannel( front=True )
        downloadOldVideo( drone, timeout=20.0 )
        drone.takeoff()
        poseHistory = []
        startTime = drone.time
        while drone.time < startTime + 1.0:
            drone.update("AT*PCMD=%i,0,0,0,0,0\r") # drone.hover(1.0)
            # TODO sonar/vision altitude
            poseHistory.append( (drone.time, (drone.coord[0], drone.coord[1], drone.heading), (drone.angleFB, drone.angleLR), None) )
        magnetoOnStart = drone.magneto[:3]
        print "NAVI-ON"
        startTime = drone.time
        sx,sy,sz,sa = 0,0,0,0
        lastUpdate = None
        while drone.time < startTime + 600.0:

            # --- altitude estimate: blend of vision and sonar (mm -> m) ---
            altitude = desiredHeight
            if drone.altitudeData != None:
                altVision = drone.altitudeData[0]/1000.0
                altSonar = drone.altitudeData[3]/1000.0
                altitude = (altSonar+altVision)/2.0
                # TODO selection based on history? panic when min/max too far??
                if abs(altSonar-altVision) > 0.5:
                    print altSonar, altVision
                    altitude = max( altSonar, altVision ) # sonar is 0.0 sometimes (no ECHO)

            sz = max( -0.2, min( 0.2, desiredHeight - altitude ))
            if altitude > 2.5:
                # wind and "out of control" -- allow stronger descent
                sz = max( -0.5, min( 0.5, desiredHeight - altitude ))

            sx = max( 0, min( drone.speed, desiredSpeed - drone.vx ))

            if drone.lastImageResult:
                lastUpdate = drone.time
                assert len( drone.lastImageResult ) == 2 and len( drone.lastImageResult[0] ) == 2, drone.lastImageResult
                (frameNumber, timestamp), rects = drone.lastImageResult
                viewlog.dumpVideoFrame( frameNumber, timestamp )

                # keep history small -- drop poses older than the video frame,
                # leaving (oldTime, oldPose, oldAngles, oldAltitude) matched to it
                videoTime = correctTimePeriod( timestamp/1000., ref=drone.time )
                videoDelay = drone.time - videoTime
                if videoDelay > 1.0:
                    print "!DANGER! - video delay", videoDelay
                maxVideoDelay = max( videoDelay, maxVideoDelay )
                toDel = 0
                for oldTime, oldPose, oldAngles, oldAltitude in poseHistory:
                    toDel += 1
                    if oldTime >= videoTime:
                        break
                poseHistory = poseHistory[:toDel]

                tiltCompensation = Pose(desiredHeight*oldAngles[0], desiredHeight*oldAngles[1], 0) # TODO real height?
                print "FRAME", frameNumber/15, "[%.1f %.1f]" % (math.degrees(oldAngles[0]), math.degrees(oldAngles[1])),
                # print "angle", math.degrees(drone.angleFB-oldAngles[0]), math.degrees(drone.angleLR-oldAngles[1])
                if rects != None and len(rects) > 0:
                    # note, that "rects" could be None, when video processing is OFF (g_processingEnabled==False)
                    if oldAltitude == None:
                        oldAltitude = altitude
                    debugImg = None
                    if drone.replayLog != None:
                        debugFilename = "tmp_%04d.jpg" % (frameNumber/FRAMES_PER_INDEX)
                        debugImg = cv2.imread( debugFilename )
                    line = chooseBestWidth( rects, coord=oldPose[:2], height=oldAltitude,
                            heading=oldPose[2], angleFB=oldAngles[0], angleLR=oldAngles[1], debugImg=debugImg )
                    if line:
                        rejected = False
                        if refLine != None:
                            print "%.1fdeg %.2fm" % (math.degrees(normalizeAnglePIPI(refLine.angle - line.angle)), refLine.signedDistance( line.start ))
                            # accept the new line only if it does not deviate too
                            # much from the current reference (angle and offset)
                            if abs(normalizeAnglePIPI(refLine.angle - line.angle)) < MAX_REF_LINE_ANGLE and \
                                    abs(refLine.signedDistance( line.start )) < MAX_REF_LINE_DIST:
                                refLine = line
                            else:
                                rejected = True
                        else:
                            refLine = line
                        viewlog.dumpBeacon( line.start, index=3 )
                        viewlog.dumpBeacon( line.end, index=3 )
                        if drone.replayLog != None:
                            if rejected:
                                # redraw image with yellow color
                                chooseBestWidth( rects, coord=oldPose[:2], height=oldAltitude,
                                        heading=oldPose[2], angleFB=oldAngles[0], angleLR=oldAngles[1], debugImg=debugImg, color=(0,255,255) )
                            cv2.imwrite( debugFilename, debugImg )
                else:
                    print rects

                # stop forward motion when the video lags too far behind
                desiredSpeed = MAX_ALLOWED_SPEED
                if videoDelay > MAX_ALLOWED_VIDEO_DELAY:
                    desiredSpeed = 0.0

            if drone.battery < 10:
                print "BATTERY LOW!", drone.battery

            # height debugging
            #print "HEIGHT\t%d\t%d\t%.2f\t%d\t%d\t%d\t%d\t%d\t%d" % tuple([max([0]+[w for ((x,y),(w,h),a) in rects])] +
            #        list(drone.altitudeData[:4]) + list(drone.pressure) )

            # error definition ... if you substract that you get desired position or angle
            # error is taken from the path point of view, x-path direction, y-positive left, angle-anticlockwise
            errY, errA = 0.0, 0.0
            if refLine:
                errY = refLine.signedDistance( drone.coord )
                errA = normalizeAnglePIPI( drone.heading - refLine.angle )

            # get the height first
            if drone.coord[2] < desiredHeight - 0.1 and drone.time-startTime < 5.0:
                sx = 0.0
            # error correction
            # the goal is to have errY and errA zero in 1 second -> errY defines desired speed at given distance from path
            sy = max( -0.2, min( 0.2, -errY-drone.vy ))/2.0

            # there is no drone.va (i.e. derivative of heading) available at the moment ...
            sa = max( -0.1, min( 0.1, -errA/2.0 ))*1.35*(desiredSpeed/0.4) # originally set for 0.4=OK

            # print "%0.2f\t%d\t%0.2f\t%0.2f\t%0.2f" % (errY, int(math.degrees(errA)), drone.vy, sy, sa)
            prevTime = drone.time
            drone.moveXYZA( sx, sy, sz, sa )
            maxControlGap = max( drone.time - prevTime, maxControlGap )
            poseHistory.append( (drone.time, (drone.coord[0], drone.coord[1], drone.heading), (drone.angleFB, drone.angleLR), altitude) )
        print "NAVI-OFF", drone.time - startTime
        drone.hover(0.5)
        drone.land()
        drone.setVideoChannel( front=True )
    except ManualControlException, e:
        print "ManualControlException"
        # NOTE(review): nesting of the shutdown sequence reconstructed from the
        # dump -- confirm that land() is outside the ctrlState condition.
        if drone.ctrlState == 3: # CTRL_FLYING=3 ... i.e. stop the current motion
            drone.hover(0.1)
        drone.land()
    drone.wait(1.0)
    drone.stopVideo()
    print "MaxVideoDelay", maxVideoDelay
    print "MaxControlGap", maxControlGap
    print "Battery", drone.battery


if __name__ == "__main__":
    if len(sys.argv) > 2 and sys.argv[1] == "img":
        imgmain( sys.argv[1:], processFrame )
        sys.exit( 0 )
    import launcher
    launcher.launch( sys.argv, RobotemRovneDrone, competeRobotemRovne )

# --- rr_drone_test.py ------------------------------------------------------
from rr_drone import *
import unittest

class RRDoneTest( unittest.TestCase ):
    def testApprox4pts( self ):
        poly = [(983, 579), (261, 579), (262, 578), (266, 576), (314, 553),
                (358, 534), (370, 529), (448, 498), (481, 488), (587, 460), (749, 460),
                (817, 485), (838, 494), (920, 536), (944, 552)]
        trapezoid = [(a[0][0], a[0][1]) for a in approx4pts( np.int0(poly) )]
        self.assertEqual( len(trapezoid), 4 )
        # NOTE(review): assertTrue( x, msg ) only checks truthiness of the
        # first argument -- this was probably meant to be assertEqual.
        self.assertTrue( trapezoid, [(983, 579), (261,579), (587, 460), (749, 460)] )

    def testTrapezoid2line( self ):
        self.assertEqual( trapezoid2line( [(983, 579), (261,579), (587, 460), (749, 460)] ),
                [((983+261)/2,579), ((587+749)/2, 460)] )
        self.assertEqual( sorted(trapezoid2line( [(1246, 460), (1231, 570), (1152, 572), (1155, 460)] )),
                sorted([((1231+1152)/2,571), ((1246+1155)/2, 460)]) )

    def testProject2plane( self ):
        (x,y) = project2plane( imgCoord=(1280/2, 720/2), coord=(0.0, 0.0), height = 1.5,
                heading=math.radians(45), angleFB=math.radians(-30), angleLR=0 )
        self.assertAlmostEqual( x, y, 5 ) # because of 45 deg
        self.assertAlmostEqual( x, (1.5*math.sqrt(3))*(math.sqrt(2)/2.), 5 )
        self.assertEqual( project2plane( imgCoord=(1280/2, 720/2), coord=(0.0, 0.0), height = 1.5,
heading=math.radians(45), angleFB=math.radians(0), angleLR=0 ), None ) 26 | (x,y) = project2plane( imgCoord=(200, 720/2), coord=(0.0, 0.0), height = 1.5, 27 | heading=0, angleFB=0, angleLR=math.radians(-10) ) # angleLR is positive for turn right 28 | 29 | def testTrapezoid2lineBug( self ): 30 | "the line is oriented from drone to horizon" 31 | begin,end = trapezoid2line( [(739, 497), (690, 579), (316, 570), (631, 460)] ) 32 | self.assertTrue( begin[1] > end[1] ) # in image coordinates 33 | 34 | def testRect2BLBRTRTL( self ): 35 | BL,BR,TR,TL = rect2BLBRTRTL( [(739, 497), (690, 579), (316, 570), (631, 460)] ) 36 | self.assertEqual( BL, (316, 570) ) 37 | sortedRect = rect2BLBRTRTL( [(775, 565), (438, 577), (329, 466), (643, 460)] ) 38 | 39 | def testRoadWidth( self ): 40 | self.assertEqual( roadWidth([(1.0, 0.0), (2.6, 0.5), (2.6, 5.0), (1.0, 6.0)]), 1.6 ) 41 | self.assertAlmostEqual( roadWidth([(1.07, 3.69), (2.63, 2.57), (4.10, 5.08), (2.88, 6.02)]), 1.9, 1 ) 42 | 43 | if __name__ == "__main__": 44 | unittest.main() 45 | 46 | -------------------------------------------------------------------------------- /sonar.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | """ 3 | Open telnet session to drone and get sonar readings 4 | usage: 5 | ./sonar.py 6 | """ 7 | import sys 8 | from telnetlib import Telnet 9 | from threading import Thread,Event,Lock 10 | import datetime 11 | 12 | #SONAR_EXE = "/data/video/sonar-732-arm" 13 | SONAR_EXE = "/data/video/sonar-130610-2034-arm" 14 | 15 | class Sonar( Thread ): 16 | def __init__( self ): 17 | Thread.__init__(self) 18 | self.setDaemon(True) 19 | self.lock = Lock() 20 | self.shouldIRun = Event() 21 | self.shouldIRun.set() 22 | self._data = None 23 | self.session = None 24 | self.log = open( datetime.datetime.now().strftime("logs/sonar_%y%m%d_%H%M%S.log"), "w" ) 25 | 26 | def run(self): 27 | if self.session == None: 28 | self.session = Telnet( '192.168.1.1' ) 29 | 
self.checkDriver() 30 | self.startSonar() 31 | self.sonarIndex = 0 32 | while self.shouldIRun.isSet(): 33 | self.update() 34 | 35 | def data( self ): 36 | self.lock.acquire() 37 | xy = self._data 38 | self.lock.release() 39 | return xy 40 | 41 | def requestStop( self ): 42 | self.shouldIRun.clear() 43 | 44 | def execute( self, cmd, verbose=True ): 45 | self.session.write( cmd + '\n' ) 46 | ret = [] 47 | while 1: 48 | line = self.session.read_until('\n', timeout=1.0).strip() 49 | self.log.write(str(line)+'\n') 50 | self.log.flush() 51 | if line == '#': 52 | break 53 | print line 54 | ret.append( line ) 55 | return ret 56 | 57 | def checkDriver( self ): 58 | self.execute( "killall %s" % SONAR_EXE.split('/')[-1] ) 59 | if self.execute( "ls /dev/ttyACM0" )[-1].find("No such file or directory") != -1: 60 | self.execute( "lsusb" ) 61 | self.execute( "insmod /data/video/driver2/cdc-acm.ko" ) 62 | self.execute( "stty -F /dev/ttyACM0 intr ^- quit ^- erase ^- kill ^- eof ^- start ^- stop ^- susp ^- rprnt ^- werase ^- lnext ^- flush ^-" ) 63 | self.execute( "stty -F /dev/ttyACM0 -brkint -icrnl -imaxbel -opost -onlcr -isig -icanon -iexten -echo -echoe -echok -echoctl -echoke" ) 64 | 65 | 66 | def startSonar( self ): 67 | self.session.write( SONAR_EXE + '\n' ) 68 | 69 | 70 | def killSonar( self ): 71 | tmpSession = Telnet( '192.168.1.1' ) 72 | tmpSession.write('killall %s\n' % SONAR_EXE.split('/')[-1]) 73 | while 1: 74 | line = self.session.read_until('\n', timeout=1.0).strip() 75 | if line == '#': 76 | break 77 | tmpSession.close() 78 | 79 | 80 | def update( self ): 81 | line = self.session.read_until('\n', timeout=1.0).strip() 82 | self.log.flush() 83 | self.log.write(str(line)+'\n') 84 | s = line.split() 85 | if len(s) >= 2 and s[0] == "sonar:": 86 | self._data = self.sonarIndex, int(line.split()[1]) 87 | self.log.write(str(self._data)+'\n') 88 | self.log.flush() 89 | self.sonarIndex += 1 90 | 91 | def close( self ): 92 | self.killSonar() 93 | self.session.close() 94 | 95 | 
def testSonar( filename=None ): 96 | s = Sonar() 97 | for i in xrange(10): 98 | s.update() 99 | print s.data() 100 | s.close() 101 | 102 | if __name__ == "__main__": 103 | if len(sys.argv) < 2: 104 | print __doc__ 105 | sys.exit(2) 106 | testSonar( filename=sys.argv[1] ) 107 | 108 | -------------------------------------------------------------------------------- /sourcelogger.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | """ 3 | SourceLogger from Eduro project 4 | """ 5 | 6 | class SourceLogger: 7 | def __init__( self, sourceGet, filename ): 8 | self.sourceGet = sourceGet 9 | self.counter = 0 10 | self.prevData = None 11 | if self.sourceGet != None: 12 | self.file = open( filename, 'w' ) 13 | else: 14 | self.file = open( filename ) 15 | try: 16 | self.counterLimit = int(self.file.readline()) 17 | except ValueError: 18 | # case when given device was not started 19 | print "EMPTY FILE!!!" 20 | self.counterLimit = 10000 # "infinity" 21 | 22 | 23 | def get( self ): 24 | if self.sourceGet != None: 25 | data = self.sourceGet() 26 | if data != None and data != self.prevData: 27 | self.file.write( str(self.counter) + "\n" ) 28 | self.file.write( repr(data) + "\n" ) 29 | self.file.flush() 30 | self.counter = 1 31 | self.prevData = data 32 | return self.prevData 33 | else: 34 | if self.counter >= self.counterLimit: 35 | self.counter = 1 36 | self.prevData = eval( self.file.readline() ) 37 | nextCnt = self.file.readline() 38 | self.counterLimit = float('inf') if nextCnt == '' else int(nextCnt) 39 | return self.prevData 40 | self.counter += 1 41 | return None 42 | 43 | def generator( self ): 44 | assert( self.sourceGet is None ) 45 | while 1: 46 | line = self.file.readline() 47 | if len(line) == 0: 48 | break 49 | self.prevData = eval( line ) 50 | self.counterLimit = int(self.file.readline()) 51 | yield self.prevData 52 | 53 | def __del__( self ): 54 | if self.sourceGet != None: 55 | self.file.write( 
                str(self.counter) + "\n" )
        self.file.close()

# --- striploc.py ------------------------------------------------------------
"""
Strips Localisation for Air Race
"""
from pose import Pose
from ardrone2 import normalizeAnglePIPI # TODO refactoring
from line import Line

import math

REF_CIRCLE_RADIUS = 1.25 # TODO measure in real arena!
REF_LINE_CROSSING_ANGLE = math.radians(50) # angle for selection of proper strip

LINE_OFFSET = 0.4 # asymetric line navigation
NUM_TRANSITION_STEPS = 5 # to switch from left/right orientation, also max limits
MAX_CIRCLE_OFFSET = 0.3 # for smooth transition, can be different


PATH_STRAIGHT = 'I'
PATH_TURN_RIGHT = 'R'
PATH_TURN_LEFT = 'L'


class StripsLocalisation:
    # Tracks the drone's position relative to a path marked by painted strips.
    # Each camera frame supplies strip poses (relative to the drone); pairs of
    # strips classify the local path as straight / left turn / right turn and
    # yield a reference circle or line for navigation.
    def __init__( self ):
        self.lastStripPose = None     # list of absolute strip poses from the previous frame
        self.pathType = PATH_TURN_LEFT
        self.pathPose = None          # absolute pose of the strip anchoring pathType
        self.pathUpdated = False
        self.refCircle = None         # (center, radius) when turning
        self.refLine = None           # Line when going straight
        self.countLR = 0              # smooth left/right transition counter in [-N, N]
        self.crossing = False


    def diff2pathType( self, dx, dy, da ):
        # Classify the relative displacement between two strips: consecutive
        # strips are ~0.25-0.48 m apart; heading change decides the turn type.
        da = normalizeAnglePIPI(da)
        if (0.25 < dx < 0.48) and abs(da) < math.radians(50) and (abs(dy) < 0.25):
            if abs(da) < math.radians(10):
                return PATH_STRAIGHT
            elif da > 0:
                return PATH_TURN_LEFT
            else:
                return PATH_TURN_RIGHT
        return None


    def isSameStrip( self, pose1, pose2 ):
        "two absolute poses refer to the same physical strip if they nearly coincide"
        dx,dy,da = pose1.sub(pose2)
        da = normalizeAnglePIPI(da)
        if abs(dx) < 0.2 and abs(dy) < 0.1 and abs(da) < math.radians(10):
            return True
        return False


    def updateFrame( self, pose, frameStrips, verbose=False ):
        # Process one camera frame: 'pose' is the drone pose, 'frameStrips'
        # are strip poses relative to the drone.  Returns self.pathUpdated.
        # NOTE(review): block nesting below reconstructed from a collapsed
        # dump -- the placement of the SKIPPED/crossing branches should be
        # confirmed against the repository.
        if verbose:
            print pose, [str(p) for p in frameStrips]
        self.pathUpdated = False
        self.crossing = False
        # 1) try pairs of strips within this single frame
        for i in xrange(len(frameStrips)):
            for j in xrange(len(frameStrips)):
                if i != j:
                    (dx,dy,da) = frameStrips[i].sub(frameStrips[j])
                    pt = self.diff2pathType( dx, dy, da )
                    if pt:
                        sPose = pose.add( frameStrips[i] )
                        # a STRAIGHT classification crossing the reference line
                        # at a steep angle is a track crossing, not our path
                        if pt != PATH_STRAIGHT or self.refLine == None or \
                                abs(normalizeAnglePIPI( self.refLine.angle - sPose.heading )) < REF_LINE_CROSSING_ANGLE:
                            self.pathType = pt
                            self.pathPose = sPose # also j should be OK
                            self.pathUpdated = True
                            break
                        else:
                            print "SKIPPED2"
                            self.crossing = True

        # 2) fall back to pairing current strips with the previous frame's strips
        if (len(frameStrips) >= 1 and not self.pathUpdated) and self.lastStripPose != None:
            for fs in frameStrips:
                for lsp in self.lastStripPose:
                    sPose = pose.add( fs )
                    (dx,dy,da) = sPose.sub( lsp )
                    pt = self.diff2pathType( dx, dy, da )
                    if pt:
                        if pt != PATH_STRAIGHT or self.refLine == None or \
                                abs(normalizeAnglePIPI( self.refLine.angle - sPose.heading )) < REF_LINE_CROSSING_ANGLE:
                            self.pathType = pt
                            self.pathPose = sPose
                            self.pathUpdated = True
                            break
                        else:
                            print "SKIPPED1"

        # 3) single re-detected strip: keep pathType, refresh only the pose
        if not self.pathUpdated:
            if len(frameStrips) == 1 and self.lastStripPose != None:
                fs = frameStrips[0]
                sPose = pose.add( fs )
                for lsp in self.lastStripPose:
                    if self.isSameStrip( sPose, lsp ):
                        # the self.pathType is the same only the pose is updated
                        self.pathPose = sPose
                        self.pathUpdated = True
                if not self.pathUpdated:
                    for lsp in self.lastStripPose:
                        print sPose.sub(lsp)

        # remember this frame's strips (in absolute coordinates)
        if len(frameStrips) > 0:
            self.lastStripPose = []
            for fs in frameStrips:
                sPose = pose.add( fs )
                if verbose and self.lastStripPose != None:
                    print [str(sPose.sub( lsp )) for lsp in self.lastStripPose]
                self.lastStripPose.append( sPose )

        # smooth transition counter between left and right turns
        if self.pathType == PATH_TURN_LEFT:
            self.countLR = max( -NUM_TRANSITION_STEPS, self.countLR - 1 )
        if self.pathType == PATH_TURN_RIGHT:
            self.countLR = min( NUM_TRANSITION_STEPS, self.countLR + 1 )

        # derive the navigation reference (circle for turns, line for straight)
        if self.pathPose:
            sPose = self.pathPose
            if self.pathType == PATH_TURN_LEFT:
                circCenter = sPose.add( Pose(0.0, REF_CIRCLE_RADIUS, 0 )).coord()
                self.refCircle = circCenter, REF_CIRCLE_RADIUS - MAX_CIRCLE_OFFSET*self.countLR/float(NUM_TRANSITION_STEPS)
            elif self.pathType == PATH_TURN_RIGHT:
                circCenter = sPose.add( Pose(0.0, -REF_CIRCLE_RADIUS, 0 )).coord()
                self.refCircle = circCenter, REF_CIRCLE_RADIUS + MAX_CIRCLE_OFFSET*self.countLR/float(NUM_TRANSITION_STEPS)
            else:
                self.refCircle = None
            if self.pathType == PATH_STRAIGHT:
                if self.refLine == None or abs(normalizeAnglePIPI( self.refLine.angle - sPose.heading )) < REF_LINE_CROSSING_ANGLE:
                    offset = Pose()
                    if self.countLR > 0:
                        offset = Pose( 0, LINE_OFFSET, 0 )
                    if self.countLR < 0:
                        offset = Pose( 0, -LINE_OFFSET, 0 )
                    sPose = sPose.add( offset )
                    self.refLine = Line( (sPose.x-0.15*math.cos(sPose.heading), sPose.y-0.15*math.sin(sPose.heading)),
                            (sPose.x+0.15*math.cos(sPose.heading), sPose.y+0.15*math.sin(sPose.heading)) )
            else:
                self.refLine = None
        return self.pathUpdated


    def getRefCircleLine( self, pose ):
        "return best fitting circle/line for given position (which is now ignored)"
        return self.refCircle, self.refLine

# --- striploc_test.py -------------------------------------------------------
from striploc import *
import unittest


class StripsLocalisationTest( unittest.TestCase ):
    def testUsage( self ):
        loc = StripsLocalisation()
        loc.updateFrame( Pose(), [] )
        self.assertEqual( loc.pathType, PATH_TURN_LEFT )
        self.assertEqual( loc.pathPose, None )

    def testSingleFramePathType( self ):
        loc = StripsLocalisation()
        loc.updateFrame( Pose(), [ Pose(-0.17, 0.19,
math.radians(36)), Pose(0.18, 0.37, math.radians(20))] ) 15 | self.assertEqual( loc.pathType, PATH_TURN_RIGHT ) # note, that later this may fail do to incorrect basePose 16 | self.assertEqual( str(loc.pathPose), "(0.18, 0.37, 20)" ) # but could be also the other 17 | 18 | def testFollowingFrames( self ): 19 | "single strip detected in two following frames" 20 | loc = StripsLocalisation() 21 | loc.updateFrame( Pose(2.40, -5.13, math.radians(-74)), [Pose(-0.05, -0.04, math.radians(-8))] ) 22 | loc.updateFrame( Pose(2.45, -5.53, math.radians(-83)), [Pose(-0.04, -0.12, math.radians(-6))] ) 23 | self.assertEqual( loc.pathType, PATH_STRAIGHT ) 24 | self.assertEqual( str(loc.pathPose), str(Pose(2.45, -5.53, math.radians(-83)).add(Pose(-0.04, -0.12, math.radians(-6)))) ) 25 | 26 | def testTurnBug( self ): 27 | # video_rec_140318_185412.bin : picture 20 28 | loc = StripsLocalisation() 29 | loc.updateFrame( Pose(0.54, -3.06, math.radians(-19)), [Pose(0.01, 0.00, math.radians(-20))] ) 30 | loc.updateFrame( Pose(0.97, -3.08, math.radians(-3)), [Pose(-0.13, -0.22, math.radians(-43)), Pose(0.09, -0.36, math.radians(-42))] ) 31 | self.assertEqual( loc.pathType, PATH_STRAIGHT ) 32 | 33 | def testMultistripRobustness( self ): 34 | loc = StripsLocalisation() 35 | loc.updateFrame( Pose(2.33, -3.38, math.radians(69)), [Pose(-0.04, 0.03, math.radians(-2))] ) 36 | loc.updateFrame( Pose(2.42, -3.02, math.radians(68)), [Pose(0.07, -0.06, math.radians(-4)), Pose(0.25, -0.47, math.radians(69))] ) 37 | self.assertEqual( loc.pathType, PATH_STRAIGHT ) 38 | 39 | def testFilterWrongStrips( self ): 40 | loc = StripsLocalisation() 41 | loc.pathType = PATH_TURN_RIGHT # just enforce different value than L 42 | loc.updateFrame( Pose(-0.69, -0.98, math.radians(252)), [Pose(0.07, 0.12, math.radians(10)), Pose(0.30, -0.48, math.radians(-61))] ) 43 | loc.updateFrame( Pose(-0.71, -1.38, math.radians(268)), [Pose(-0.28, -0.69, math.radians(-77)), Pose(0.14, 0.13, math.radians(8))] ) 44 | self.assertEqual( 
loc.pathType, PATH_TURN_LEFT ) 45 | 46 | def testTrippleStripPreference( self ): 47 | # meta_140320_165035.log:270 48 | loc = StripsLocalisation() 49 | loc.updateFrame( Pose(3.30, 1.97, math.radians(257)), [Pose(-0.17, -0.28, math.radians(13)), Pose(0.12, -0.25, math.radians(27))] ) 50 | loc.updateFrame( Pose(3.48, 1.43, math.radians(269)), [Pose(-0.20, -0.38, math.radians(33)), Pose(0.04, -0.23, math.radians(48)), Pose(0.29, 0.06, math.radians(49))] ) 51 | self.assertEqual( loc.pathType, PATH_STRAIGHT ) 52 | self.assertTrue( loc.pathUpdated ) 53 | 54 | def testTransitionLR( self ): 55 | # meta_140320_165035.log:268 (similar case also frame=66) 56 | loc = StripsLocalisation() 57 | loc.updateFrame( Pose(3.03, 2.21, math.radians(269)), [Pose(-0.22, 0.10, math.radians(1)), Pose(0.15, 0.16, math.radians(16)), Pose(0.34, 0.80, math.radians(-32))] ) 58 | self.assertEqual( loc.pathType, PATH_TURN_LEFT ) 59 | self.assertTrue( loc.pathUpdated ) 60 | 61 | def testIsSameStrip( self ): 62 | loc = StripsLocalisation() 63 | self.assertTrue( loc.isSameStrip( Pose(0,0,0), Pose(0.15,0.05,math.radians(1)))) 64 | self.assertFalse( loc.isSameStrip( Pose(0,1.0,0), Pose(0.3,0.05,math.radians(1)))) 65 | # real examples 66 | # 9: 0.186620639305 0.0527981159447 -0.81 67 | # 35: 0.170391659423 0.0771475958297 -2.136 68 | # 41: 0.156540370398 -0.00496615873208 4.462 69 | 70 | # FAILED: 71 | # 123: 0.14906722044 -0.118410346849 1.394 72 | # 174: 0.219441940085 0.0828970285437 0.937 73 | # 185: 0.152514448606 -0.100774758903 -1.427 (bad middle strip) 74 | # 234: 0.210778241227 -0.0182408657026 359.693 75 | 76 | def testRefCircleLine( self ): 77 | loc = StripsLocalisation() 78 | self.assertTrue( loc.getRefCircleLine( Pose() ), (None, None) ) 79 | 80 | if __name__ == "__main__": 81 | unittest.main() 82 | 83 | -------------------------------------------------------------------------------- /video.py: -------------------------------------------------------------------------------- 1 | 
#!/usr/bin/python 2 | """ 3 | Extract MPEG-2 video from AR Drone 2.0 stream 4 | usage: 5 | ./video.py [] 6 | """ 7 | import sys 8 | import os 9 | import struct 10 | import socket 11 | 12 | VIDEO_SOCKET_TIMEOUT=1.0 13 | 14 | def nextCmd( q ): 15 | try: 16 | return q.get(block=False) 17 | except: 18 | return None 19 | 20 | def logVideoStream( hostPortPair, filename, queueCmd, packetProcessor=None, queueResults=None, flushWholeVideo=False ): 21 | "wait for termination command and otherwise log data" 22 | s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # TCP 23 | s.connect( hostPortPair ) 24 | s.settimeout( VIDEO_SOCKET_TIMEOUT ) 25 | f = open( filename, "wb" ) 26 | cmd = None 27 | prevResult = None 28 | timeoutCount = 0 29 | while cmd == None: 30 | try: 31 | data = s.recv(10240) 32 | f.write(data) 33 | f.flush() 34 | if packetProcessor and len(data) > 0: 35 | tmp = packetProcessor( data ) 36 | if prevResult != tmp and queueResults: 37 | queueResults.put( tmp ) 38 | prevResult = tmp 39 | except socket.timeout: 40 | timeoutCount += 1 41 | print "Video %s TIMEOUT(%d)" % (filename, timeoutCount) 42 | if timeoutCount > 1: 43 | # TODO revise try/except, s.connect failure 44 | s.close() 45 | s.connect( hostPortPair ) 46 | s.settimeout( VIDEO_SOCKET_TIMEOUT ) 47 | cmd = nextCmd( queueCmd ) 48 | print "VIDEO EXITING" 49 | if flushWholeVideo: 50 | while True: 51 | try: 52 | data = s.recv(10240) 53 | f.write(data) 54 | f.flush() 55 | except socket.timeout: 56 | print "REC Video Completed (TIMEOUT)" 57 | break 58 | f.close() 59 | s.close() 60 | 61 | def convertVideo( filename, outDir, frameIndex = 0 ): 62 | fin = open( filename, "rb") 63 | save = False 64 | data = fin.read(4) 65 | while len(data) > 0: 66 | assert data == "PaVE" # PaVE (Parrot Video Encapsulation) 67 | print "version", ord(fin.read(1)) 68 | print "codec", ord(fin.read(1)) 69 | headerSize = struct.unpack_from("H", fin.read(2))[0] 70 | print "header size", headerSize 71 | payloadSize = struct.unpack_from("I", 
fin.read(4))[0] 72 | print "payload size", payloadSize 73 | buf = fin.read(headerSize - 12) 74 | print "BUF", len(buf) 75 | arr = struct.unpack_from("HHHHIIBBBBIIHBBBBBBI",buf) # resolution, index, chunk, type, (ignored III reserved) 76 | print "ARR", arr 77 | print "Frame number", arr[4] 78 | if arr[8]==1: 79 | save = True 80 | # print "chunk", struct.unpack_from("",buf[16:]) 81 | payload = fin.read(payloadSize) 82 | print "Payload Data:", [ord(c) for c in payload[:8]] 83 | if save: 84 | fout = open(outDir+os.sep+"frame%04d.bin" % frameIndex, "wb") 85 | fout.write(payload) 86 | fout.close() 87 | frameIndex += 1 88 | data = fin.read(4) 89 | 90 | 91 | if __name__ == "__main__": 92 | if len(sys.argv) < 3: 93 | print __doc__ 94 | sys.exit(2) 95 | frameIndex = 0 96 | if len(sys.argv) > 3: 97 | frameIndex = int(sys.argv[3]) 98 | convertVideo( sys.argv[1], sys.argv[2], frameIndex=frameIndex ) 99 | 100 | -------------------------------------------------------------------------------- /viewer.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ 3 | simple tool for robot path analysis 4 | viewer.py [--startIndex=] [ []] 5 | """ 6 | 7 | import sys 8 | 9 | import os, math, pygame 10 | from pygame.locals import * 11 | # ------- video loading ---------- 12 | import pave 13 | import cv2 14 | 15 | # hack for RR only 16 | from rr_drone import processFrame 17 | 18 | printPosition = True 19 | printEncoders = True 20 | 21 | size = 1200, 660 22 | offset = 150, 150 23 | absOffset = 0, 0 # offset of original (x,y) coordinate system (Robotour stromovka = 1049800.66, 5580126.84 ) 24 | 25 | sample_color = (255,0,0) 26 | sample_size = 20 27 | track_color = (0,255,0) 28 | track_size = 2 29 | tile_size = 0.5 30 | 31 | def deg(degAngle): return math.pi*degAngle/180.0 32 | 33 | def draw(surface, samples): 34 | for s in samples: 35 | surface.fill(sample_color, (to_px(s[0]), to_px(s[1]), sample_size, sample_size)) 36 | 37 | def 
# --- viewer.py (continued) ---------------------------------------------------
# NOTE(review): the "def" keyword of to_px sits at the end of the previous
# chunk; the coordinate helpers below convert world meters to screen pixels.
def to_px(x): return int(x*150*scale)

def _scr(x,y): return (to_px(x)+offset[0], size[1]-to_px(y)-offset[1])

def scr(x,y): return _scr(x-absOffset[0], y-absOffset[1])

def scr1( point ) : return scr( point[0], point[1] )
#-------------------------------------------------

def getCombinedPose( pose, sensorPose ):
    "compose a sensor pose (relative to the robot) with the robot's world pose"
    x = pose[0] + sensorPose[0] * math.cos( pose[2] ) - sensorPose[1] * math.sin( pose[2] )
    y = pose[1] + sensorPose[0] * math.sin( pose[2] ) + sensorPose[1] * math.cos( pose[2] )
    heading = sensorPose[2] + pose[2]
    return (x, y, heading)

def loadVideoFrame( filename ):
    # 'filename' is "<video file>:<frame number>"; scan the PaVE stream for
    # the requested I-frame, decode it via OpenCV and return a pygame image.
    assert ":" in filename, filename
    frameNumber = int(filename.split(':')[-1])
    filename = ":".join(filename.split(":")[:-1])
    f = open(filename,"rb")
    proc = pave.PaVE()
    while True:
        packet = f.read(10000)
        # FIX: original tested `if (packet) == 0` -- a string never equals the
        # integer 0, so EOF was never detected and the loop spun forever when
        # the requested frame was missing.
        if len(packet) == 0:
            break
        proc.append( packet )
        header,payload = proc.extract()
        while payload:
            if pave.isIFrame( header ) and pave.frameNumber( header ) == frameNumber:
                tmpFile = open( "tmp.bin", "wb" )
                tmpFile.write( payload )
                tmpFile.flush()
                tmpFile.close()
                cap = cv2.VideoCapture( "tmp.bin" )
                ret, frame = cap.read()
                processFrame( frame, debug=True ) # hack!!
                cv2.imwrite( "test.jpg", frame ) # dirty solution how to get image from OpenCV to Pygame :(
                return pygame.image.load( "test.jpg" )
            header,payload = proc.extract()
    return None # frame not found

def loadData( filename ):
    # Parse a viewlog text file into (posesSet, map).  Each record in posesSet
    # is (poses, scans, image, camdir, compass); 'scans' mixes range readings
    # and special markers encoded as negative pseudo-ranges (see drawScans).
    # geometry = ( ( -0.02, 0.04, deg(90) ), ( -0.02, -0.04, deg(-90) ) )
    # geometry = ( ( -0.09, 0.14, deg(90) ), ( -0.09, -0.14, deg(-90) ) )
    # geometry = ( ( -0.09, 0.14, deg(90) ), ( -0.09, -0.14, deg(-90) ),
    #              ( 0.05, 0.10, deg(0) ), ( 0.05, -0.10, deg(0) ) )
    map = []   # NOTE(review): shadows the builtin 'map' -- kept for compatibility
    geometry = []
    posesSet = []
    poses = []
    scans = []
    image = None
    video = None
    camdir = None
    compass = None
    readingMap = False
    for line in open( filename ):
        arr = line.rstrip().split()
        if not line.rstrip():
            continue
        if arr[0] == "Map":
            readingMap = True
        elif arr[0] == "Poses":
            readingMap = False
            if poses:
                posesSet.append( (poses, scans, image, camdir, compass) )
            poses = []
            scans = []
            image = None
            camdir = None
            compass = None
        elif arr[0] == "Compass":
            readingMap = False
            compass = float(arr[1])
        elif arr[0] == "Geometry":
            geometry = []
            tmp = arr[1:]
            while len( tmp ) > 0:
                geometry.append( (float(tmp[0]), float(tmp[1]), float(tmp[2])) )
                tmp = tmp[3:]
        elif arr[0] == "Ranger":
            readingMap = False
            i = 0
            for a in arr[4:]:
                scans.append( ( getCombinedPose( (float(arr[1]), float(arr[2]), float(arr[3])), geometry[i] ), float(a)) )
                i += 1
        elif arr[0] == "Beacon":
            assert( int(arr[1]) == 1 )
            index = len(arr)>4 and int(arr[4]) or 0
            if len(arr) == 7:
                scans.append( ( ( float(arr[2]), float(arr[3]), 0.0 ), -1.5, [int(c) for c in arr[-3:]]) ) # color param
            else:
                scans.append( ( ( float(arr[2]), float(arr[3]), 0.0 ), -1.0-index/10.0) )
        elif arr[0] == "Puck":
            assert( int(arr[1]) == 1 )
            scans.append( ( ( float(arr[2]), float(arr[3]), 0.0 ), -2.0) )
        elif arr[0] == "RefPose":
            assert( int(arr[1]) == 1 )
            scans.append( ( ( float(arr[2]), float(arr[3]), 0.0 ), -3.0) )
        elif arr[0] == "Image":
            image = arr[1]
        elif arr[0] == "Video":
            video = arr[1]
        elif arr[0] == "Frame":
            assert video != None
            image = video + ":" + arr[1] # frame number
        elif arr[0] == "ImageResult":
            try:
                camdir = float(arr[1])
            except: #it cannot be interpreted as a direction
                pass
        else:
            if readingMap:
                map.append( ((float(arr[0]), float(arr[1])), (float(arr[2]),float(arr[3]))) )
            else:
                poses.append( (float(arr[0]), float(arr[1]), float(arr[2])) )
    posesSet.append( (poses,scans,image,camdir, compass) )
    return posesSet, map


def drawPoses( foreground, poses ):
    "blue dot per pose plus a yellow heading tick"
    d = 0.3
    for (x,y,heading) in poses:
        pygame.draw.circle( foreground, (0,0,255), scr(x,y),5 )
        pygame.draw.line( foreground, (255,255,0), scr(x,y), scr(x+d*math.cos(heading),y+d*math.sin(heading)),5)

def drawCompass( foreground, scanArr ):
    if scanArr[4]:
        x,y,heading = scanArr[0][0]
        heading = scanArr[4]
        d = 0.4
        pygame.draw.line( foreground, (0,0,255), scr(x,y), scr(x+d*math.cos(heading),y+d*math.sin(heading)),5)

def drawScans( foreground, scans, shouldDrawSensors, shouldDrawBeacons ):
    # Negative pseudo-range encodes marker type: (-1, -1.99] beacons,
    # (-1.99, -2.5] pucks, below that the reference MCL pose.
    # NOTE(review): 'range' shadows the builtin; nesting reconstructed from a
    # collapsed dump -- confirm the sensor-dot branch against the repository.
    for row in scans:
        color = None
        if len(row) == 2:
            ((x,y,heading),range) = row
        else:
            ((x,y,heading),range, color) = row
        if range >= 0:
            if range > 0:
                if shouldDrawSensors:
                    col = (0,255,0)
                    ptSize = 6
                    if range > 2.0:
                        col = (0,100,0)
                        ptSize = 3
                    pygame.draw.circle( foreground, col, scr(x+range*math.cos(heading),y+range*math.sin(heading)), ptSize )
            pygame.draw.circle( foreground, (255,0,0), scr(x,y), 4 )
        elif range > -1.99:
            # beacons (not nice)
            if shouldDrawBeacons:
                if color == None:
                    pygame.draw.circle( foreground, (255,0,int((-1-range)*255*3.3)), scr(x,y), 6 )
                else:
                    pygame.draw.circle( foreground, color, scr(x,y), 6 )
        elif range > -2.5:
            # pucks (not nice2)
            pygame.draw.circle( foreground, (255,0,168), scr(x,y), 3 )
        else:
            # ref MCL pose (not nice3)
            pygame.draw.circle( foreground, (255,0,0), scr(x,y), 1 )

def drawMap( foreground, map ):
    for m in map:
        pygame.draw.line( foreground, (0,255,255), scr1(m[0]), scr1(m[1]),5)

def drawImage( foreground, imgFileName, camdir ):
    "blit the camera image (file or video frame) into the top-right corner"
    if imgFileName:
        # imgFileName = 'D:\\md\\hg\\eduro-logs\\100619-rychnov\\pes1\\cam100619_145404_000.jpg'
        if "video_rec" in imgFileName:
            camera = loadVideoFrame( imgFileName )
        else:
            camera = pygame.image.load( imgFileName ).convert()
        cameraView = pygame.transform.scale( camera, (320, 180) )
        if camdir is not None:
            color = (0xFF, 0x00, 0x00)
            start_pos = (160, 180)
            length = 80
            end_pos = (160 - length * math.sin(camdir), 180 - length * math.cos(camdir))
            width = 2
            pygame.draw.line(cameraView, color, start_pos, end_pos, width)
        foreground.blit( cameraView, (size[0]-320,0) )

def drawTiles( background ):
    "checkerboard background plus a one-meter scale bar"
    background.fill((35, 35, 35))
    tile_size_px = to_px(tile_size)
    for x in range(0, size[0] + tile_size_px, tile_size_px):
        for y in range(0, size[1] + tile_size_px, tile_size_px):
            if (x / tile_size_px) % 2 == (y / tile_size_px) % 2:
                background.fill((235, 235, 235), Rect(x, size[1] - y, tile_size_px, tile_size_px))
    # draw scale
    pygame.draw.line( background, (255,0,0), (20,size[1]-20), (20+to_px(1.0),size[1]-20),3)
    pygame.draw.line( background, (255,0,0), (20,size[1]-10), (20,size[1]-30),1)
    pygame.draw.line( background, (255,0,0), (20+to_px(1.0),size[1]-10), (20+to_px(1.0),size[1]-30),1)
# NOTE(review): re-formatted from the mangled "NNN | " dump; the nesting of
# the event loop below (in particular where the redraw section sits relative
# to the KEYDOWN branch) is inferred -- confirm against the full viewer.py.

def main( filename, scale = 1.0, startIndex = None ):
    """Interactive pygame viewer for a robot log file.

    Keys: p/o/i step the snapshot index (+1/+10/-1), 0/9 jump to begin/end,
    m/s/b toggle map/sensors/beacons, arrows pan, +/- zoom, a draws all
    scans, q/ESC quits.  Relies on module globals: size, offset, scale,
    tile_size, absOffset.
    """
    # load pygame
    pygame.init()
    os.environ['SDL_VIDEO_CENTERED'] = '1'
    #screen = pygame.display.set_mode(size, NOFRAME, 0)
    screen = pygame.display.set_mode(size)

    # create background
    background = pygame.Surface(screen.get_size())
    drawTiles(background)

    # create foreground (black color key -> black is transparent when blitted)
    foreground = pygame.Surface(screen.get_size())
    foreground.set_colorkey((0,0,0))
    index = 0

    # display everything
    screen.blit(background, (0, 0))
    screen.blit(foreground, (0, 0))
    pygame.display.flip()

    pygame.event.set_blocked(MOUSEMOTION) # keep our queue cleaner
    pygame.key.set_repeat ( 200, 20 )

    shouldDrawTiles = True
    shouldDrawMap = True
    shouldDrawSensors = False
    shouldDrawBeacons = False
    shouldRefreshNow = False

    posesScanSet, map = loadData( filename )
    drawPoses( foreground, posesScanSet[0][0] )
    drawScans( foreground, posesScanSet[0][1], shouldDrawSensors, shouldDrawBeacons )

    screen.blit(background, (0, 0))
    screen.blit(foreground, (0, 0))
    pygame.display.set_caption("Viewer init ...")
    pygame.display.flip()

    global absOffset
    if startIndex != None:
        index = startIndex
        if index < len(posesScanSet):
            # move robot into center
            pose = posesScanSet[index][0][0]
            absOffset = pose[:2]
    else:
        # skip first non-moving part (until the robot is >1m from the origin)
        index = 0
        for s in posesScanSet:
            pose = s[0][0]
            index += 1
            if math.fabs(pose[0])+math.fabs(pose[1]) > 1.0:
                absOffset = pose[:2]
                break
        if index >= len(posesScanSet):
            index = len(posesScanSet) - 1
    print index, len(posesScanSet), absOffset

    imgFileName = None
    lastImgFileName = None
    while 1:
        # caption shows '***' while the current snapshot carries a fresh image
        if imgFileName:
            t = str(imgFileName)+' ***'
            lastImgFileName = imgFileName
        else:
            t = str(lastImgFileName)

        pygame.display.set_caption("Index: %d, sensors %s, img %s" % (index, shouldDrawSensors and "on" or "off", t) )
        shouldRefreshNow = False
        event = pygame.event.wait()
        if event.type == QUIT: return
        if event.type == KEYDOWN:
            if event.key in (K_ESCAPE,K_q): return
            if event.key == K_p:
                if index + 1 < len( posesScanSet ):
                    index += 1
            if event.key == K_o:
                if index + 10 < len( posesScanSet ):
                    index += 10
            if event.key == K_i:
                if index > 0:
                    index -= 1
            if event.key == K_0:
                index = 0
            if event.key == K_9:
                index = len( posesScanSet ) - 1
            if event.key == K_m:
                shouldDrawMap = not shouldDrawMap;
            if event.key == K_s:
                shouldDrawSensors = not shouldDrawSensors;
            if event.key == K_b:
                shouldDrawBeacons = not shouldDrawBeacons;
            # panning updates the module-level 'offset' used by scr()/scr1()
            if event.key == K_RIGHT:
                globals()['offset'] = (offset[0]+150, offset[1])
                shouldRefreshNow = True
            if event.key == K_LEFT:
                globals()['offset'] = (offset[0]-150, offset[1])
                shouldRefreshNow = True
            if event.key == K_UP:
                globals()['offset'] = (offset[0], offset[1]+150)
                shouldRefreshNow = True
            if event.key == K_DOWN:
                globals()['offset'] = (offset[0], offset[1]-150)
                shouldRefreshNow = True
            # zooming scales the world and shrinks/grows the background tiles
            if event.key == K_PLUS or event.unicode == '+':
                globals()['scale'] *= 2.0
                globals()['tile_size'] /= 2.0
                shouldDrawTiles = shouldDrawMap = shouldDrawSensors = shouldDrawBeacons = True
                shouldRefreshNow = True
            if event.key == K_MINUS or event.unicode == '-':
                globals()['scale'] /= 2.0
                globals()['tile_size'] *= 2.0
                shouldDrawTiles = shouldDrawMap = shouldDrawSensors = shouldDrawBeacons = True
                shouldRefreshNow = True

            def has_image(k):
                # True when snapshot k carries an image/frame filename
                return bool(posesScanSet[k][2])

            if has_image(index) or shouldRefreshNow:
                foreground.fill( (0,0,0) )

                refreshed_since = index

                if shouldRefreshNow:
                    # walk back to the most recent snapshot that carried an image
                    while refreshed_since > 0:
                        if has_image(refreshed_since):
                            break
                        refreshed_since -= 1

                for i in range(refreshed_since, index + 1):
                    drawPoses( foreground, posesScanSet[i][0] )
                    drawCompass( foreground, posesScanSet[i] )
                    drawScans( foreground, posesScanSet[i][1], shouldDrawSensors, shouldDrawBeacons )
                    drawImage( foreground, posesScanSet[i][2], posesScanSet[i][3] )
                    imgFileName = posesScanSet[i][2]

            if event.key == K_a:
                # draw scans of ALL snapshots at once
                foreground.fill( (0,0,0) )
                for (p,s,i,camdir,comp) in posesScanSet:
                    drawScans( foreground, s, shouldDrawSensors, shouldDrawBeacons )

            if shouldDrawTiles:
                drawTiles(background)

            if shouldDrawMap:
                drawMap( foreground, map )
            screen.blit(background, (0, 0))
            screen.blit(foreground, (0, 0))
            pygame.display.flip()


def usage():
    """Print the module docstring as the command-line help text."""
    print __doc__

if __name__ == "__main__":
    if len(sys.argv) < 2:
        usage()
        sys.exit(2)
    scale = 1.0

    # pull an optional "--startIndex=N" out of argv before positional parsing
    args = sys.argv[:]
    startIndex = None
    if len(args) > 2 and args[2].startswith("--startIndex"):
        startIndex = int(args[2].split('=')[1])
        args = args[:2]+args[3:]

    if len(args) > 2:
        scale = float( args[2] )
    if len(args) > 3:
        tile_size = float( args[3] )
    main(args[1], scale=scale, startIndex=startIndex)

# ======================================================================
# viewlog.py
# ======================================================================
#!/usr/bin/env python
"""
functions for generation of logs for viewer
"""

import math
from os import sep

# module-level file handle; callers assign an open file here to enable logging
viewLogFile = None # is it automatically shared?
def dumpPose( pose ):
    """Write a reference-pose record: "RefPose 1 x y heading"."""
    if viewLogFile:
        viewLogFile.write( "RefPose 1 %.2f %.2f %.4f\n" % pose )

def dumpObstacles( obstacles ):
    """Write a "Map" section; a single-point obstacle is expanded to a 5cm square."""
    if viewLogFile:
        viewLogFile.write( "Map\n" )
        for obj in obstacles:
            if len(obj) == 1: # single column
                x,y=obj[0]
                d = 0.025
                # closed square around the point (first corner repeated)
                obj = ((x-d,y-d),(x+d,y-d),(x+d,y+d),(x-d,y+d),(x-d,y-d))
            # consecutive corner pairs form the wall segments
            for wall in zip(obj[:-1],obj[1:]):
                viewLogFile.write( "%f\t%f\t%f\t%f\n" % \
                    (wall[0][0], wall[0][1], wall[1][0], wall[1][1]) )

def dumpSamples( samples ):
    """Write a "Poses" section, one (x, y, heading) triple per line."""
    if viewLogFile:
        viewLogFile.write( "Poses\n" );
        for s in samples:
            s = tuple( s ) # workaround for Pose
            viewLogFile.write( "%f\t%f\t%f\n" % s )

def dumpCompass( angle ):
    """Write a compass record (angle in radians -- TODO confirm units)."""
    if viewLogFile:
        viewLogFile.write( "Compass\t%f\n" % angle );

def dumpSharpsGeometry( sharpsGeometry ):
    """Write one "Geometry" line with (x, y, heading) for every sharp sensor."""
    if viewLogFile:
        viewLogFile.write( "Geometry" )
        for s in sharpsGeometry:
            viewLogFile.write( " %.2f %.2f %.4f" % s )
        viewLogFile.write( "\n" )

def dumpSharps( pose, sharp ):
    """Write a "Ranger" record: robot pose followed by all sharp readings."""
    if viewLogFile and sharp:
        viewLogFile.write( "Ranger %.2f %.2f %.4f " % pose )
        for s in sharp:
            viewLogFile.write( "%f " % s )
        viewLogFile.write( "\n" )

def dumpPuck( coord ):
    """Write a puck record at the given (x, y) coordinate."""
    if viewLogFile:
        viewLogFile.write( "Puck 1 %.2f %.2f\n" % coord )

def dumpBeacon( coord, index=None, color=None ):
    """Write a beacon record; optionally with an index OR an (r, g, b) color."""
    if viewLogFile:
        if index is None:
            if color == None:
                viewLogFile.write( "Beacon 1 %.2f %.2f\n" % coord )
            else:
                viewLogFile.write( "Beacon 1 %.2f %.2f %d %d %d\n" % tuple(list(coord)+list(color)) )
        else:
            viewLogFile.write( "Beacon 1 %.2f %.2f %d\n" % (coord[0], coord[1], index) )

def dumpCamera( filename, imgresult ):
    """Write an "Image" record plus its processing result string."""
    if viewLogFile:
        viewLogFile.write( "Image %s\n" % filename )
        viewLogFile.write( "ImageResult %s\n" % imgresult)

def dumpVideo( filename ):
    """Write a "Video" record referencing a recorded video file."""
    if viewLogFile:
        viewLogFile.write( "Video %s\n" % filename )

def dumpVideoFrame( frameNumber, timestamp ):
    """Write a "Frame" record (frame number + timestamp) into the view log."""
    if viewLogFile:
        viewLogFile.write( "Frame %d %d\n" % (frameNumber, timestamp) )

def viewLogExtension( robot, id, data ):
    """Robot-extension hook: on update 0x80 dump the localisation pose as RefPose."""
    if id == 0x80:
        if robot.localisation:
            dumpPose( robot.localisation.pose() )

def viewCompassExtension( robot, id, data ):
    """Robot-extension hook: dump absolute compass heading (pose heading + compass)."""
    if id == 0x80:
        if robot.compass and robot.localisation:
            pose = robot.localisation.pose()
            # robot.compass is in tenths of a degree -- hence /10.0
            dumpCompass( pose[2] + math.radians( robot.compass/10.0 ) )

def viewPoseExtension( robot, id, data ):
    """Robot-extension hook: dump the localisation pose as a "Poses" sample."""
    if id == 0x80:
        if robot.localisation:
            dumpSamples( [robot.localisation.pose()] )

#def viewCameraExtension( robot, id, data ):
#    assert( False ) # deprecated to be removed
#    if id == 'camera' and len(data)>=2 and data[1] != None:
#        log = sys.argv[2][:-18] # TODO extra param for extensions (like for threads args?)
#        dumpCamera( log + data[1].split('/')[-1], data[0].strip() )

class ViewCameraExtension:
    """Camera extension hook bound to an absolute log directory path."""
    def __init__( self, absPath ):
        # directory where camera snapshot files are stored
        self.absPath = absPath

    def viewCameraExtension( self, robot, id, data ):
        #TODO: Except of the absPath vers sys.argv[2][:-18] difference, this is same as viewlog.viewCameraExtension
        if id == 'camera' and len(data)>=2 and data[1] != None:
            if data[0] == None:
                dumpCamera( self.absPath + sep + data[1].split('/')[-1], "" )
            else:
                dumpCamera( self.absPath + sep + data[1].split('/')[-1], data[0].strip() )

# ======================================================================
# wifisentinel.py
# ======================================================================
#!/usr/bin/python
"""
WIFI network protector during flight
usage:
./protector.py
"""
# NOTE(review): the usage line mentions protector.py although the file is
# wifisentinel.py; command arguments were likely lost in rendering (the
# script runs sys.argv[1:] once connected) -- verify against the repository.
import subprocess
import time
import sys

# IP addresses the drone's DHCP typically assigns to this machine
DRONE_IP_LIST = ['192.168.1.2', '192.168.1.3', '192.168.1.4']

def myWiFiIP():
    """Return this machine's WiFi IPv4 address as a string, or None.

    Parses /sbin/ifconfig output on Linux (Czech-localized "inet adr:"
    label!) and ipconfig output elsewhere (Windows), skipping the
    VirtualBox host-only address 192.168.56.1.
    """
    ret = None
    if sys.platform == 'linux2':
        p = subprocess.Popen( ["/sbin/ifconfig", "wlan0"], stdout=subprocess.PIPE )
        for line in p.stdout.readlines():
            if "inet adr:" in line: # Czech linux (!)
                ret = line.strip().split()[1][4:] # cut "adr:"
    else:
        p = subprocess.Popen( "ipconfig", stdout=subprocess.PIPE )
        for line in p.stdout.readlines():
            if "IPv4 Address" in line and "192.168.56.1" not in line:
                ret = line.strip().split()[-1]
    return ret

# main loop: wait until connected to the drone's network, then run the
# given command; if it fails (non-zero exit) wait for reconnection and retry
print sys.argv
while True:
    ip = myWiFiIP()
    while ip not in DRONE_IP_LIST:
        print ip
        time.sleep(3)
        ip = myWiFiIP()
    print "CONNECTED TO", ip
    if subprocess.call( sys.argv[1:] ) == 0:
        break
    print "RECOVERY"