├── .gitattributes ├── .gitignore ├── PixelProcessor ├── Makefile ├── TestImages.rar ├── main.cpp ├── precomp.cpp ├── precomp.h ├── stb_image.h └── stb_image_write.h ├── PyMocap ├── blobdetectmodule.cpp ├── makeblobdetect.sh ├── mocap.py ├── setup.py ├── unit_test_detector.py ├── unit_test_drift.py └── unit_test_timing.py ├── PyUtilities ├── blobCode.cpp ├── high.py ├── rgbled.py └── single.py ├── README.md ├── VectorSynth ├── .gitignore ├── .qmake.stash ├── BundleVarying.exe ├── Makefile ├── Makefile.Debug ├── Makefile.Release ├── VectorSynth.pro ├── VectorSynth.sln ├── VectorSynth.vcxproj ├── VectorSynth.vcxproj.filters ├── assets │ └── shaders │ │ ├── basic.frag │ │ ├── basic.vert │ │ ├── font.frag │ │ ├── font.vert │ │ ├── minimal.frag │ │ ├── minimal.vert │ │ ├── shaded.frag │ │ ├── shaded.vert │ │ ├── textured.frag │ │ └── textured.vert ├── cameraView.cpp ├── cameraView.h ├── decoder.cpp ├── decoder.h ├── font_sheet.png ├── icons8-back.png ├── icons8-coordinate-system.png ├── icons8-dashboard.png ├── icons8-day-camera.png ├── icons8-exercise.png ├── icons8-expand.png ├── icons8-fast-forward.png ├── icons8-first-quarter.png ├── icons8-full-moon.png ├── icons8-grid.png ├── icons8-height.png ├── icons8-iron-man.png ├── icons8-less-than.png ├── icons8-more-than.png ├── icons8-new-moon.png ├── icons8-open.png ├── icons8-play.png ├── icons8-rename.png ├── icons8-rewind.png ├── icons8-save.png ├── icons8-sun.png ├── icons8-wall-mount-camera.png ├── icons8-waste.png ├── icons8-width.png ├── liveTracker.cpp ├── liveTracker.h ├── main.cpp ├── mainwindow.cpp ├── mainwindow.h ├── mainwindow.ui ├── mainwindow.ui.K10284 ├── make.bat ├── objLoader.cpp ├── objLoader.h ├── refined.txt ├── sceneView.cpp ├── sceneView.h ├── serverThreadWorker.cpp ├── serverThreadWorker.h ├── take.cpp ├── take.h ├── takeTracker.cpp ├── takeTracker.h ├── timelineWidget.cpp ├── timelineWidget.h ├── tracker.cpp ├── tracker.h ├── trackerConnection.cpp ├── trackerConnection.h ├── ui_mainwindow.h └── unrefined.txt ├── timesync ├── Makefile └── main.cpp ├── timing.js └── volume calibration process.txt /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | Screens/ 2 | PiImages/ -------------------------------------------------------------------------------- /PixelProcessor/Makefile: -------------------------------------------------------------------------------- 1 | build_folder := $(shell mkdir -p bin) 2 | 3 | output: ./bin/main.o ./bin/precomp.o 4 | g++ ./bin/main.o ./bin/precomp.o -o ./bin/pixel -lrt 5 | 6 | ./bin/main.o: main.cpp precomp.h 7 | g++ -O3 -c main.cpp -o ./bin/main.o 8 | 9 | ./bin/precomp.o: precomp.cpp 10 | g++ -O3 -c precomp.cpp -o ./bin/precomp.o 11 | 12 | clean: 13 | rm -f ./bin/*.o 14 | -------------------------------------------------------------------------------- /PixelProcessor/TestImages.rar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Rob2048/VectorONE/5a2229cdf3a5bceff6a830617d5b518536d73958/PixelProcessor/TestImages.rar -------------------------------------------------------------------------------- /PixelProcessor/main.cpp: -------------------------------------------------------------------------------- 1 | #include <iostream> 2 | 3 | #include <stdio.h> 4 | 
#include <stdlib.h> 5 | #include <string.h> 6 | #include <stdint.h> 7 | #include <time.h> 8 | 9 | 10 | 11 | 12 | 13 | #include "precomp.h" 14 | 15 | typedef unsigned char u8; 16 | typedef unsigned short u16; 17 | typedef unsigned int u32; 18 | typedef signed char i8; 19 | typedef short i16; 20 | typedef int i32; 21 | typedef float f32; 22 | typedef double f64; 23 | 24 | struct region 25 | { 26 | u8 id; 27 | i32 minX; 28 | i32 minY; 29 | i32 maxX; 30 | i32 maxY; 31 | i32 width; 32 | i32 height; 33 | i32 pixelIdx; 34 | i32 pixelCount; 35 | u8 maxLum; 36 | }; 37 | 38 | struct regionPixel 39 | { 40 | i16 x; 41 | i16 y; 42 | u8 value; 43 | }; 44 | 45 | struct floodNode 46 | { 47 | i16 x; 48 | i16 y; 49 | }; 50 | 51 | struct segment 52 | { 53 | u8 id; 54 | i32 x; 55 | i32 y; 56 | i32 lowMark; 57 | i32 highMark; 58 | i32 pixelCount; 59 | regionPixel pixels[1024 * 706]; 60 | }; 61 | 62 | struct centroid 63 | { 64 | float x; 65 | float y; 66 | }; 67 | 68 | u8 regionMarks[1024 * 706]; 69 | regionPixel regionPixels[1024 * 706]; 70 | i32 regionPixelCount; 71 | region regions[1024 * 1024]; 72 | i32 regionCount; 73 | region regionCurrent; 74 | 75 | floodNode floodList[1024 * 1024]; 76 | i32 floodListCount; 77 | 78 | u8 tempData[100 * 100]; 79 | i32 tempDataWidth; 80 | i32 tempDataHeight; 81 | u8 tempMarks[100 * 100]; 82 | 83 | u8* mapData; 84 | i32 mapWidth; 85 | i32 mapHeight; 86 | 87 | segment segments[8]; 88 | i32 segmentCount; 89 | 90 | centroid centroids[1024]; 91 | i32 centroidCount; 92 | 93 | // Debug image. 94 | uint8_t* rgbImg; 95 | 96 | //------------------------------------------------------------------------------------------------------------ 97 | // Utility functions. 98 | //------------------------------------------------------------------------------------------------------------ 99 | inline int64_t PlatformGetMicrosecond() 100 | { 101 | timespec time; 102 | clock_gettime(CLOCK_REALTIME, &time); 103 | 104 | return (int64_t)time.tv_sec * 1000000 + time.tv_nsec / 1000; 105 | } 106 | 107 | inline int32_t PlatformGetMS() 108 | { 109 | return (PlatformGetMicrosecond() / 1000); 110 | } 111 | 112 | float distSq(float X1, float Y1, float X2, float Y2) 113 | { 114 | return (X2 - X1) * (X2 - X1) + (Y2 - Y1) * (Y2 - Y1); 115 | } 116 | 117 | void saveGrayscale(const char* Filename, i32 Width, i32 Height, u8* Data) 118 | { 119 | if (stbi_write_tga(Filename, Width, Height, 1, Data) == 0) 120 | std::cout << "Failed STB write\n"; 121 | } 122 | 123 | //------------------------------------------------------------------------------------------------------------ 124 | // Flood to find minima regions. 125 | //------------------------------------------------------------------------------------------------------------ 126 | inline void addSegmentPixel(segment* S, i32 X, i32 Y, i32 Idx) 127 | { 128 | S->pixels[S->pixelCount].x = X; 129 | S->pixels[S->pixelCount].y = Y; 130 | S->pixels[S->pixelCount++].value = tempData[Idx]; 131 | tempMarks[Idx] = 255; 132 | } 133 | 134 | // Process next low marker. 
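// watershedFloodUpdate() pops the pixel at lowMark and pushes every unvisited
// 8-neighbour whose value is at or above Threshold. Each of the eight guarded
// lines below hand-inlines the same test; sketched generically (illustration
// only, assuming a neighbour offset dx, dy):
//
//   i32 idx = (Y + dy) * tempDataWidth + (X + dx);
//   if (X + dx >= 0 && X + dx < tempDataWidth &&
//       Y + dy >= 0 && Y + dy < tempDataHeight &&
//       tempMarks[idx] == 0 && tempData[idx] >= Threshold)
//   {
//       addSegmentPixel(S, X + dx, Y + dy, idx);
//   }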
135 | void watershedFloodUpdate(segment* S, u8 Threshold) 136 | { 137 | regionPixel* p = &S->pixels[S->lowMark++]; 138 | 139 | i32 X = p->x; 140 | i32 Y = p->y; 141 | 142 | i32 idx0 = (Y - 1) * tempDataWidth + (X - 1); 143 | i32 idx1 = (Y - 1) * tempDataWidth + (X - 0); 144 | i32 idx2 = (Y - 1) * tempDataWidth + (X + 1); 145 | i32 idx3 = (Y - 0) * tempDataWidth + (X - 1); 146 | i32 idx4 = (Y - 0) * tempDataWidth + (X + 1); 147 | i32 idx5 = (Y + 1) * tempDataWidth + (X - 1); 148 | i32 idx6 = (Y + 1) * tempDataWidth + (X - 0); 149 | i32 idx7 = (Y + 1) * tempDataWidth + (X + 1); 150 | 151 | if ((X > 0 && Y > 0) && (tempMarks[idx0] == 0 && tempData[idx0] >= Threshold)) addSegmentPixel(S, X - 1, Y - 1, idx0); 152 | if ((Y > 0) && (tempMarks[idx1] == 0 && tempData[idx1] >= Threshold)) addSegmentPixel(S, X - 0, Y - 1, idx1); 153 | if ((X < tempDataWidth - 1 && Y > 0) && (tempMarks[idx2] == 0 && tempData[idx2] >= Threshold)) addSegmentPixel(S, X + 1, Y - 1, idx2); 154 | if ((X > 0) && (tempMarks[idx3] == 0 && tempData[idx3] >= Threshold)) addSegmentPixel(S, X - 1, Y - 0, idx3); 155 | if ((X < tempDataWidth - 1) && (tempMarks[idx4] == 0 && tempData[idx4] >= Threshold)) addSegmentPixel(S, X + 1, Y - 0, idx4); 156 | if ((X > 0 && Y < tempDataHeight - 1) && (tempMarks[idx5] == 0 && tempData[idx5] >= Threshold)) addSegmentPixel(S, X - 1, Y + 1, idx5); 157 | if ((Y < tempDataHeight - 1) && (tempMarks[idx6] == 0 && tempData[idx6] >= Threshold)) addSegmentPixel(S, X - 0, Y + 1, idx6); 158 | if ((X < tempDataWidth - 1 && Y < tempDataHeight - 1) && (tempMarks[idx7] == 0 && tempData[idx7] >= Threshold)) addSegmentPixel(S, X + 1, Y + 1, idx7); 159 | } 160 | 161 | void watershedRegion() 162 | { 163 | segmentCount = 0; 164 | 165 | for (int iY = 0; iY < regionCurrent.height; ++iY) 166 | { 167 | for (int iX = 0; iX < regionCurrent.width; ++iX) 168 | { 169 | i32 idx = iY * regionCurrent.width + iX; 170 | 171 | if (tempData[idx] >= 200 && tempMarks[idx] == 0) 172 | { 173 | segment* s = &segments[segmentCount]; 174 | 175 | s->id = segmentCount; 176 | s->x = regionCurrent.minX; 177 | s->y = regionCurrent.minY; 178 | s->lowMark = 0; 179 | s->highMark = 0; 180 | s->pixelCount = 1; 181 | s->pixels[0].x = iX; 182 | s->pixels[0].y = iY; 183 | s->pixels[0].value = tempData[idx]; 184 | tempMarks[idx] = 255; 185 | 186 | //std::cout << "New segment " << (i32)s->id << " " << iX << ", " << iY << "\n"; 187 | 188 | while (s->lowMark != s->pixelCount) 189 | { 190 | //std::cout << "Itr " << s->lowMark << s->pixelCount 191 | s->highMark = s->pixelCount; 192 | watershedFloodUpdate(s, 200); 193 | } 194 | 195 | segmentCount++; 196 | } 197 | } 198 | } 199 | 200 | // Fill the segments 201 | for (int iS = 0; iS < segmentCount; ++iS) 202 | { 203 | segments[iS].lowMark = 0; 204 | segments[iS].highMark = segments[iS].pixelCount; 205 | } 206 | 207 | i32 floodLevel = 0; 208 | while (true) 209 | { 210 | bool completed = true; 211 | 212 | for (int iS = 0; iS < segmentCount; ++iS) 213 | { 214 | segment* s = &segments[iS]; 215 | 216 | if (s->lowMark != s->highMark) 217 | { 218 | while (s->lowMark != s->highMark) 219 | { 220 | watershedFloodUpdate(s, 1); 221 | } 222 | 223 | s->highMark = s->pixelCount; 224 | completed = false; 225 | } 226 | } 227 | 228 | if (completed) 229 | break; 230 | } 231 | } 232 | 233 | //------------------------------------------------------------------------------------------------------------ 234 | // Flood to find initial connected regions. 
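// flood() is deliberately iterative: rather than recursing, it pops (X, Y)
// off the explicit floodList worklist, records the pixel into the current
// region, and pushes any unmarked neighbour whose value is non-zero. The
// worklist keeps stack depth constant even for regions thousands of pixels large.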
235 | //------------------------------------------------------------------------------------------------------------ 236 | inline void addFloodNode(i32 X, i32 Y) 237 | { 238 | floodNode fn = {}; 239 | fn.x = X; 240 | fn.y = Y; 241 | floodList[floodListCount++] = fn; 242 | } 243 | 244 | void flood(i32 X, i32 Y) 245 | { 246 | i32 idx = Y * mapWidth + X; 247 | regionMarks[idx] = 255; 248 | 249 | // Add pixel to region. 250 | regionPixel rp = {}; 251 | rp.x = X; 252 | rp.y = Y; 253 | rp.value = mapData[idx]; 254 | regionPixels[regionPixelCount++] = rp; 255 | regionCurrent.pixelCount++; 256 | 257 | if (X < regionCurrent.minX) regionCurrent.minX = X; 258 | else if (X > regionCurrent.maxX) regionCurrent.maxX = X; 259 | 260 | if (Y < regionCurrent.minY) regionCurrent.minY = Y; 261 | else if (Y > regionCurrent.maxY) regionCurrent.maxY = Y; 262 | 263 | if (rp.value > regionCurrent.maxLum) regionCurrent.maxLum = rp.value; 264 | 265 | // Remove this node from the process queue. 266 | floodListCount--; 267 | 268 | // Add next nodes to process. 269 | i32 idx0 = (Y - 1) * mapWidth + (X - 1); 270 | i32 idx1 = (Y - 1) * mapWidth + (X - 0); 271 | i32 idx2 = (Y - 1) * mapWidth + (X + 1); 272 | i32 idx3 = (Y - 0) * mapWidth + (X - 1); 273 | i32 idx4 = (Y - 0) * mapWidth + (X + 1); 274 | i32 idx5 = (Y + 1) * mapWidth + (X - 1); 275 | i32 idx6 = (Y + 1) * mapWidth + (X - 0); 276 | i32 idx7 = (Y + 1) * mapWidth + (X + 1); 277 | 278 | if (regionMarks[idx0] == 0 && mapData[idx0] > 0) addFloodNode(X - 1, Y - 1); 279 | if (regionMarks[idx1] == 0 && mapData[idx1] > 0) addFloodNode(X - 0, Y - 1); 280 | if (regionMarks[idx2] == 0 && mapData[idx2] > 0) addFloodNode(X + 1, Y - 1); 281 | if (regionMarks[idx3] == 0 && mapData[idx3] > 0) addFloodNode(X - 1, Y - 0); 282 | if (regionMarks[idx4] == 0 && mapData[idx4] > 0) addFloodNode(X + 1, Y - 0); 283 | if (regionMarks[idx5] == 0 && mapData[idx5] > 0) addFloodNode(X - 1, Y + 1); 284 | if (regionMarks[idx6] == 0 && mapData[idx6] > 0) addFloodNode(X - 0, Y + 1); 285 | if (regionMarks[idx7] == 0 && mapData[idx7] > 0) addFloodNode(X + 1, Y + 1); 286 | } 287 | 288 | //------------------------------------------------------------------------------------------------------------ 289 | // Entire algo for locating centroid positions. 290 | //------------------------------------------------------------------------------------------------------------ 291 | void process(i32 Pixels, i32 Width, i32 Height, u8* Data, const char* Filename) 292 | { 293 | // NOTE: There is strange ringing on the YUV output from the Pi camera. Values are supposed to be clamped to 294 | // 16 to 235, but in practice we get values outside of this range. Values below 16 should be around 20 and 295 | // values above 235 should be around 220. 296 | 297 | mapData = Data; 298 | mapWidth = Width; 299 | mapHeight = Height; 300 | 301 | centroidCount = 0; 302 | 303 | // Rescale data. 304 | // Remove everything below 32 305 | i32 startLevel = 32; 306 | f32 scale = 255.0f / (255 - startLevel); 307 | 308 | for (int i = 0; i < Pixels; ++i) 309 | { 310 | if (Data[i] <= startLevel) 311 | { 312 | Data[i] = 0; 313 | } 314 | else 315 | { 316 | f32 p = Data[i]; 317 | p = (p - startLevel) * scale; 318 | 319 | if (p < 0) 320 | p = 0; 321 | 322 | Data[i] = (u8)p; 323 | } 324 | } 325 | 326 | memset(tempMarks, 255, sizeof(tempMarks)); 327 | 328 | memset(regionMarks, 0, sizeof(regionMarks)); 329 | regionCount = 0; 330 | regionPixelCount = 0; 331 | 332 | // Create bounds border. 
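// Zeroing the outer one-pixel frame means the unchecked neighbour reads in
// flood() stay in bounds: a border pixel always fails the mapData[idx] > 0
// test, so the fill can never step onto (or past) the image edge.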
333 | for (int i = 0; i < Width; ++i) 334 | { 335 | Data[i] = 0; 336 | Data[(Height - 1) * Width + i] = 0; 337 | } 338 | 339 | for (int i = 0; i < Height; ++i) 340 | { 341 | Data[i * Width + 0] = 0; 342 | Data[i * Width + Width - 1] = 0; 343 | } 344 | 345 | regionCurrent.minX = 2000; 346 | regionCurrent.minY = 2000; 347 | regionCurrent.maxX = 0; 348 | regionCurrent.maxY = 0; 349 | regionCurrent.pixelIdx = regionPixelCount; 350 | regionCurrent.pixelCount = 0; 351 | regionCurrent.maxLum = 0; 352 | regionCurrent.id = regionCount; 353 | 354 | // Flood fill to find regions above 0. 355 | for (int iY = 0; iY < Height; ++iY) 356 | { 357 | for (int iX = 0; iX < Width; ++iX) 358 | { 359 | int idx = iY * Width + iX; 360 | 361 | if (regionMarks[idx] == 0 && Data[idx] > 30) 362 | { 363 | //std::cout << "Flooding " << iX << ", " << iY << "\n"; 364 | floodListCount = 1; 365 | flood(iX, iY); 366 | 367 | while (floodListCount) 368 | { 369 | flood(floodList[floodListCount - 1].x, floodList[floodListCount - 1].y); 370 | } 371 | 372 | //std::cout << "Added region " << (i32)regionCurrent.id << ": " << regionCurrent.pixelCount << "\n"; 373 | 374 | regionCurrent.width = (regionCurrent.maxX + 1) - regionCurrent.minX; 375 | regionCurrent.height = (regionCurrent.maxY + 1) - regionCurrent.minY; 376 | 377 | // Reject small regions. 378 | // TODO: Only reject large regions after watershedding? 379 | if (regionCurrent.width >= 3 && regionCurrent.width <= 100 && regionCurrent.height >= 3 && regionCurrent.height <= 100) 380 | { 381 | tempDataWidth = regionCurrent.width; 382 | tempDataHeight = regionCurrent.height; 383 | memset(tempData, 0, regionCurrent.width * regionCurrent.height); 384 | memset(tempMarks, 0, regionCurrent.width * regionCurrent.height); 385 | 386 | // Apply auto contrast scale to region. 387 | float lumScale = 255.0f / regionCurrent.maxLum; 388 | 389 | // Copy region to temporary data buffer. 390 | for (int iP = 0; iP < regionCurrent.pixelCount; ++iP) 391 | { 392 | regionPixel* p = ®ionPixels[regionCurrent.pixelIdx + iP]; 393 | float pVal = (float)p->value * lumScale; 394 | 395 | i32 tempIdx = (p->y - regionCurrent.minY) * regionCurrent.width + (p->x - regionCurrent.minX); 396 | tempData[tempIdx] = (u8)pVal; 397 | p->value = (u8)pVal; 398 | } 399 | 400 | //std::cout << "Watershed Region " << (i32)regionCurrent.id << "\n"; 401 | watershedRegion(); 402 | //std::cout << "Region " << (i32)regionCurrent.id << " Segments " << segmentCount << "\n"; 403 | 404 | // Calculate centroid from watershed segments. 
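// Each centroid is the luminance-weighted mean of its segment's pixels:
//   cX = sum(x_i * v_i) / sum(v_i), cY = sum(y_i * v_i) / sum(v_i)
// with the segment's region origin (s->x, s->y) added back at the end, since
// segment pixels are stored relative to the region's bounding box.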
405 | for (int iS = 0; iS < segmentCount; ++iS) 406 | { 407 | segment* s = &segments[iS]; 408 | i32 totalGravity = 0; 409 | float cX = 0.0f; 410 | float cY = 0.0f; 411 | 412 | for (int iSP = 0; iSP < s->pixelCount; ++iSP) 413 | { 414 | totalGravity += s->pixels[iSP].value; 415 | } 416 | 417 | float tG = (float)totalGravity; 418 | 419 | for (int iSP = 0; iSP < s->pixelCount; ++iSP) 420 | { 421 | float gravity = (float)(s->pixels[iSP].value) / tG; 422 | cX += (float)s->pixels[iSP].x * gravity; 423 | cY += (float)s->pixels[iSP].y * gravity; 424 | } 425 | 426 | cX += s->x; 427 | cY += s->y; 428 | 429 | //std::cout << "Centroid " << cX << ", " << cY << "\n"; 430 | centroids[centroidCount].x = cX; 431 | centroids[centroidCount++].y = cY; 432 | } 433 | 434 | for (int iS = 0; iS < segmentCount; ++iS) 435 | { 436 | segment* s = &segments[iS]; 437 | 438 | srand(regionCurrent.id * 100 + iS + 10); 439 | u8 cR = rand() % 255; 440 | u8 cG = rand() % 255; 441 | u8 cB = rand() % 255; 442 | 443 | // Draw segs on image 444 | for (int iSP = 0; iSP < s->pixelCount; ++iSP) 445 | { 446 | int idx = (s->pixels[iSP].y + s->y) * 1024 + (s->pixels[iSP].x + s->x); 447 | float scale = rgbImg[idx * 3 + 0] / 255.0f; 448 | rgbImg[idx * 3 + 0] = (u8)(cR * scale); 449 | rgbImg[idx * 3 + 1] = (u8)(cG * scale); 450 | rgbImg[idx * 3 + 2] = (u8)(cB * scale); 451 | } 452 | } 453 | 454 | //char tempFileName[256]; 455 | //sprintf(tempFileName, "reg%d_%s", regionCurrent.id, Filename); 456 | //saveGrayscale(tempFileName, regionCurrent.width, regionCurrent.height, tempData); 457 | 458 | regions[regionCount++] = regionCurrent; 459 | 460 | // Run distance field transform. 461 | 462 | // Multiply distance and image lum. 463 | 464 | // Apply threshold to find minima in each region. 465 | 466 | // Priority fill to build watersheds. 467 | 468 | // Reject large segments. 469 | 470 | // Gravity weight each segment to find centroid. 471 | } 472 | 473 | regionCurrent.minX = 2000; 474 | regionCurrent.minY = 2000; 475 | regionCurrent.maxX = 0; 476 | regionCurrent.maxY = 0; 477 | regionCurrent.pixelIdx = regionPixelCount; 478 | regionCurrent.pixelCount = 0; 479 | regionCurrent.maxLum = 0; 480 | regionCurrent.id = regionCount; 481 | } 482 | } 483 | } 484 | } 485 | 486 | void processImage(const char* Filename) 487 | { 488 | std::cout << "Process Pixels in " << Filename << "\n"; 489 | 490 | int width = 1024; 491 | int height = 704; 492 | int pixelCount = width * height; 493 | 494 | u8 buffer[pixelCount]; 495 | 496 | int x, y, n; 497 | unsigned char* imgData = stbi_load(Filename, &x, &y, &n, 0); 498 | 499 | for (int i = 0; i < pixelCount; ++i) 500 | { 501 | buffer[i] = imgData[i * 3]; 502 | } 503 | 504 | stbi_image_free(imgData); 505 | 506 | rgbImg = (uint8_t*)malloc(pixelCount * 3); 507 | 508 | // Copy grayscale to rgb. 
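// The RGB copy exists purely for debug output: process() tints each watershed
// segment a random colour, the loop further down draws red region bounds, and
// each centroid is marked as a single green pixel before the frame is saved.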
509 | for (int y = 0; y < height; ++y) 510 | { 511 | for (int x = 0; x < width; ++x) 512 | { 513 | int idx = y * width + x; 514 | u8 val = buffer[idx]; 515 | 516 | //if (buffer[idx] > 0) 517 | //val = 255; 518 | 519 | rgbImg[idx * 3 + 0] = val; 520 | rgbImg[idx * 3 + 1] = val; 521 | rgbImg[idx * 3 + 2] = val; 522 | } 523 | } 524 | 525 | int brightPixels = 0; 526 | 527 | int64_t startTime = PlatformGetMicrosecond(); 528 | 529 | process(pixelCount, width, height, buffer, Filename); 530 | 531 | startTime = PlatformGetMicrosecond() - startTime; 532 | 533 | std::cout << "Runtime: " << (startTime / 1000.0f) << " ms\n"; 534 | 535 | for (int iR = 0; iR < regionCount; ++iR) 536 | { 537 | region* r = ®ions[iR]; 538 | 539 | srand(r->id + 10); 540 | u8 cR = rand() % 255; 541 | u8 cG = rand() % 255; 542 | u8 cB = rand() % 255; 543 | 544 | /* 545 | for (int iP = 0; iP < r->pixelCount; ++iP) 546 | { 547 | regionPixel* p = ®ionPixels[r->pixelIdx + iP]; 548 | 549 | int idx = p->y * width + p->x; 550 | rgbImg[idx * 3 + 0] = cR; 551 | rgbImg[idx * 3 + 1] = cG; 552 | rgbImg[idx * 3 + 2] = cB; 553 | } 554 | */ 555 | 556 | /* 557 | for (int iP = 0; iP < r->pixelCount; ++iP) 558 | { 559 | regionPixel* p = ®ionPixels[r->pixelIdx + iP]; 560 | 561 | int idx = p->y * width + p->x; 562 | rgbImg[idx * 3 + 0] = p->value; 563 | rgbImg[idx * 3 + 1] = p->value; 564 | rgbImg[idx * 3 + 2] = p->value; 565 | } 566 | */ 567 | 568 | //* 569 | // Region bounds. 570 | for (int y = r->minY - 1; y < r->maxY + 2; ++y) 571 | { 572 | for (int x = r->minX - 1; x < r->maxX + 2; ++x) 573 | { 574 | if (x == r->minX - 1 || x == r->maxX + 1 || 575 | y == r->minY - 1 || y == r->maxY + 1) 576 | { 577 | int idx = y * width + x; 578 | 579 | rgbImg[idx * 3 + 0] = 255; 580 | rgbImg[idx * 3 + 1] = 0; 581 | rgbImg[idx * 3 + 2] = 0; 582 | } 583 | } 584 | } 585 | //*/ 586 | } 587 | 588 | for (int iC = 0; iC < centroidCount; ++iC) 589 | { 590 | int idx = (int)centroids[iC].y * width + (int)centroids[iC].x; 591 | 592 | rgbImg[idx * 3 + 0] = 0; 593 | rgbImg[idx * 3 + 1] = 255; 594 | rgbImg[idx * 3 + 2] = 0; 595 | } 596 | 597 | // Save processed frame. 598 | char outFilename[256]; 599 | sprintf(outFilename, "proc_%s", Filename); 600 | if (stbi_write_tga(outFilename, width, height, 3, rgbImg) == 0) 601 | std::cout << "Failed STB write\n"; 602 | } 603 | 604 | //------------------------------------------------------------------------------------------------------------ 605 | // Application entry. 
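// main() walks a hard-coded list of test captures (the longer list is kept
// commented out for batch runs), using the usual sizeof(array) / sizeof(element)
// count idiom; each image gets the debug overlay and is re-saved as proc_<name>.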
606 | //------------------------------------------------------------------------------------------------------------ 607 | int main(int argc, char** argv) 608 | { 609 | /* 610 | const char* fileList[] = 611 | { 612 | "m1.tga", 613 | "m2.tga", 614 | "m2.tga", 615 | "m4.tga", 616 | "m5.tga", 617 | "close1.tga", 618 | "close2.tga", 619 | "blur1.tga", 620 | "blur2.tga", 621 | "desk_blobs.tga", 622 | "blobs.tga" 623 | }; 624 | //*/ 625 | //* 626 | const char* fileList[] = 627 | { 628 | "desk_blobs_masked.tga" 629 | }; 630 | //*/ 631 | 632 | for (int i = 0; i < sizeof(fileList) / sizeof(char**); ++i) 633 | { 634 | processImage(fileList[i]); 635 | } 636 | 637 | return 0; 638 | } -------------------------------------------------------------------------------- /PixelProcessor/precomp.cpp: -------------------------------------------------------------------------------- 1 | #include "precomp.h" 2 | 3 | #define STB_IMAGE_WRITE_IMPLEMENTATION 4 | #include "stb_image_write.h" 5 | 6 | #define STB_IMAGE_IMPLEMENTATION 7 | #include "stb_image.h" -------------------------------------------------------------------------------- /PixelProcessor/precomp.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include "stb_image_write.h" 4 | #include "stb_image.h" -------------------------------------------------------------------------------- /PyMocap/makeblobdetect.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | python setup.py build_ext --inplace -------------------------------------------------------------------------------- /PyMocap/mocap.py: -------------------------------------------------------------------------------- 1 | #------------------------------------------------------------------------------ 2 | # Marker Tracker. 3 | #------------------------------------------------------------------------------ 4 | import io 5 | from io import BytesIO 6 | import socket 7 | from socket import * 8 | import struct 9 | import time 10 | import picamera 11 | import sys 12 | import select 13 | import threading 14 | import pprint 15 | import numpy as np 16 | import os 17 | import re 18 | import subprocess 19 | import zlib 20 | import RPIO.PWM as PWM 21 | import Queue 22 | import traceback 23 | import blobdetect 24 | from fractions import Fraction 25 | import json 26 | 27 | version = 110 28 | 29 | # Current Camera State 30 | camRunning = False 31 | camFps = 50 32 | camMode = 0 33 | 34 | # Net Command Request State 35 | setRunCamera = camRunning 36 | setCamFps = camFps 37 | setCamMode = camMode 38 | 39 | # Net 40 | netSend = Queue.Queue() 41 | serverFile = None 42 | 43 | #------------------------------------------------------------------------------ 44 | # Helper functions. 45 | #------------------------------------------------------------------------------ 46 | def GetSerial(): 47 | cpuserial = "00000000" 48 | 49 | try: 50 | f = open('/proc/cpuinfo','r') 51 | for line in f: 52 | if line[0:6]=='Serial': 53 | cpuserial = line[18:26] 54 | f.close() 55 | except: 56 | pass 57 | 58 | return int(cpuserial, 16) 59 | 60 | #------------------------------------------------------------------------------ 61 | # Config. 
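# Persistent per-device settings, stored as config.json. 'mask' is a string of
# 64 * 44 '0'/'1' flags - the 1024x704 sensor frame at 1/16 scale, presumably
# one flag per 16x16 pixel block - which blobdetect uses to ignore regions of
# the image.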
62 | #------------------------------------------------------------------------------ 63 | tempMask = '' 64 | for i in range(0, 64 * 44): 65 | tempMask += '1' 66 | 67 | config = { 68 | 'name' : 'Unnamed', 69 | 'mask' : tempMask 70 | } 71 | 72 | def SaveConfig(): 73 | with open('config.json', 'w') as f: 74 | json.dump(config, f, indent=4) 75 | 76 | def LoadConfig(): 77 | try: 78 | with open('config.json', 'r') as f: 79 | config.update(json.load(f)) 80 | except Exception,e: 81 | print 'Config file: ' + str(e) 82 | 83 | SaveConfig() 84 | 85 | LoadConfig() 86 | 87 | #------------------------------------------------------------------------------ 88 | # RGB LED Setup. 89 | #------------------------------------------------------------------------------ 90 | print('Starting RGB LED') 91 | #PWM.set_loglevel(PWM.LOG_LEVEL_DEBUG) 92 | PWM.set_loglevel(PWM.LOG_LEVEL_ERRORS) 93 | 94 | PWM.setup(pulse_incr_us=10) 95 | PWM.init_channel(0, subcycle_time_us=10000) 96 | PWM.print_channel(0) 97 | 98 | BLUE_LED = 2 99 | RED_LED = 3 100 | GREEN_LED = 4 101 | 102 | PWM.add_channel_pulse(0, BLUE_LED, 0, 1000) 103 | PWM.add_channel_pulse(0, RED_LED, 0, 1000) 104 | PWM.add_channel_pulse(0, GREEN_LED, 0, 1000) 105 | 106 | def SetRGB(r, g, b): 107 | PWM.add_channel_pulse(0, RED_LED, 0, int(500 + (1.0 - r) * 500)) 108 | PWM.add_channel_pulse(0, GREEN_LED, 0, int(500 + (1.0 - g) * 500)) 109 | PWM.add_channel_pulse(0, BLUE_LED, 0, int(500 + (1.0 - b) * 500)) 110 | 111 | #------------------------------------------------------------------------------ 112 | # Network Command Thread. 113 | #------------------------------------------------------------------------------ 114 | class NetworkReaderThread(threading.Thread): 115 | def __init__(self): 116 | super(NetworkReaderThread, self).__init__() 117 | self.daemon = True 118 | self.broadcastSocket = None 119 | self.serverIp = None 120 | self.serverSocket = None 121 | self.start() 122 | 123 | def FindServer(self): 124 | # NOTE: We need to drain the socket as old broadcast msgs remain. 125 | self.broadcastSocket.setblocking(0) 126 | 127 | while True: 128 | try: 129 | recvData = self.broadcastSocket.recv(1024) 130 | except: 131 | break 132 | 133 | self.broadcastSocket.settimeout(0.5) 134 | 135 | while True: 136 | try: 137 | #print('Waiting for Server IP') 138 | msg = self.broadcastSocket.recvfrom(1024) 139 | 140 | r = re.search('KineticSynth:(\d+.\d+.\d+.\d+)', msg[0]) 141 | 142 | if r != None: 143 | print 'Found Server IP: {} from {}'.format(r.group(1), msg[1]) 144 | return r.group(1) 145 | except: 146 | continue 147 | 148 | def run(self): 149 | global setRunCamera 150 | global netSend 151 | global serverFile 152 | global camera 153 | global setCamFps 154 | global setCamMode 155 | global deviceSerial 156 | 157 | print('Starting Network Thread') 158 | 159 | self.broadcastSocket = socket(AF_INET, SOCK_DGRAM) 160 | self.broadcastSocket.bind(('', 45454)) 161 | 162 | while True: 163 | self.serverSocket = socket() 164 | self.serverIP = self.FindServer() 165 | blobdetect.masterconnectionmade(self.serverIP) 166 | #self.serverIP = '192.168.1.106' 167 | 168 | try: 169 | self.serverSocket.connect((self.serverIP, 8000)) 170 | self.serverSocket.setsockopt(IPPROTO_TCP, TCP_NODELAY, 1) 171 | except: 172 | print('Cant Connect') 173 | continue 174 | 175 | serverFile = self.serverSocket.makefile('wb') 176 | # TODO: Need to clear this 177 | #netSend = Queue.Queue() 178 | print('Connected to Server') 179 | 180 | while True: 181 | # TODO: Catch exceptions here. 
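# Server commands are newline-terminated, comma-separated fields:
#   sc / ec       start / stop the camera
#   pe / pi / pf  set shutter speed (us) / ISO / frame rate
#   gi            reply with version, serial and the JSON config (type-4 packet)
#   cm            select camera mode (0 = H264 stream, 1 = marker detection)
#   sm / sp       replace the blob mask / bulk-update config properties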
182 | try: 183 | data = serverFile.readline() 184 | #print('Socket Read ' + data) 185 | # Check all the things 186 | if not data: 187 | blobdetect.masterconnectionlost() 188 | print('Socket Dead') 189 | setRunCamera = False 190 | serverFile = None 191 | break 192 | else: 193 | data = data.rstrip() 194 | args = data.split(',') 195 | 196 | if args[0] == 'sc': 197 | print('Net start cam') 198 | setRunCamera = True 199 | elif args[0] == 'ec': 200 | print('Net stop cam') 201 | setRunCamera = False 202 | elif args[0] == 'pe': 203 | print('Set exposure ' + args[1]) 204 | camera.shutter_speed = int(args[1]) 205 | elif args[0] == 'pi': 206 | print('Set ISO ' + args[1]) 207 | camera.iso = int(args[1]) 208 | elif args[0] == 'pf': 209 | print('Set FPS ' + args[1]) 210 | setCamFps = int(args[1]) 211 | elif args[0] == 'gi': 212 | configStr = json.dumps(config) 213 | data = struct.pack('IIII', 4, version, deviceSerial, len(configStr)) 214 | netSend.put(data) 215 | netSend.put(configStr) 216 | elif args[0] == 'cm': 217 | print('Set camera mode ' + args[1]) 218 | setCamMode = int(args[1]) 219 | elif args[0] == 'sm': 220 | print('Set mask') 221 | config['mask'] = args[1] 222 | blobdetect.setmask(args[1]) 223 | SaveConfig() 224 | elif args[0] == 'sp': 225 | print('Set properties') 226 | config.update(json.loads(data[3:])) 227 | blobdetect.setmask(config['mask']) 228 | SaveConfig() 229 | 230 | except Exception,e: 231 | blobdetect.masterconnectionlost() 232 | print('Socket Dead: ' + str(e)) 233 | setRunCamera = False 234 | serverFile = None 235 | break 236 | 237 | class NetworkWriterThread(threading.Thread): 238 | def __init__(self): 239 | super(NetworkWriterThread, self).__init__() 240 | self.daemon = True 241 | self.start() 242 | 243 | def run(self): 244 | global setRunCamera 245 | #global netSend 246 | global serverFile 247 | 248 | print('Starting Network Writer Thread') 249 | 250 | while True: 251 | data = netSend.get() 252 | 253 | try: 254 | if serverFile != None: 255 | serverFile.write(data) 256 | serverFile.flush() 257 | except Exception,e: 258 | print "Net Write Ex:" + str(e) 259 | #continue 260 | 261 | 262 | NetworkReaderThread() 263 | NetworkWriterThread() 264 | 265 | #------------------------------------------------------------------------------ 266 | # Camera Output H264. 
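# Both output classes below phase-lock the camera to the shared master clock.
# fd is the frame duration in microseconds for each supported frame rate
# (empirical values, slightly off the ideal 1e6 / fps), frameErr is the
# distance from the master's frame grid, and camera.framerate_delta is nudged
# in graded steps (0.1 up to 10) until the error falls below the 100 us
# deadband. A standalone sketch of the frame-id math sits at the top of the
# Camera & Main Loop section further down.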
267 | #------------------------------------------------------------------------------ 268 | class CamOutput(object): 269 | def __init__(self): 270 | self.lastFrameTime = 0 271 | 272 | def write(self, s): 273 | global camera 274 | global netSend 275 | 276 | try: 277 | frameSize = len(s) 278 | frameTime = camera.frame.timestamp 279 | frameType = int(camera.frame.frame_type) 280 | 281 | if frameTime == None: 282 | frameTime = 0 283 | 284 | #print('{} {} {} {}'.format(camera.frame.index, frameType, frameSize, frameTime)) 285 | 286 | avgHostOffset = 0.0 287 | currentFrameMasterId = 0 288 | 289 | if frameType != 2: 290 | diff = frameTime - self.lastFrameTime 291 | self.lastFrameTime = frameTime 292 | 293 | masterTime, avgHostOffset = blobdetect.getmastertime() 294 | camToMasterTime = masterTime - camera.timestamp 295 | 296 | fd = 11080 297 | 298 | if camFps == 50: 299 | fd = 19941 300 | elif camFps == 60: 301 | fd = 16611 302 | elif camFps == 70: 303 | fd = 14235 304 | elif camFps == 80: 305 | fd = 12463 306 | elif camFps == 90: 307 | fd = 11080 308 | elif camFps == 100: 309 | fd = 9970 310 | 311 | fId = (frameTime + camToMasterTime) / fd 312 | frameTarget = (fId) * fd 313 | frameErr = frameTarget - (frameTime + camToMasterTime) 314 | 315 | if frameErr < -(fd / 2): 316 | frameErr = (fd + frameErr) 317 | 318 | fIdDiff = (frameTime + camToMasterTime) - frameTarget 319 | if fIdDiff < 3000: 320 | currentFrameMasterId = fId 321 | elif fIdDiff > fd - 3000: 322 | currentFrameMasterId = fId + 1 323 | 324 | correction = 0.0 325 | updateIndex = int(70000 / fd) 326 | 327 | if camera.frame.index % updateIndex == 0: 328 | if abs(frameErr) > 2000: 329 | correction = 10 330 | elif abs(frameErr) > 1000: 331 | correction = 6 332 | elif abs(frameErr) > 200: 333 | correction = 0.6 334 | elif abs(frameErr) > 100: 335 | correction = 0.1 336 | 337 | if frameErr > 0: 338 | correction *= -1 339 | 340 | camera.framerate_delta = correction 341 | else: 342 | camera.framerate_delta = 0 343 | 344 | print('{:>6} {:>6} {:>6} {:>10} {:>8.2f}'.format(diff, frameErr, correction, currentFrameMasterId, avgHostOffset)) 345 | 346 | packetHeader = struct.pack('IIIfq', 3, frameSize, frameType, avgHostOffset, currentFrameMasterId) 347 | netSend.put(packetHeader) 348 | netSend.put(s) 349 | 350 | except Exception,e: 351 | print "Ex:" + str(e) 352 | traceback.print_exc() 353 | 354 | #------------------------------------------------------------------------------ 355 | # Camera Output Markers. 
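# Same phase-lock logic as CamOutput, but the camera records raw YUV and frames
# go to the blobdetect C module instead of the network: when getstatus()
# returns 3 (apparently "results ready") the packed blob data is sent as a
# type-5 packet, and on status 0 or 3 the new frame is pushed for detection.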
356 | #------------------------------------------------------------------------------ 357 | class CamOutputMarkers(object): 358 | def __init__(self): 359 | self.lastFrameTime = 0 360 | 361 | def write(self, s): 362 | global camera 363 | global netSend 364 | 365 | try: 366 | frameTime = camera.frame.timestamp 367 | 368 | if frameTime == None: 369 | frameTime = 0 370 | return 371 | 372 | diff = frameTime - self.lastFrameTime 373 | self.lastFrameTime = frameTime 374 | 375 | currentFrameMasterId = 0 376 | masterTime, avgHostOffset = blobdetect.getmastertime() 377 | camToMasterTime = masterTime - camera.timestamp 378 | 379 | fd = 11080 380 | 381 | if camFps == 50: 382 | fd = 19941 383 | elif camFps == 60: 384 | fd = 16611 385 | elif camFps == 70: 386 | fd = 14235 387 | elif camFps == 80: 388 | fd = 12463 389 | elif camFps == 90: 390 | fd = 11080 391 | elif camFps == 100: 392 | fd = 9970 393 | 394 | fId = (frameTime + camToMasterTime) / fd 395 | frameTarget = (fId) * fd 396 | frameErr = frameTarget - (frameTime + camToMasterTime) 397 | 398 | if frameErr < -(fd / 2): 399 | frameErr = (fd + frameErr) 400 | 401 | fIdDiff = (frameTime + camToMasterTime) - frameTarget 402 | if fIdDiff < 3000: 403 | currentFrameMasterId = fId 404 | elif fIdDiff > fd - 3000: 405 | currentFrameMasterId = fId + 1 406 | 407 | correction = 0.0 408 | updateIndex = int(70000 / fd) 409 | 410 | if camera.frame.index % updateIndex == 0: 411 | if abs(frameErr) > 2000: 412 | correction = 10 413 | elif abs(frameErr) > 1000: 414 | correction = 6 415 | elif abs(frameErr) > 200: 416 | correction = 0.6 417 | elif abs(frameErr) > 100: 418 | correction = 0.1 419 | 420 | if frameErr > 0: 421 | correction *= -1 422 | 423 | camera.framerate_delta = correction 424 | else: 425 | camera.framerate_delta = 0 426 | 427 | print('{:>6} {:>6} {:>6} {:>10} {:>8.2f}'.format(diff, frameErr, correction, currentFrameMasterId, avgHostOffset)) 428 | 429 | blobStatus = blobdetect.getstatus() 430 | if blobStatus == 0 or blobStatus == 3: 431 | if blobStatus == 3: 432 | blobData = blobdetect.getblobdata() 433 | packetHeader = struct.pack('II', 5, len(blobData)) 434 | netSend.put(packetHeader) 435 | netSend.put(blobData) 436 | 437 | # NOTE: Takes about 1ms to copy buffer, should double buffer in C code. 438 | blobdetect.pushframe(currentFrameMasterId, avgHostOffset, s) 439 | 440 | #print('Markers {} {} {} {}'.format(camera.frame.index, diff, blobCount, len(blobData))) 441 | 442 | except Exception,e: 443 | print "Ex:" + str(e) 444 | traceback.print_exc() 445 | 446 | #------------------------------------------------------------------------------ 447 | # Camera & Main Loop. 448 | #------------------------------------------------------------------------------ 449 | print('VectorONE Device Software') 450 | 451 | blobdetect.startworkers() 452 | blobdetect.setmask(config['mask']) 453 | 454 | # TODO: Set all the things from config. 
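# A minimal standalone sketch of the frame-grid math used by both camera
# output classes above (illustration only; nothing below calls it, and the
# name is not part of the real code): given a camera timestamp mapped into
# master time, find which fd-microsecond frame slot it belongs to, or 0 if it
# sits in the ambiguous middle of a slot.
def sketch_master_frame_id(frameTime, camToMasterTime, fd):
    t = frameTime + camToMasterTime
    fId = t // fd                  # slot index at or below t
    frameErr = fId * fd - t        # distance back to the slot boundary
    if frameErr < -(fd // 2):
        frameErr = fd + frameErr   # wrap: the next boundary is closer
    fIdDiff = t - fId * fd
    if fIdDiff < 3000:             # within 3 ms after a boundary
        return fId
    elif fIdDiff > fd - 3000:      # within 3 ms before the next boundary
        return fId + 1
    return 0                       # too far from either boundary: no master id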
455 | 456 | deviceSerial = GetSerial() 457 | print('Serial: {}'.format(deviceSerial)) 458 | 459 | camera = picamera.PiCamera(sensor_mode = 6, clock_mode = 'raw') 460 | camera.sensor_mode = 6 461 | camera.rotation = 180 462 | camera.resolution = (1024, 704) 463 | camera.framerate = camFps 464 | camera.shutter_speed = 7000 465 | camera.iso = 800 466 | camera.awb_mode = 'off' 467 | camera.awb_gains = (1.1, 1.1) 468 | camera.exposure_mode = 'sports' 469 | camera.framerate_delta = 0 470 | 471 | def StopCamera(): 472 | print('Stopping Camera') 473 | camera.stop_recording() 474 | SetRGB(0.1, 0.1, 0.1) 475 | 476 | def StartCamera(): 477 | if camMode == 0: 478 | print('Starting Camera H264') 479 | camera.start_recording(CamOutput(), format='h264', quality=25, profile='high', level='4.2', inline_headers=True, intra_period=50) 480 | SetRGB(0, 0, 0.1) 481 | elif camMode == 1: 482 | print('Starting Camera Markers') 483 | camera.start_recording(CamOutputMarkers(), format='yuv') 484 | SetRGB(0.1, 0, 0.1) 485 | 486 | while True: 487 | SetRGB(0.1, 0.1, 0.1) 488 | 489 | while True: 490 | time.sleep(0.1) 491 | 492 | if camFps != setCamFps: 493 | if camRunning: 494 | StopCamera() 495 | 496 | camera.framerate = setCamFps 497 | camFps = setCamFps 498 | 499 | if camRunning: 500 | StartCamera() 501 | 502 | if camMode != setCamMode: 503 | if camRunning: 504 | StopCamera() 505 | 506 | camMode = setCamMode 507 | 508 | if camRunning: 509 | StartCamera() 510 | 511 | if camRunning != setRunCamera: 512 | if camRunning: 513 | # TODO: Catch Exceptions 514 | StopCamera() 515 | else: 516 | StartCamera() 517 | 518 | camRunning = setRunCamera 519 | -------------------------------------------------------------------------------- /PyMocap/setup.py: -------------------------------------------------------------------------------- 1 | from distutils.core import setup, Extension 2 | 3 | setup ( 4 | ext_modules = [ 5 | Extension( 6 | 'blobdetect', 7 | sources = ['blobdetectmodule.cpp'], 8 | extra_compile_args = ['--std=c++11'], 9 | language='c++11', 10 | ), 11 | ] 12 | ) -------------------------------------------------------------------------------- /PyMocap/unit_test_detector.py: -------------------------------------------------------------------------------- 1 | #------------------------------------------------------------------------------ 2 | # Marker Tracker. 3 | #------------------------------------------------------------------------------ 4 | import io 5 | from io import BytesIO 6 | import struct 7 | import time 8 | import picamera 9 | import sys 10 | import select 11 | import threading 12 | import pprint 13 | import numpy as np 14 | import os 15 | import re 16 | import subprocess 17 | import zlib 18 | import Queue 19 | import socket 20 | from socket import * 21 | import multiprocessing 22 | import blobdetect 23 | 24 | #------------------------------------------------------------------------------ 25 | # Processing thread. 
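# Dead code below: a pure-Python worker thread and a multiprocessing variant
# were both tried and left commented out. The live path hands frames straight
# to the blobdetect C extension; per the note in CamOutput.write, getblobcount()
# blocks until the previously pushed frame has been fully processed.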
26 | #------------------------------------------------------------------------------ 27 | ''' 28 | processQueue = Queue.Queue() 29 | 30 | class DetectorThread(threading.Thread): 31 | def __init__(self): 32 | super(DetectorThread, self).__init__() 33 | self.daemon = True 34 | self.start() 35 | 36 | def run(self): 37 | print('Starting detector thread.') 38 | 39 | while True: 40 | processQueue.get() 41 | #print('Got Frame {}'.format(processQueue.qsize())) 42 | 43 | try: 44 | t0 = time.time() 45 | #time.sleep(0.008) 46 | for x in range (0, 40000): 47 | pass 48 | 49 | #blobdetect.detect() 50 | t0 = time.time() - t0 51 | print('Process Time {}'.format(t0 * 1000)) 52 | except: 53 | continue 54 | 55 | DetectorThread() 56 | ''' 57 | 58 | ''' 59 | wpQueue = multiprocessing.Queue() 60 | wp = multiprocessing.Process(target=worker.worker, args=(wpQueue,)) 61 | wp.daemon = True 62 | wp.start() 63 | ''' 64 | 65 | #------------------------------------------------------------------------------ 66 | # Camera Output. 67 | #------------------------------------------------------------------------------ 68 | class CamOutput(object): 69 | def __init__(self): 70 | self.size = 0 71 | self.lastFrameTime = 0 72 | self.adjusting = 0 73 | 74 | def write(self, s): 75 | global camera 76 | global processQueue 77 | 78 | try: 79 | frameSize = len(s) 80 | self.size += frameSize 81 | frameTime = camera.frame.timestamp 82 | 83 | if frameTime == None: 84 | print('SHOULD NEVER HAPPEN') 85 | 86 | diff = frameTime - self.lastFrameTime 87 | #print('{} {} {}'.format(camera.frame.index, diff, frameSize)) 88 | 89 | t0 = time.time() 90 | # NOTE: Will block until previously pushed frame has been completed. 91 | blobCount = blobdetect.getblobcount() 92 | # NOTE: Takes about 1ms to copy buffer, should double buffer in C code. 93 | blobdetect.pushframe(s) 94 | t0 = time.time() - t0 95 | print('copy {}ms {}'.format(t0 * 1000.0, blobCount)) 96 | 97 | if diff > 10100: 98 | print('FRAME SKIPPED {}'.format(diff)) 99 | 100 | self.lastFrameTime = frameTime 101 | 102 | except Exception,e: 103 | print "Ex:" + str(e) 104 | traceback.print_exc() 105 | 106 | def flush(self): 107 | print('{} Bytes Written'.format(self.size)) 108 | 109 | #------------------------------------------------------------------------------ 110 | # Camera & Main Loop. 111 | #------------------------------------------------------------------------------ 112 | print('High FPS Capture') 113 | 114 | blobdetect.startworkers() 115 | 116 | camera = picamera.PiCamera(sensor_mode = 6, clock_mode = 'raw') 117 | camera.sensor_mode = 6 118 | camera.rotation = 180 119 | camera.resolution = (1024, 704) 120 | camera.framerate = 100 121 | camera.awb_mode = 'off' 122 | camera.awb_gains = (1.1, 1.1) 123 | camera.exposure_mode = 'sports' 124 | camera.framerate_delta = 0 125 | camera.shutter_speed = 7000 126 | camera.iso = 800 127 | 128 | while True: 129 | 130 | #camera.start_recording(CamOutput(), format='h264', quality=25, profile='high', level='4.2', inline_headers=True, intra_period=50) 131 | camera.start_recording(CamOutput(), format='yuv') 132 | while True: 133 | time.sleep(1.0) 134 | 135 | print('Stopped Camera') 136 | camera.stop_recording() -------------------------------------------------------------------------------- /PyMocap/unit_test_drift.py: -------------------------------------------------------------------------------- 1 | #------------------------------------------------------------------------------ 2 | # Marker Tracker. 
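# Despite the copied "Marker Tracker" header, this is the clock-drift test:
# every frame timestamp is rebased to the camera's start-of-run clock, a
# running average of the inter-frame interval is kept, and each timestamp is
# UDP-broadcast on port 45455 so a listener can compare cameras side by side.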
3 | #------------------------------------------------------------------------------ 4 | import io 5 | from io import BytesIO 6 | import struct 7 | import time 8 | import picamera 9 | import sys 10 | import select 11 | import threading 12 | import pprint 13 | import numpy as np 14 | import os 15 | import re 16 | import subprocess 17 | import zlib 18 | import Queue 19 | import socket 20 | from socket import * 21 | import multiprocessing 22 | import blobdetect 23 | 24 | #------------------------------------------------------------------------------ 25 | # Processing thread. 26 | #------------------------------------------------------------------------------ 27 | ''' 28 | processQueue = Queue.Queue() 29 | 30 | class DetectorThread(threading.Thread): 31 | def __init__(self): 32 | super(DetectorThread, self).__init__() 33 | self.daemon = True 34 | self.start() 35 | 36 | def run(self): 37 | print('Starting detector thread.') 38 | 39 | while True: 40 | processQueue.get() 41 | #print('Got Frame {}'.format(processQueue.qsize())) 42 | 43 | try: 44 | t0 = time.time() 45 | #time.sleep(0.008) 46 | for x in range (0, 40000): 47 | pass 48 | 49 | #blobdetect.detect() 50 | t0 = time.time() - t0 51 | print('Process Time {}'.format(t0 * 1000)) 52 | except: 53 | continue 54 | 55 | DetectorThread() 56 | ''' 57 | 58 | ''' 59 | wpQueue = multiprocessing.Queue() 60 | wp = multiprocessing.Process(target=worker.worker, args=(wpQueue,)) 61 | wp.daemon = True 62 | wp.start() 63 | ''' 64 | 65 | broadcastSocket = socket(AF_INET, SOCK_DGRAM) 66 | #broadcastSocket.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1) 67 | broadcastSocket.setsockopt(SOL_SOCKET, SO_BROADCAST, 1) 68 | 69 | #------------------------------------------------------------------------------ 70 | # Camera Output. 71 | #------------------------------------------------------------------------------ 72 | class CamOutput(object): 73 | def __init__(self): 74 | self.size = 0 75 | self.lastFrameTime = 0 76 | self.adjusting = 0 77 | self.average = 0 78 | self.averageCount = 0 79 | self.ca = 0 80 | 81 | def write(self, s): 82 | global camera 83 | global processQueue 84 | global broadcastSocket 85 | global camTimeOffset 86 | 87 | try: 88 | frameSize = len(s) 89 | self.size += frameSize 90 | frameTime = camera.frame.timestamp 91 | 92 | if frameTime == None: 93 | frameTime = 0 94 | else: 95 | frameTime -= camTimeOffset 96 | 97 | diff = frameTime - self.lastFrameTime 98 | 99 | #err = int(frameTime / 9970.72 + 0.5) 100 | err = frameTime / 9970.74175 101 | print('{} {} {} {} {}'.format(camera.frame.index, diff, frameTime, err, self.ca)) 102 | 103 | if diff > 0 and diff < 10100: 104 | self.average += float(diff) 105 | self.averageCount += 1 106 | self.ca = self.average / float(self.averageCount) 107 | 108 | #broadcastSocket.sendto('test', ('192.168.1.255', 45455)) 109 | broadcastSocket.sendto('{}'.format(frameTime), ('255.255.255.255', 45455)) 110 | 111 | #if diff > 10100: 112 | #print('FRAME SKIPPED {}'.format(diff)) 113 | 114 | self.lastFrameTime = frameTime 115 | 116 | except Exception,e: 117 | print "Ex:" + str(e) 118 | traceback.print_exc() 119 | 120 | def flush(self): 121 | print('{} Bytes Written'.format(self.size)) 122 | 123 | #------------------------------------------------------------------------------ 124 | # Camera & Main Loop. 
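# 9970.74175 is an empirically measured frame period in microseconds at
# 100 fps (nominally 10000); dividing rebased timestamps by it yields a
# fractional frame index whose slope exposes clock skew. The camera below is
# configured for 40 fps, so the 10100 us averaging window and this period
# appear to be leftovers from 100 fps runs.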
125 | #------------------------------------------------------------------------------ 126 | print('High FPS Capture') 127 | 128 | 129 | blobdetect.startworkers() 130 | 131 | camera = picamera.PiCamera(sensor_mode = 6, clock_mode = 'raw') 132 | camera.sensor_mode = 6 133 | camera.rotation = 180 134 | camera.resolution = (1024, 704) 135 | camera.framerate = 40 136 | camera.awb_mode = 'off' 137 | camera.awb_gains = (1.1, 1.1) 138 | camera.exposure_mode = 'sports' 139 | camera.framerate_delta = 0 140 | camera.shutter_speed = 7000 141 | camera.iso = 800 142 | 143 | camTimeOffset = camera.timestamp 144 | 145 | print(str(camTimeOffset)) 146 | 147 | while True: 148 | 149 | #camera.start_recording(CamOutput(), format='h264', quality=25, profile='high', level='4.2', inline_headers=True, intra_period=50) 150 | camera.start_recording(CamOutput(), format='yuv') 151 | #print('Start Time {0:.2f}ms'.format(t)) 152 | 153 | while True: 154 | time.sleep(1.0) 155 | 156 | print('Stopped Camera') 157 | camera.stop_recording() -------------------------------------------------------------------------------- /PyMocap/unit_test_timing.py: -------------------------------------------------------------------------------- 1 | #------------------------------------------------------------------------------ 2 | # Marker Tracker. 3 | #------------------------------------------------------------------------------ 4 | import io 5 | from io import BytesIO 6 | import struct 7 | import time 8 | import picamera 9 | import sys 10 | import select 11 | import threading 12 | import pprint 13 | import numpy as np 14 | import os 15 | import re 16 | import subprocess 17 | import zlib 18 | import Queue 19 | import socket 20 | from socket import * 21 | import multiprocessing 22 | import blobdetect 23 | 24 | #------------------------------------------------------------------------------ 25 | # Processing thread. 26 | #------------------------------------------------------------------------------ 27 | ''' 28 | processQueue = Queue.Queue() 29 | 30 | class DetectorThread(threading.Thread): 31 | def __init__(self): 32 | super(DetectorThread, self).__init__() 33 | self.daemon = True 34 | self.start() 35 | 36 | def run(self): 37 | print('Starting detector thread.') 38 | 39 | while True: 40 | processQueue.get() 41 | #print('Got Frame {}'.format(processQueue.qsize())) 42 | 43 | try: 44 | t0 = time.time() 45 | #time.sleep(0.008) 46 | for x in range (0, 40000): 47 | pass 48 | 49 | #blobdetect.detect() 50 | t0 = time.time() - t0 51 | print('Process Time {}'.format(t0 * 1000)) 52 | except: 53 | continue 54 | 55 | DetectorThread() 56 | ''' 57 | 58 | ''' 59 | wpQueue = multiprocessing.Queue() 60 | wp = multiprocessing.Process(target=worker.worker, args=(wpQueue,)) 61 | wp.daemon = True 62 | wp.start() 63 | ''' 64 | 65 | broadcastSocket = socket(AF_INET, SOCK_DGRAM) 66 | #broadcastSocket.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1) 67 | broadcastSocket.setsockopt(SOL_SOCKET, SO_BROADCAST, 1) 68 | 69 | #------------------------------------------------------------------------------ 70 | # Camera Output. 
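# Near-identical to the drift test's output class, but with the UDP broadcast
# commented out: this variant only prints per-frame interval, rebased
# timestamp and fractional frame index to check local timing at 100 fps.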
71 | #------------------------------------------------------------------------------ 72 | class CamOutput(object): 73 | def __init__(self): 74 | self.size = 0 75 | self.lastFrameTime = 0 76 | self.adjusting = 0 77 | self.average = 0 78 | self.averageCount = 0 79 | self.ca = 0 80 | 81 | def write(self, s): 82 | global camera 83 | global processQueue 84 | global broadcastSocket 85 | global camTimeOffset 86 | 87 | try: 88 | frameSize = len(s) 89 | self.size += frameSize 90 | frameTime = camera.frame.timestamp 91 | 92 | if frameTime == None: 93 | frameTime = 0 94 | else: 95 | frameTime -= camTimeOffset 96 | 97 | diff = frameTime - self.lastFrameTime 98 | 99 | #err = int(frameTime / 9970.72 + 0.5) 100 | err = frameTime / 9970.74175 101 | print('{} {} {} {} {}'.format(camera.frame.index, diff, frameTime, err, self.ca)) 102 | 103 | if diff > 0 and diff < 10100: 104 | self.average += float(diff) 105 | self.averageCount += 1 106 | self.ca = self.average / float(self.averageCount) 107 | 108 | #broadcastSocket.sendto('test', ('192.168.1.255', 45455)) 109 | #broadcastSocket.sendto('{}'.format(camera.frame.index), ('255.255.255.255', 45455)) 110 | 111 | #if diff > 10100: 112 | #print('FRAME SKIPPED {}'.format(diff)) 113 | 114 | self.lastFrameTime = frameTime 115 | 116 | except Exception,e: 117 | print "Ex:" + str(e) 118 | traceback.print_exc() 119 | 120 | def flush(self): 121 | print('{} Bytes Written'.format(self.size)) 122 | 123 | #------------------------------------------------------------------------------ 124 | # Camera & Main Loop. 125 | #------------------------------------------------------------------------------ 126 | print('High FPS Capture') 127 | 128 | 129 | blobdetect.startworkers() 130 | 131 | camera = picamera.PiCamera(sensor_mode = 6, clock_mode = 'raw') 132 | camera.sensor_mode = 6 133 | camera.rotation = 180 134 | camera.resolution = (1024, 704) 135 | camera.framerate = 100 136 | camera.awb_mode = 'off' 137 | camera.awb_gains = (1.1, 1.1) 138 | camera.exposure_mode = 'sports' 139 | camera.framerate_delta = 0 140 | camera.shutter_speed = 7000 141 | camera.iso = 800 142 | 143 | camTimeOffset = camera.timestamp 144 | 145 | print(str(camTimeOffset)) 146 | 147 | while True: 148 | 149 | #camera.start_recording(CamOutput(), format='h264', quality=25, profile='high', level='4.2', inline_headers=True, intra_period=50) 150 | camera.start_recording(CamOutput(), format='yuv') 151 | #print('Start Time {0:.2f}ms'.format(t)) 152 | 153 | while True: 154 | time.sleep(1.0) 155 | 156 | print('Stopped Camera') 157 | camera.stop_recording() -------------------------------------------------------------------------------- /PyUtilities/blobCode.cpp: -------------------------------------------------------------------------------- 1 | 2 | uint8_t _frameBuffer[1024 * 704]; 3 | 4 | typedef struct 5 | { 6 | float minX; 7 | float minY; 8 | float maxX; 9 | float maxY; 10 | float cX; 11 | float cY; 12 | 13 | } blob; 14 | 15 | int blobCount = 0; 16 | blob blobs[256]; 17 | 18 | float distSq(float X1, float Y1, float X2, float Y2) 19 | { 20 | return (X2 - X1) * (X2 - X1) + (Y2 - Y1) * (Y2 - Y1); 21 | } 22 | 23 | void ProcessPixels(void) 24 | { 25 | int width = 1024; 26 | int height = 704; 27 | //int pixelCount = width * height; 28 | 29 | int brightPixels = 0; 30 | 31 | blobCount = 0; 32 | 33 | for (int y = 0; y < height; ++y) 34 | { 35 | for (int x = 0; x < width; ++x) 36 | { 37 | //if (buffer[y * width + x] > 95 && buffer[y * width + x] < 189) 38 | if (_frameBuffer[y * width + x] > 60) 39 | { 40 | ++brightPixels; 41 
| 42 | bool blobFound = false; 43 | 44 | // Could check all blobs and find closest. 45 | 46 | for (int b = 0; b < blobCount; ++b) 47 | { 48 | blob* cb = blobs + b; 49 | 50 | float cx = (cb->minX + cb->maxX) / 2; 51 | float cy = (cb->minY + cb->maxY) / 2; 52 | float d = distSq(x, y, cx, cy); 53 | 54 | if (d < 8 * 8) 55 | { 56 | if (x < cb->minX) cb->minX = x; 57 | if (y < cb->minY) cb->minY = y; 58 | if (x > cb->maxX) cb->maxX = x; 59 | if (y > cb->maxY) cb->maxY = y; 60 | 61 | blobFound = true; 62 | break; 63 | } 64 | } 65 | 66 | if (!blobFound) 67 | { 68 | blobs[blobCount].minX = x; 69 | blobs[blobCount].minY = y; 70 | blobs[blobCount].maxX = x; 71 | blobs[blobCount].maxY = y; 72 | 73 | ++blobCount; 74 | } 75 | } 76 | } 77 | } 78 | 79 | for (int i = 0; i < blobCount; ++i) 80 | { 81 | blob* b = blobs + i; 82 | 83 | b->cX = 0.0f; 84 | b->cY = 0.0f; 85 | 86 | float totalWeight = 0.0f; 87 | // Count total weight 88 | for (int y = b->minY; y < b->maxY + 1; ++y) 89 | { 90 | for (int x = b->minX; x < b->maxX + 1; ++x) 91 | { 92 | uint8_t p = _frameBuffer[y * width + x]; 93 | if (p > 60) 94 | { 95 | totalWeight += p; 96 | } 97 | } 98 | } 99 | 100 | for (int y = blobs[i].minY; y < blobs[i].maxY + 1; ++y) 101 | { 102 | for (int x = blobs[i].minX; x < blobs[i].maxX + 1; ++x) 103 | { 104 | uint8_t p = _frameBuffer[y * width + x]; 105 | if (p > 60) 106 | { 107 | float pixelV = p; 108 | 109 | b->cX += x * (pixelV / totalWeight); 110 | b->cY += y * (pixelV / totalWeight); 111 | } 112 | } 113 | } 114 | } 115 | 116 | //printf("%d %d %d %d\n", brightPixels, blobCount, (int)t1, (int)startTime); 117 | } -------------------------------------------------------------------------------- /PyUtilities/high.py: -------------------------------------------------------------------------------- 1 | #------------------------------------------------------------------------------ 2 | # Marker Tracker. 3 | #------------------------------------------------------------------------------ 4 | import io 5 | from io import BytesIO 6 | import struct 7 | import time 8 | import picamera 9 | import sys 10 | import select 11 | import threading 12 | import pprint 13 | import numpy as np 14 | import os 15 | import re 16 | import subprocess 17 | import zlib 18 | 19 | import socket 20 | from socket import * 21 | 22 | runCamera = False 23 | serverSocket = None 24 | serverFile = None 25 | 26 | #------------------------------------------------------------------------------ 27 | # Camera Output. 28 | #------------------------------------------------------------------------------ 29 | class CamOutput(object): 30 | def __init__(self): 31 | self.size = 0 32 | self.lastFrameTime = 0 33 | self.adjusting = 0 34 | 35 | def write(self, s): 36 | global runCamera 37 | global serverFile 38 | global camera 39 | 40 | frameSize = len(s) 41 | self.size += frameSize 42 | frameTime = camera.frame.timestamp 43 | 44 | try: 45 | serverFile.write(s) 46 | except: 47 | print('Lost Server Connection') 48 | runCamera = False 49 | serverFile = None 50 | 51 | if frameTime != None: 52 | diff = frameTime - self.lastFrameTime 53 | print('{} {} {}'.format(camera.frame.index, diff, frameSize)) 54 | self.lastFrameTime = frameTime 55 | 56 | def flush(self): 57 | print('{} Bytes Written'.format(self.size)) 58 | 59 | #------------------------------------------------------------------------------ 60 | # Camera & Main Loop. 
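# Note that FindServer() below returns a hard-coded address before the real
# discovery code runs (apparently a debugging shortcut); the unreachable part
# drains stale broadcast datagrams, then blocks for a 'KineticSynth:<ip>'
# announcement on UDP port 45454, the same discovery scheme mocap.py uses.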
61 | #------------------------------------------------------------------------------ 62 | print('High FPS Capture') 63 | print('Searching For Server') 64 | 65 | broadcastSocket = socket(AF_INET, SOCK_DGRAM) 66 | broadcastSocket.bind(('', 45454)) 67 | 68 | def FindServer(): 69 | 70 | return '192.168.1.100' 71 | 72 | # NOTE: We need to drain the socket as old broadcast msgs remain. 73 | broadcastSocket.setblocking(0) 74 | 75 | while True: 76 | try: 77 | recvData = broadcastSocket.recv(1024) 78 | except: 79 | break; 80 | 81 | broadcastSocket.settimeout(1) 82 | 83 | while True: 84 | try: 85 | print('Waiting for Server IP') 86 | msg = broadcastSocket.recvfrom(1024) 87 | r = re.search('KineticSynth:(\d+.\d+.\d+.\d+)', msg[0]) 88 | 89 | if r != None: 90 | print 'Found Server IP: {} from {}'.format(r.group(1), msg[1]) 91 | return r.group(1) 92 | except: 93 | continue 94 | 95 | camera = picamera.PiCamera(sensor_mode = 6, clock_mode = 'raw') 96 | camera.sensor_mode = 6 97 | camera.rotation = 180 98 | camera.resolution = (1000, 700) 99 | camera.framerate = 100 100 | camera.awb_mode = 'off' 101 | camera.awb_gains = (1.1, 1.1) 102 | camera.exposure_mode = 'sports' 103 | camera.framerate_delta = 0 104 | camera.shutter_speed = 7000 105 | camera.iso = 800 106 | #camera.shutter_speed = 23000 107 | #camera.iso = 800 108 | 109 | while True: 110 | serverIP = FindServer() 111 | serverSocket = socket() 112 | 113 | try: 114 | serverSocket.connect((serverIP, 8000)) 115 | serverSocket.setsockopt(IPPROTO_TCP, TCP_NODELAY, 1) 116 | except: 117 | print('Cant Connect') 118 | continue 119 | 120 | serverFile = serverSocket.makefile('wb') 121 | print('Connected to Server') 122 | 123 | runCamera = True 124 | camera.start_recording(CamOutput(), format='h264', quality=25, profile='high', level='4.2') 125 | while runCamera: 126 | time.sleep(1.0) 127 | 128 | print('Stopped Camera') 129 | camera.stop_recording() -------------------------------------------------------------------------------- /PyUtilities/rgbled.py: -------------------------------------------------------------------------------- 1 | #------------------------------------------------------------------------------ 2 | # RGB LED Test App. 
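# PWM timing: 10 us pulse increments over a 10000 us subcycle give 1000 steps
# per channel. The LEDs appear to be wired active-low (a full 1000-step pulse
# is dark), so channel values are inverted:
#   pulse = int((1.0 - level) * brightness) + 1000 - brightness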
3 | #------------------------------------------------------------------------------ 4 | import io 5 | import time 6 | import math 7 | import RPIO.PWM as PWM 8 | 9 | print('Starting RGB LED Test') 10 | #PWM.set_loglevel(PWM.LOG_LEVEL_DEBUG) 11 | PWM.set_loglevel(PWM.LOG_LEVEL_ERRORS) 12 | 13 | PWM.setup(pulse_incr_us=10) 14 | PWM.init_channel(0, subcycle_time_us=10000) 15 | PWM.print_channel(0) 16 | 17 | BLUE_LED = 2 18 | RED_LED = 3 19 | GREEN_LED = 4 20 | 21 | PWM.add_channel_pulse(0, BLUE_LED, 0, 1000) 22 | PWM.add_channel_pulse(0, RED_LED, 0, 1000) 23 | PWM.add_channel_pulse(0, GREEN_LED, 0, 1000) 24 | 25 | startTime = time.time() 26 | 27 | speed = 4.0 28 | brightness = 100 29 | r = 0.0 30 | g = 0.0 31 | b = 0.0 32 | 33 | while True: 34 | time.sleep(1.0 / 30.0) 35 | t = math.sin(float(time.time() - startTime) * speed) * 0.5 + 0.5 36 | tg = math.sin(float(time.time() - startTime) * speed + 1) * 0.5 + 0.5 37 | tb = math.sin(float(time.time() - startTime) * speed + 2) * 0.5 + 0.5 38 | 39 | r = t * 3 - 2 40 | if r < 0: 41 | r = 0 42 | 43 | g = tg * 3 - 2 44 | if g < 0: 45 | g = 0 46 | 47 | b = tb * 3 - 2 48 | if b < 0: 49 | b = 0 50 | 51 | # Update LEDs 52 | pulse = int((1.0 - r) * brightness) + 1000 - brightness 53 | PWM.add_channel_pulse(0, RED_LED, 0, pulse) 54 | pulse = int((1.0 - g) * brightness) + 1000 - brightness 55 | PWM.add_channel_pulse(0, GREEN_LED, 0, pulse) 56 | pulse = int((1.0 - b) * brightness) + 1000 - brightness 57 | PWM.add_channel_pulse(0, BLUE_LED, 0, pulse) 58 | 59 | 60 | -------------------------------------------------------------------------------- /PyUtilities/single.py: -------------------------------------------------------------------------------- 1 | #------------------------------------------------------------------------------ 2 | # Car Sense 3 | # Version: 0.1.0 4 | #------------------------------------------------------------------------------ 5 | # Created by Robert Gowans 2017 6 | #------------------------------------------------------------------------------ 7 | 8 | import io 9 | from io import BytesIO 10 | import socket 11 | import struct 12 | import time 13 | import picamera 14 | import sys 15 | import select 16 | import threading 17 | import pprint 18 | import numpy as np 19 | import os 20 | import re 21 | 22 | print('Car Sense Single Capture') 23 | 24 | #------------------------------------------------------------------------------ 25 | # Camera & Main Loop. 
26 | #------------------------------------------------------------------------------ 27 | 28 | with picamera.PiCamera(sensor_mode=5) as camera: 29 | 30 | camera.sensor_mode = 5 31 | camera.rotation = 180 32 | camera.resolution = (1640, 922) 33 | camera.framerate = 30 34 | #camera.shutter_speed = 200000 35 | # camera.meter_mode = 'backlit' 36 | 37 | imgNum = 0 38 | 39 | cameraStartTime = time.time() 40 | time.sleep(2) 41 | 42 | try: 43 | i = 0; 44 | while True: 45 | camera.capture('cap' + str(i) + '.jpg', use_video_port=True, resize=(1640, 922), quality=100) 46 | #camera.capture('/home/pi/preview.jpg', use_video_port=True, resize=(960, 540), quality=10) 47 | time.sleep(1) 48 | print('Captured') 49 | i += 1; 50 | 51 | finally: 52 | print('Car Sense Terminated') 53 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # VectorONE 2 | -------------------------------------------------------------------------------- /VectorSynth/.gitignore: -------------------------------------------------------------------------------- 1 | project/ 2 | 3 | ## Ignore Visual Studio temporary files, build results, and 4 | ## files generated by popular Visual Studio add-ons. 5 | ## 6 | ## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore 7 | 8 | # User-specific files 9 | *.suo 10 | *.user 11 | *.userosscache 12 | *.sln.docstates 13 | 14 | # User-specific files (MonoDevelop/Xamarin Studio) 15 | *.userprefs 16 | 17 | # Build results 18 | [Dd]ebug/ 19 | [Dd]ebugPublic/ 20 | [Rr]elease/ 21 | [Rr]eleases/ 22 | x64/ 23 | x86/ 24 | bld/ 25 | [Bb]in/ 26 | [Oo]bj/ 27 | [Ll]og/ 28 | 29 | # Visual Studio 2015 cache/options directory 30 | .vs/ 31 | # Uncomment if you have tasks that create the project's static files in wwwroot 32 | #wwwroot/ 33 | 34 | # MSTest test Results 35 | [Tt]est[Rr]esult*/ 36 | [Bb]uild[Ll]og.* 37 | 38 | # NUNIT 39 | *.VisualState.xml 40 | TestResult.xml 41 | 42 | # Build Results of an ATL Project 43 | [Dd]ebugPS/ 44 | [Rr]eleasePS/ 45 | dlldata.c 46 | 47 | # Benchmark Results 48 | BenchmarkDotNet.Artifacts/ 49 | 50 | # .NET Core 51 | project.lock.json 52 | project.fragment.lock.json 53 | artifacts/ 54 | **/Properties/launchSettings.json 55 | 56 | *_i.c 57 | *_p.c 58 | *_i.h 59 | *.ilk 60 | *.meta 61 | *.obj 62 | *.pch 63 | *.pdb 64 | *.pgc 65 | *.pgd 66 | *.rsp 67 | *.sbr 68 | *.tlb 69 | *.tli 70 | *.tlh 71 | *.tmp 72 | *.tmp_proj 73 | *.log 74 | *.vspscc 75 | *.vssscc 76 | .builds 77 | *.pidb 78 | *.svclog 79 | *.scc 80 | 81 | # Chutzpah Test files 82 | _Chutzpah* 83 | 84 | # Visual C++ cache files 85 | ipch/ 86 | *.aps 87 | *.ncb 88 | *.opendb 89 | *.opensdf 90 | *.sdf 91 | *.cachefile 92 | *.VC.db 93 | *.VC.VC.opendb 94 | 95 | # Visual Studio profiler 96 | *.psess 97 | *.vsp 98 | *.vspx 99 | *.sap 100 | 101 | # TFS 2012 Local Workspace 102 | $tf/ 103 | 104 | # Guidance Automation Toolkit 105 | *.gpState 106 | 107 | # ReSharper is a .NET coding add-in 108 | _ReSharper*/ 109 | *.[Rr]e[Ss]harper 110 | *.DotSettings.user 111 | 112 | # JustCode is a .NET coding add-in 113 | .JustCode 114 | 115 | # TeamCity is a build add-in 116 | _TeamCity* 117 | 118 | # DotCover is a Code Coverage Tool 119 | *.dotCover 120 | 121 | # AxoCover is a Code Coverage Tool 122 | .axoCover/* 123 | !.axoCover/settings.json 124 | 125 | # Visual Studio code coverage results 126 | *.coverage 127 | *.coveragexml 128 | 129 | # NCrunch 130 | _NCrunch_* 131 | 
.*crunch*.local.xml 132 | nCrunchTemp_* 133 | 134 | # MightyMoose 135 | *.mm.* 136 | AutoTest.Net/ 137 | 138 | # Web workbench (sass) 139 | .sass-cache/ 140 | 141 | # Installshield output folder 142 | [Ee]xpress/ 143 | 144 | # DocProject is a documentation generator add-in 145 | DocProject/buildhelp/ 146 | DocProject/Help/*.HxT 147 | DocProject/Help/*.HxC 148 | DocProject/Help/*.hhc 149 | DocProject/Help/*.hhk 150 | DocProject/Help/*.hhp 151 | DocProject/Help/Html2 152 | DocProject/Help/html 153 | 154 | # Click-Once directory 155 | publish/ 156 | 157 | # Publish Web Output 158 | *.[Pp]ublish.xml 159 | *.azurePubxml 160 | # Note: Comment the next line if you want to checkin your web deploy settings, 161 | # but database connection strings (with potential passwords) will be unencrypted 162 | *.pubxml 163 | *.publishproj 164 | 165 | # Microsoft Azure Web App publish settings. Comment the next line if you want to 166 | # checkin your Azure Web App publish settings, but sensitive information contained 167 | # in these scripts will be unencrypted 168 | PublishScripts/ 169 | 170 | # NuGet Packages 171 | *.nupkg 172 | # The packages folder can be ignored because of Package Restore 173 | **/packages/* 174 | # except build/, which is used as an MSBuild target. 175 | !**/packages/build/ 176 | # Uncomment if necessary however generally it will be regenerated when needed 177 | #!**/packages/repositories.config 178 | # NuGet v3's project.json files produces more ignorable files 179 | *.nuget.props 180 | *.nuget.targets 181 | 182 | # Microsoft Azure Build Output 183 | csx/ 184 | *.build.csdef 185 | 186 | # Microsoft Azure Emulator 187 | ecf/ 188 | rcf/ 189 | 190 | # Windows Store app package directories and files 191 | AppPackages/ 192 | BundleArtifacts/ 193 | Package.StoreAssociation.xml 194 | _pkginfo.txt 195 | *.appx 196 | 197 | # Visual Studio cache files 198 | # files ending in .cache can be ignored 199 | *.[Cc]ache 200 | # but keep track of directories ending in .cache 201 | !*.[Cc]ache/ 202 | 203 | # Others 204 | ClientBin/ 205 | ~$* 206 | *~ 207 | *.dbmdl 208 | *.dbproj.schemaview 209 | *.jfm 210 | *.pfx 211 | *.publishsettings 212 | orleans.codegen.cs 213 | 214 | # Since there are multiple workflows, uncomment next line to ignore bower_components 215 | # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) 216 | #bower_components/ 217 | 218 | # RIA/Silverlight projects 219 | Generated_Code/ 220 | 221 | # Backup & report files from converting an old project file 222 | # to a newer Visual Studio version. Backup files are not needed, 223 | # because we have git ;-) 224 | _UpgradeReport_Files/ 225 | Backup*/ 226 | UpgradeLog*.XML 227 | UpgradeLog*.htm 228 | 229 | # SQL Server files 230 | *.mdf 231 | *.ldf 232 | *.ndf 233 | 234 | # Business Intelligence projects 235 | *.rdl.data 236 | *.bim.layout 237 | *.bim_*.settings 238 | 239 | # Microsoft Fakes 240 | FakesAssemblies/ 241 | 242 | # GhostDoc plugin setting file 243 | *.GhostDoc.xml 244 | 245 | # Node.js Tools for Visual Studio 246 | .ntvs_analysis.dat 247 | node_modules/ 248 | 249 | # Typescript v1 declaration files 250 | typings/ 251 | 252 | # Visual Studio 6 build log 253 | *.plg 254 | 255 | # Visual Studio 6 workspace options file 256 | *.opt 257 | 258 | # Visual Studio 6 auto-generated workspace file (contains which files were open etc.) 
259 | *.vbw 260 | 261 | # Visual Studio LightSwitch build output 262 | **/*.HTMLClient/GeneratedArtifacts 263 | **/*.DesktopClient/GeneratedArtifacts 264 | **/*.DesktopClient/ModelManifest.xml 265 | **/*.Server/GeneratedArtifacts 266 | **/*.Server/ModelManifest.xml 267 | _Pvt_Extensions 268 | 269 | # Paket dependency manager 270 | .paket/paket.exe 271 | paket-files/ 272 | 273 | # FAKE - F# Make 274 | .fake/ 275 | 276 | # JetBrains Rider 277 | .idea/ 278 | *.sln.iml 279 | 280 | # CodeRush 281 | .cr/ 282 | 283 | # Python Tools for Visual Studio (PTVS) 284 | __pycache__/ 285 | *.pyc 286 | 287 | # Cake - Uncomment if you are using it 288 | # tools/** 289 | # !tools/packages.config 290 | 291 | # Tabs Studio 292 | *.tss 293 | 294 | # Telerik's JustMock configuration file 295 | *.jmconfig 296 | 297 | # BizTalk build output 298 | *.btp.cs 299 | *.btm.cs 300 | *.odx.cs 301 | *.xsd.cs 302 | -------------------------------------------------------------------------------- /VectorSynth/.qmake.stash: -------------------------------------------------------------------------------- 1 | QMAKE_DEFAULT_INCDIRS = \ 2 | "C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\INCLUDE" \ 3 | "C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\ATLMFC\\INCLUDE" \ 4 | "C:\\Program Files (x86)\\Windows Kits\\10\\include\\10.0.10240.0\\ucrt" \ 5 | "C:\\Program Files (x86)\\Windows Kits\\NETFXSDK\\4.6.1\\include\\um" \ 6 | "C:\\Program Files (x86)\\Windows Kits\\8.1\\include\\\\shared" \ 7 | "C:\\Program Files (x86)\\Windows Kits\\8.1\\include\\\\um" \ 8 | "C:\\Program Files (x86)\\Windows Kits\\8.1\\include\\\\winrt" 9 | QMAKE_DEFAULT_LIBDIRS = \ 10 | "C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\LIB" \ 11 | "C:\\Program Files (x86)\\Microsoft Visual Studio 14.0\\VC\\ATLMFC\\LIB" \ 12 | "C:\\Program Files (x86)\\Windows Kits\\10\\lib\\10.0.10240.0\\ucrt\\x86" \ 13 | "C:\\Program Files (x86)\\Windows Kits\\NETFXSDK\\4.6.1\\lib\\um\\x86" \ 14 | "C:\\Program Files (x86)\\Windows Kits\\8.1\\lib\\winv6.3\\um\\x86" 15 | -------------------------------------------------------------------------------- /VectorSynth/BundleVarying.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Rob2048/VectorONE/5a2229cdf3a5bceff6a830617d5b518536d73958/VectorSynth/BundleVarying.exe -------------------------------------------------------------------------------- /VectorSynth/VectorSynth.pro: -------------------------------------------------------------------------------- 1 | #------------------------------------------------- 2 | # 3 | # Project created by QtCreator 2017-04-27T21:39:11 4 | # 5 | #------------------------------------------------- 6 | 7 | QT += core gui \ 8 | network 9 | 10 | greaterThan(QT_MAJOR_VERSION, 4): QT += widgets 11 | 12 | TARGET = VectorSynth 13 | TEMPLATE = vcapp 14 | 15 | # The following define makes your compiler emit warnings if you use 16 | # any feature of Qt which as been marked as deprecated (the exact warnings 17 | # depend on your compiler). Please consult the documentation of the 18 | # deprecated API in order to know how to port your code away from it. 19 | DEFINES += QT_DEPRECATED_WARNINGS 20 | 21 | # You can also make your code fail to compile if you use deprecated APIs. 22 | # In order to do so, uncomment the following line. 23 | # You can also select to disable deprecated APIs only up to a certain version of Qt. 
24 | #DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000 # disables all the APIs deprecated before Qt 6.0.0 25 | 26 | 27 | SOURCES += main.cpp \ 28 | mainwindow.cpp \ 29 | cameraView.cpp \ 30 | sceneView.cpp \ 31 | decoder.cpp \ 32 | tracker.cpp \ 33 | take.cpp \ 34 | takeTracker.cpp \ 35 | timelineWidget.cpp \ 36 | objLoader.cpp \ 37 | trackerConnection.cpp \ 38 | serverThreadWorker.cpp \ 39 | liveTracker.cpp 40 | 41 | HEADERS += mainwindow.h \ 42 | cameraView.h \ 43 | sceneView.h \ 44 | decoder.h \ 45 | tracker.h \ 46 | take.h \ 47 | takeTracker.h \ 48 | timelineWidget.h \ 49 | objLoader.h \ 50 | trackerConnection.h \ 51 | serverThreadWorker.h \ 52 | liveTracker.h 53 | 54 | FORMS += mainwindow.ui 55 | 56 | 57 | CONFIG(debug, debug|release) { 58 | LIBS += -L$(OPENCV_DIR)/x64/vc14/lib \ 59 | -LE:/ffmpeg/lib \ 60 | -lopencv_world320d \ 61 | -lavcodec \ 62 | -lavdevice \ 63 | -lavfilter \ 64 | -lavformat \ 65 | -lavutil \ 66 | -lpostproc \ 67 | -lswresample \ 68 | -lswscale \ 69 | -lws2_32 70 | } 71 | else { 72 | LIBS += -L$(OPENCV_DIR)/x64/vc14/lib \ 73 | -LE:/ffmpeg/lib \ 74 | -lopencv_world320 \ 75 | -lavcodec \ 76 | -lavdevice \ 77 | -lavfilter \ 78 | -lavformat \ 79 | -lavutil \ 80 | -lpostproc \ 81 | -lswresample \ 82 | -lswscale \ 83 | -lws2_32 84 | } 85 | 86 | INCLUDEPATH += "$(OPENCV_DIR)/include" \ 87 | "E:/ffmpeg/include" -------------------------------------------------------------------------------- /VectorSynth/VectorSynth.sln: -------------------------------------------------------------------------------- 1 |  2 | Microsoft Visual Studio Solution File, Format Version 12.00 3 | # Visual Studio 14 4 | VisualStudioVersion = 14.0.24720.0 5 | MinimumVisualStudioVersion = 10.0.40219.1 6 | Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "VectorSynth", "VectorSynth.vcxproj", "{0D5697C3-41DC-3FF2-A8E3-6C73D908A422}" 7 | EndProject 8 | Global 9 | GlobalSection(SolutionConfigurationPlatforms) = preSolution 10 | Debug|x64 = Debug|x64 11 | Debug|x86 = Debug|x86 12 | Release|x64 = Release|x64 13 | Release|x86 = Release|x86 14 | EndGlobalSection 15 | GlobalSection(ProjectConfigurationPlatforms) = postSolution 16 | {0D5697C3-41DC-3FF2-A8E3-6C73D908A422}.Debug|x64.ActiveCfg = Debug|x64 17 | {0D5697C3-41DC-3FF2-A8E3-6C73D908A422}.Debug|x64.Build.0 = Debug|x64 18 | {0D5697C3-41DC-3FF2-A8E3-6C73D908A422}.Debug|x86.ActiveCfg = Debug|x64 19 | {0D5697C3-41DC-3FF2-A8E3-6C73D908A422}.Release|x64.ActiveCfg = Release|x64 20 | {0D5697C3-41DC-3FF2-A8E3-6C73D908A422}.Release|x64.Build.0 = Release|x64 21 | {0D5697C3-41DC-3FF2-A8E3-6C73D908A422}.Release|x86.ActiveCfg = Release|x64 22 | EndGlobalSection 23 | GlobalSection(SolutionProperties) = preSolution 24 | HideSolutionNode = FALSE 25 | EndGlobalSection 26 | EndGlobal 27 | -------------------------------------------------------------------------------- /VectorSynth/VectorSynth.vcxproj.filters: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | {99349809-55BA-4b9d-BF79-8FDBB0286EB3} 6 | ui 7 | false 8 | 9 | 10 | {99349809-55BA-4b9d-BF79-8FDBB0286EB3} 11 | ui 12 | false 13 | 14 | 15 | {71ED8ED8-ACB9-4CE9-BBE1-E00B30144E11} 16 | cpp;c;cxx;moc;h;def;odl;idl;res; 17 | 18 | 19 | {71ED8ED8-ACB9-4CE9-BBE1-E00B30144E11} 20 | cpp;c;cxx;moc;h;def;odl;idl;res; 21 | 22 | 23 | {93995380-89BD-4b04-88EB-625FBE52EBFB} 24 | h;hpp;hxx;hm;inl;inc;xsd 25 | 26 | 27 | {93995380-89BD-4b04-88EB-625FBE52EBFB} 28 | h;hpp;hxx;hm;inl;inc;xsd 29 | 30 | 31 | {4FC737F1-C7A5-4376-A066-2A32D752A2FF} 32 | 
cpp;c;cxx;def;odl;idl;hpj;bat;asm;asmx 33 | 34 | 35 | {4FC737F1-C7A5-4376-A066-2A32D752A2FF} 36 | cpp;c;cxx;def;odl;idl;hpj;bat;asm;asmx 37 | 38 | 39 | 40 | 41 | Source Files 42 | 43 | 44 | Source Files 45 | 46 | 47 | Source Files 48 | 49 | 50 | Source Files 51 | 52 | 53 | Source Files 54 | 55 | 56 | Source Files 57 | 58 | 59 | Source Files 60 | 61 | 62 | Source Files 63 | 64 | 65 | Source Files 66 | 67 | 68 | Source Files 69 | 70 | 71 | Source Files 72 | 73 | 74 | Source Files 75 | 76 | 77 | Source Files 78 | 79 | 80 | 81 | 82 | Header Files 83 | 84 | 85 | Header Files 86 | 87 | 88 | Header Files 89 | 90 | 91 | Header Files 92 | 93 | 94 | Header Files 95 | 96 | 97 | Header Files 98 | 99 | 100 | Header Files 101 | 102 | 103 | Header Files 104 | 105 | 106 | Header Files 107 | 108 | 109 | Header Files 110 | 111 | 112 | Header Files 113 | 114 | 115 | Header Files 116 | 117 | 118 | 119 | 120 | Generated Files 121 | 122 | 123 | Generated Files 124 | 125 | 126 | Generated Files 127 | 128 | 129 | Generated Files 130 | 131 | 132 | Generated Files 133 | 134 | 135 | Generated Files 136 | 137 | 138 | Generated Files 139 | 140 | 141 | Generated Files 142 | 143 | 144 | Generated Files 145 | 146 | 147 | Generated Files 148 | 149 | 150 | Generated Files 151 | 152 | 153 | Generated Files 154 | 155 | 156 | Generated Files 157 | 158 | 159 | 160 | 161 | Form Files 162 | 163 | 164 | -------------------------------------------------------------------------------- /VectorSynth/assets/shaders/basic.frag: -------------------------------------------------------------------------------- 1 | precision mediump float; 2 | 3 | uniform vec4 u_color; 4 | 5 | varying vec4 v_color; 6 | 7 | void main() 8 | { 9 | gl_FragColor = v_color * u_color; 10 | //gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0); 11 | } -------------------------------------------------------------------------------- /VectorSynth/assets/shaders/basic.vert: -------------------------------------------------------------------------------- 1 | uniform mat4 mvp_matrix; 2 | 3 | attribute vec3 a_position; 4 | attribute vec4 a_color; 5 | 6 | varying vec4 v_color; 7 | 8 | void main() 9 | { 10 | gl_PointSize = 3.0; 11 | gl_Position = mvp_matrix * vec4(a_position, 1); 12 | v_color = a_color; 13 | } -------------------------------------------------------------------------------- /VectorSynth/assets/shaders/font.frag: -------------------------------------------------------------------------------- 1 | precision mediump float; 2 | 3 | uniform vec4 u_color; 4 | uniform sampler2D texMain; 5 | 6 | varying vec2 v_uvs; 7 | varying vec4 v_color; 8 | 9 | void main() 10 | { 11 | vec4 diffuse = texture2D(texMain, vec2(v_uvs)); 12 | float alpha = diffuse.r * u_color.a * v_color.a; 13 | 14 | //gl_FragColor = vec4(v_color.rgb, 1); 15 | //gl_FragColor = vec4(alpha, alpha, alpha, 1); 16 | gl_FragColor = vec4(u_color.rgb * v_color.rgb, alpha); 17 | } 18 | -------------------------------------------------------------------------------- /VectorSynth/assets/shaders/font.vert: -------------------------------------------------------------------------------- 1 | attribute vec3 a_position; 2 | attribute vec2 a_uvs; 3 | attribute vec4 a_color; 4 | 5 | varying vec2 v_uvs; 6 | varying vec4 v_color; 7 | 8 | void main() 9 | { 10 | vec2 pos = a_position.xy * vec2(2, 2) - vec2(1, -1); 11 | gl_Position = vec4(pos, 0.0, 1.0); 12 | v_uvs = a_uvs; 13 | v_color = a_color; 14 | } -------------------------------------------------------------------------------- /VectorSynth/assets/shaders/minimal.frag: 
-------------------------------------------------------------------------------- 1 | precision mediump float; 2 | 3 | uniform vec4 u_color; 4 | 5 | void main() 6 | { 7 | gl_FragColor = vec4(1.0, 1.0, 1.0, 1.0) * u_color; 8 | } -------------------------------------------------------------------------------- /VectorSynth/assets/shaders/minimal.vert: -------------------------------------------------------------------------------- 1 | uniform mat4 u_mvp_mat; 2 | 3 | attribute vec3 a_position; 4 | 5 | void main() 6 | { 7 | gl_Position = u_mvp_mat * vec4(a_position, 1); 8 | } -------------------------------------------------------------------------------- /VectorSynth/assets/shaders/shaded.frag: -------------------------------------------------------------------------------- 1 | precision mediump float; 2 | 3 | uniform vec4 u_color; 4 | uniform sampler2D texMain; 5 | 6 | varying vec2 v_uvs; 7 | varying vec3 v_normal; 8 | varying vec3 v_world_pos; 9 | 10 | void main() 11 | { 12 | vec4 diffuse = texture2D(texMain, vec2(v_uvs)); 13 | 14 | vec3 lightPos = vec3(-10.0, -10.0, 10.0); 15 | vec3 lightDir = normalize(lightPos - v_world_pos); 16 | float lightIntensity = max(dot(v_normal, lightDir), 0.0); 17 | //vec3 d = lightIntensity * v_color * u_color.rgb; 18 | //vec3 a = 0.3 * v_color * u_color.rgb; 19 | 20 | //vec3 surfaceColor = u_color.rgb; 21 | vec3 surfaceColor = diffuse.rgb; 22 | 23 | vec3 d = lightIntensity * surfaceColor * 0.5; 24 | vec3 a = 0.7 * surfaceColor; 25 | 26 | gl_FragColor = vec4(surfaceColor, 1.0); 27 | //gl_FragColor = vec4(min(d + a, 1.0), 1.0); 28 | 29 | //gl_FragColor = vec4(v_uvs, 0.0, 1.0); 30 | //gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0); 31 | } -------------------------------------------------------------------------------- /VectorSynth/assets/shaders/shaded.vert: -------------------------------------------------------------------------------- 1 | uniform mat4 u_mvp_mat; 2 | uniform mat4 u_model_mat; 3 | 4 | attribute vec3 a_position; 5 | attribute vec2 a_uvs; 6 | attribute vec3 a_normal; 7 | 8 | varying vec2 v_uvs; 9 | varying vec3 v_normal; 10 | varying vec3 v_world_pos; 11 | 12 | void main() 13 | { 14 | gl_Position = u_mvp_mat * vec4(a_position, 1); 15 | v_uvs = a_uvs; 16 | v_uvs.y = 1.0 - v_uvs.y; 17 | v_normal = normalize((u_model_mat * vec4(a_normal, 0)).xyz); 18 | v_world_pos = (u_model_mat * vec4(a_position, 1)).xyz; 19 | } -------------------------------------------------------------------------------- /VectorSynth/assets/shaders/textured.frag: -------------------------------------------------------------------------------- 1 | precision mediump float; 2 | 3 | uniform vec4 u_color; 4 | uniform sampler2D texMain; 5 | 6 | varying vec2 v_uvs; 7 | 8 | void main() 9 | { 10 | vec4 diffuse = texture2D(texMain, vec2(v_uvs)); 11 | float alpha = diffuse.r * u_color.a; 12 | 13 | gl_FragColor = vec4(u_color.rgb * alpha, alpha); 14 | } 15 | -------------------------------------------------------------------------------- /VectorSynth/assets/shaders/textured.vert: -------------------------------------------------------------------------------- 1 | uniform mat4 u_mvp_mat; 2 | 3 | attribute vec3 a_position; 4 | attribute vec2 a_uvs; 5 | 6 | varying vec2 v_uvs; 7 | 8 | void main() 9 | { 10 | gl_Position = u_mvp_mat * vec4(a_position, 1); 11 | v_uvs = a_uvs; 12 | v_uvs.y = 1.0 - v_uvs.y; 13 | } -------------------------------------------------------------------------------- /VectorSynth/cameraView.cpp: -------------------------------------------------------------------------------- 1 | 
#include "cameraView.h" 2 | #include 3 | #include 4 | #include 5 | #include "mainwindow.h" 6 | 7 | QT_USE_NAMESPACE 8 | 9 | CameraView::CameraView(QWidget* Parent, MainWindow* Main) : 10 | QWidget(Parent), 11 | main(Main) 12 | { 13 | setBackgroundRole(QPalette::Base); 14 | setAutoFillBackground(true); 15 | setMouseTracking(true); 16 | 17 | setMinimumSize(200, 200); 18 | 19 | _mainFont = QFont("Arial", 8); 20 | _detailFont = QFont("Arial", 7); 21 | _largeFont = QFont("Arial", 50); 22 | _medFont = QFont("Arial", 20); 23 | mode = 0; 24 | take = 0; 25 | hoveredId = -1; 26 | 27 | setSizePolicy(QSizePolicy::Preferred, QSizePolicy::MinimumExpanding); 28 | 29 | viewZoom = 0.4f; 30 | viewTranslate = QVector2D(-VID_W / 2, -VID_H / 2); 31 | 32 | _mouseLeft = false; 33 | _mouseRight = false; 34 | _mouseDownTrackerId = 0; 35 | _editMaskMode = 0; 36 | 37 | showMask = false; 38 | markerMode = 0; 39 | showPixelGrid = false; 40 | showCamRays = false; 41 | showDistortedMarkers = false; 42 | showUndistortedMarkers = true; 43 | showReprojectedMarkers = true; 44 | alignHorizontal = false; 45 | } 46 | 47 | QPointF CameraView::_GetVPointF(float X, float Y) 48 | { 49 | QVector2D point = (_vt * QVector3D(X, Y, 1)).toVector2D(); 50 | return QPointF(point.x(), point.y()); 51 | } 52 | 53 | QPoint CameraView::_GetVPoint(float X, float Y) 54 | { 55 | QPointF point = _GetVPointF(X, Y); 56 | return QPoint(point.x(), point.y()); 57 | } 58 | 59 | void CameraView::setMask(bool Visible) 60 | { 61 | showMask = Visible; 62 | update(); 63 | } 64 | 65 | void CameraView::setPixelGrid(bool Visible) 66 | { 67 | showPixelGrid = Visible; 68 | update(); 69 | } 70 | 71 | void CameraView::setMarkerMode(int Mode) 72 | { 73 | markerMode = Mode; 74 | update(); 75 | } 76 | 77 | void CameraView::paintEvent(QPaintEvent* Event) 78 | { 79 | Q_UNUSED(Event); 80 | 81 | QPainter qp(this); 82 | qp.fillRect(0, 0, width(), height(), QColor(28, 30, 32)); 83 | 84 | //qp.setPen(QPen(QColor(28 + 8, 30 + 8, 32 + 8), 2.0f / viewZoom)); 85 | qp.setPen(QPen(QColor(100, 30 + 8, 32 + 8), 2.0f / viewZoom)); 86 | qp.setFont(_medFont); 87 | 88 | if (take->isLive) 89 | { 90 | qp.drawText(5, 30, "LIVE"); 91 | } 92 | else 93 | { 94 | qp.drawText(5, 30, "PRERECORDED"); 95 | } 96 | 97 | qp.setFont(_mainFont); 98 | 99 | qp.translate(width() / 2, height() / 2); 100 | qp.scale(viewZoom, viewZoom); 101 | qp.translate(viewTranslate.x(), viewTranslate.y()); 102 | _vt = qp.transform(); 103 | 104 | int trackerCount = 0; 105 | 106 | for (std::map::iterator it = take->liveTrackers.begin(); it != take->liveTrackers.end(); ++it) 107 | { 108 | LiveTracker* tracker = it->second; 109 | 110 | float tX = 0; 111 | float tY = 0; 112 | 113 | if (alignHorizontal) 114 | tY = trackerCount * (VID_H + 10); 115 | else 116 | tX = trackerCount * (VID_W + 10); 117 | 118 | qp.setTransform(_vt); 119 | 120 | QImage img = QImage((uchar*)tracker->frameData, VID_W, VID_H, QImage::Format::Format_RGB888); 121 | qp.drawImage(QPointF(tX + -0.5f, tY + -0.5f), img); 122 | 123 | if (showMask) 124 | { 125 | QImage maskImg = QImage((uchar*)tracker->maskVisualData, 64, 44, QImage::Format::Format_RGBA8888); 126 | qp.drawImage(QRectF(tX + -0.5f, tY + -0.5f, VID_W, VID_H), maskImg); 127 | } 128 | 129 | qp.setPen(QPen(QColor(50, 50, 50), 1.0f / viewZoom)); 130 | 131 | if (showPixelGrid) 132 | { 133 | for (int i = 1; i <= 1024; ++i) 134 | { 135 | qp.drawLine(QPointF(tX + i - 0.5f, tY - 0.5f), QPointF(tX + i - 0.5f, tY + 704 - 0.5f)); 136 | } 137 | 138 | for (int i = 1; i <= 704; ++i) 139 | { 140 | 
qp.drawLine(QPointF(tX - 0.5f, tY + i - 0.5f), QPointF(tX + 1024 - 0.5f, tY + i - 0.5f)); 141 | } 142 | } 143 | 144 | if (!tracker->connected && !tracker->loaded) 145 | { 146 | qp.setPen(QPen(QColor(200, 50, 50), 2.0f / viewZoom)); 147 | qp.setFont(_largeFont); 148 | 149 | qp.drawText(tX + VID_W / 2 - 200, tY + VID_H / 2, "Not Connected"); 150 | } 151 | else if (tracker->id == hoveredId) 152 | { 153 | if (tracker->selected) 154 | qp.setPen(QPen(QColor(100, 200, 255), 2.0f / viewZoom)); 155 | else 156 | qp.setPen(QPen(QColor(200, 200, 200), 2.0f / viewZoom)); 157 | } 158 | else 159 | { 160 | if (tracker->selected) 161 | qp.setPen(QPen(QColor(50, 150, 255), 2.0f / viewZoom)); 162 | else 163 | qp.setPen(QPen(QColor(100, 100, 100), 2.0f / viewZoom)); 164 | } 165 | 166 | qp.setBrush(Qt::BrushStyle::NoBrush); 167 | qp.drawRect(QRectF(tX - 0.5f, tY + -0.5f, VID_W + 1.0, VID_H + 1.0)); 168 | 169 | qp.setPen(QPen(QColor(255, 100, 255), 1.0f / viewZoom)); 170 | 171 | //_vt = qp.transform(); 172 | 173 | 174 | //qp.resetTransform(); 175 | 176 | // NOTE: Realtime blobs. 177 | if (tracker->markerDataSize > 0) 178 | { 179 | blobDataHeader* blobHeader = (blobDataHeader*)tracker->markerData; 180 | blob* blobs = (blob*)(tracker->markerData + sizeof(blobDataHeader)); 181 | region* regions = (region*)(tracker->markerData + sizeof(blobDataHeader) + (sizeof(blob) * blobHeader->blobCount)); 182 | 183 | //qDebug() << blobHeader->foundRegionCount; 184 | 185 | qp.setRenderHint(QPainter::Antialiasing, true); 186 | qp.setBrush(QBrush(QColor::fromRgb(255, 0, 255), Qt::BrushStyle::SolidPattern)); 187 | //qp.setBrush(QBrush(QColor::fromRgb(255, 255, 255), Qt::BrushStyle::SolidPattern)); 188 | qp.setPen(Qt::PenStyle::NoPen); 189 | 190 | for (int i = 0; i < blobHeader->blobCount; ++i) 191 | { 192 | //qp.drawEllipse(_GetVPointF(tX + blobs[i].cX, blobs[i].cY), 2.0f, 2.0f); 193 | qp.drawEllipse(QPointF(tX + blobs[i].cX, tY + blobs[i].cY), 2.0f, 2.0f); 194 | } 195 | 196 | qp.setRenderHint(QPainter::Antialiasing, false); 197 | 198 | qp.setBrush(Qt::NoBrush); 199 | qp.setPen(QPen(QColor(200, 200, 200), 1.0f)); 200 | 201 | for (int i = 0; i < blobHeader->foundRegionCount; ++i) 202 | { 203 | region* r = ®ions[i]; 204 | qp.drawRect(tX + r->minX, tY + r->minY, r->width, r->height); 205 | } 206 | } 207 | 208 | qp.resetTransform(); 209 | 210 | qp.setPen(QPen(QColor(200, 200, 200), 1.0f)); 211 | qp.setFont(_mainFont); 212 | 213 | char infoText[256]; 214 | sprintf(infoText, "%s (0x%X) V%d FPS: %3d Data %7.2f KB Frame: %7lld Sync: %d", tracker->name.toUtf8().data(), tracker->serial, tracker->version, (int)tracker->fps, tracker->dataRecv, tracker->latestFrameId, (int)tracker->avgMasterOffset); 215 | qp.drawText(_GetVPoint(tX, tY + -5), infoText); 216 | 217 | if (tracker->markerDataSize > 0) 218 | { 219 | blobDataHeader* blobHeader = (blobDataHeader*)tracker->markerData; 220 | qp.drawText(_GetVPoint(tX, tY + 10) + QPoint(0, 0), "Markers: " + QString::number(blobHeader->blobCount)); 221 | qp.drawText(_GetVPoint(tX, tY + 10) + QPoint(0, 15), "Regions: " + QString::number(blobHeader->regionCount)); 222 | qp.drawText(_GetVPoint(tX, tY + 10) + QPoint(0, 30), "Total time: " + QString::number(blobHeader->totalTime)); 223 | } 224 | 225 | trackerCount++; 226 | } 227 | 228 | if (take) 229 | { 230 | qp.setFont(_detailFont); 231 | qp.setRenderHint(QPainter::Antialiasing, true); 232 | 233 | for (int t = 0; t < take->trackers.count(); ++t) 234 | { 235 | TakeTracker* tracker = take->trackers[t]; 236 | 237 | if (tracker->vidFrameData.count() > 0) 238 | { 
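				// The tX/tY offsets below place this take tracker's marker overlay on the same tile as its camera image above (stacked horizontally or vertically to match the layout).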
239 | 				float tX = 0;
240 | 				float tY = 0;
241 | 
242 | 				if (alignHorizontal)
243 | 					tY = t * (VID_H + 10);
244 | 				else
245 | 					tX = t * (VID_W + 10);
246 | 
247 | 				//qp.setBrush(QBrush(QColor::fromRgb(255, 128, 32), Qt::BrushStyle::SolidPattern));
248 | 				//qp.setPen(Qt::PenStyle::NoPen);
249 | 
250 | 				for (int fI = tracker->drawMarkerFrameIndex; fI <= tracker->drawMarkerFrameIndex; ++fI)
251 | 				{
252 | 					if (fI >= 0)
253 | 					{
254 | 						qp.setBrush(Qt::BrushStyle::NoBrush);
255 | 
256 | 						// Gravity centroids.
257 | 						for (int i = 0; i < tracker->vidFrameData[fI].newMarkers.count(); ++i)
258 | 						{
259 | 							qp.setPen(Qt::PenStyle::NoPen);
260 | 
261 | 							Marker2D* m = &tracker->vidFrameData[fI].newMarkers[i];
262 | 
263 | 							//qDebug() << tracker->drawMarkerFrameIndex;
264 | 
265 | 							// Distorted position
266 | 							if (showDistortedMarkers)
267 | 							{
268 | 								qp.setBrush(QBrush(QColor::fromRgb(255, 0, 255), Qt::BrushStyle::SolidPattern));
269 | 								qp.drawEllipse(_GetVPointF(tX + m->distPos.x(), tY + m->distPos.y()), 2.0f * viewZoom, 2.0f * viewZoom);
270 | 							}
271 | 
272 | 							// Undistorted position
273 | 							if (showUndistortedMarkers)
274 | 							{
275 | 								qp.setBrush(QBrush(QColor::fromRgb(0, 255, 0), Qt::BrushStyle::SolidPattern));
276 | 								qp.drawEllipse(_GetVPointF(tX + m->pos.x(), tY + m->pos.y()), 2.0f * viewZoom, 2.0f * viewZoom);
277 | 							}
278 | 						}
279 | 					}
280 | 				}
281 | 			}
282 | 		}
283 | 
284 | 		qp.setPen(QPen(QColor::fromRgb(255, 0, 255)));
285 | 		qp.setBrush(Qt::BrushStyle::NoBrush);
286 | 
287 | 		// Project 3D Markers onto camera image.
288 | 		if (showReprojectedMarkers)
289 | 		{
290 | 			qp.setTransform(_vt);
291 | 			qp.setBrush(QBrush(QColor::fromRgb(100, 100, 255), Qt::BrushStyle::SolidPattern));
292 | 			qp.setPen(Qt::PenStyle::NoPen);
293 | 
294 | 			for (int t = 0; t < take->trackers.count(); ++t)
295 | 			{
296 | 				TakeTracker* tracker = take->trackers[t];
297 | 
298 | 				float tX = 0;
299 | 				float tY = 0;
300 | 
301 | 				if (alignHorizontal)
302 | 					tY = t * (VID_H + 10);
303 | 				else
304 | 					tX = t * (VID_W + 10);
305 | 
306 | 				for (int m = 0; m < take->markers[timelineFrame].size(); ++m)
307 | 				{
308 | 					QVector3D markerPos = take->markers[timelineFrame][m].pos;
309 | 
310 | 					cv::Mat wp(4, 1, CV_64F);
311 | 					wp.at<double>(0) = markerPos.x();
312 | 					wp.at<double>(1) = markerPos.y();
313 | 					wp.at<double>(2) = markerPos.z();
314 | 					wp.at<double>(3) = 1.0;
315 | 
316 | 					cv::Mat imgPt = tracker->projMat * wp;
317 | 					float x = imgPt.at<double>(0) / imgPt.at<double>(2);
318 | 					float y = imgPt.at<double>(1) / imgPt.at<double>(2);
319 | 
320 | 					qp.drawEllipse(QPointF(tX + x, tY + y), 2.0f, 2.0f);
321 | 				}
322 | 			}
323 | 
324 | 			qp.resetTransform();
325 | 		}
326 | 
327 | 		// Project Rays from camera pair onto camera image.
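		// Each other camera's centre projects to this view's epipole, and the segments drawn from it through
		// that camera's 2D markers trace their epipolar lines, so matching markers in this view should lie
		// on or near one of these segments.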
328 | 		if (showCamRays)
329 | 		{
330 | 			for (int t = 0; t < take->trackers.count(); ++t)
331 | 			{
332 | 				TakeTracker* tracker = take->trackers[t];
333 | 				float tX = 0;
334 | 				float tY = 0;
335 | 
336 | 				if (alignHorizontal)
337 | 					tY = t * (VID_H + 10);
338 | 				else
339 | 					tX = t * (VID_W + 10);
340 | 
341 | 				// Draw rays from all other trackers
342 | 				for (int tP = 0; tP < take->trackers.count(); ++tP)
343 | 				{
344 | 					if (tP == t)
345 | 						continue;
346 | 
347 | 					TakeTracker* otherTracker = take->trackers[tP];
348 | 
349 | 					QPointF oto;
350 | 					QPointF refOto;
351 | 
352 | 					{
353 | 						QVector3D camPos = otherTracker->worldPos;
354 | 
355 | 						cv::Mat wp(4, 1, CV_64F);
356 | 						wp.at<double>(0) = camPos.x();
357 | 						wp.at<double>(1) = camPos.y();
358 | 						wp.at<double>(2) = camPos.z();
359 | 						wp.at<double>(3) = 1.0;
360 | 
361 | 						cv::Mat imgPt = tracker->projMat * wp;
362 | 						float x = imgPt.at<double>(0) / imgPt.at<double>(2);
363 | 						float y = imgPt.at<double>(1) / imgPt.at<double>(2);
364 | 
365 | 						oto = _GetVPointF(tX + x, tY + y);
366 | 
367 | 						qp.setPen(QPen(QColor::fromRgb(255, 255, 0)));
368 | 						qp.drawEllipse(oto, 3.0f, 3.0f);
369 | 					}
370 | 
371 | 					for (int iM = 0; iM < otherTracker->vidFrameData[otherTracker->drawMarkerFrameIndex].newMarkers.size(); ++iM)
372 | 					{
373 | 						Marker2D* m = &otherTracker->vidFrameData[otherTracker->drawMarkerFrameIndex].newMarkers[iM];
374 | 
375 | 						{
376 | 							QVector3D camPos = m->worldRayD * 6.0f + otherTracker->worldPos;
377 | 
378 | 							cv::Mat wp(4, 1, CV_64F);
379 | 							wp.at<double>(0) = camPos.x();
380 | 							wp.at<double>(1) = camPos.y();
381 | 							wp.at<double>(2) = camPos.z();
382 | 							wp.at<double>(3) = 1.0;
383 | 
384 | 							cv::Mat imgPt = tracker->projMat * wp;
385 | 							float x = imgPt.at<double>(0) / imgPt.at<double>(2);
386 | 							float y = imgPt.at<double>(1) / imgPt.at<double>(2);
387 | 
388 | 							QPointF rayE = _GetVPointF(tX + x, tY + y);
389 | 
390 | 							qp.setPen(QPen(QColor::fromRgb(255, 255, 0)));
391 | 							qp.drawEllipse(rayE, 3.0f, 3.0f);
392 | 							qp.drawLine(oto, rayE);
393 | 						}
394 | 					}
395 | 				}
396 | 			}
397 | 		}
398 | 
399 | 		qp.setRenderHint(QPainter::Antialiasing, false);
400 | 	}
401 | }
402 | 
403 | void CameraView::mousePressEvent(QMouseEvent* Event)
404 | {
405 | 	if (_mouseLeft || _mouseRight)
406 | 		return;
407 | 
408 | 	_mouseDownPos = Event->localPos();
409 | 	_mouseMovedPos = _mouseDownPos;
410 | 
411 | 	if (Event->button() == Qt::MouseButton::LeftButton)
412 | 	{
413 | 		_mouseLeft = true;
414 | 		_editMaskMode = 0;
415 | 
416 | 		QVector2D trackerSpace;
417 | 		LiveTracker* tracker = _GetTracker(Event->localPos().x(), Event->localPos().y(), &trackerSpace);
418 | 
419 | 		if (tracker)
420 | 		{
421 | 			_mouseDownTrackerId = tracker->id;
422 | 			int mX = trackerSpace.x() * 64;
423 | 			int mY = trackerSpace.y() * 44;
424 | 
425 | 			if (mX >= 0 && mX < 64 && mY >= 0 && mY < 44 && tracker->interactMode == 1)
426 | 			{
427 | 				if (tracker->getMask(mX, mY))
428 | 				{
429 | 					_editMaskMode = 1;
430 | 					_ChangeMask(tracker, mX, mY, false);
431 | 				}
432 | 				else
433 | 				{
434 | 					_editMaskMode = 2;
435 | 					_ChangeMask(tracker, mX, mY, true);
436 | 				}
437 | 			}
438 | 		}
439 | 
440 | 	}
441 | 	else if (Event->button() == Qt::MouseButton::RightButton)
442 | 	{
443 | 		_mouseRight = true;
444 | 	}
445 | }
446 | 
447 | void CameraView::mouseReleaseEvent(QMouseEvent* Event)
448 | {
449 | 	QPointF moveDelta = (_mouseDownPos - _mouseMovedPos);
450 | 	float moveDeltaLengthSq = moveDelta.x() * moveDelta.x() + moveDelta.y() * moveDelta.y();
451 | 	bool moved = (moveDeltaLengthSq > 2 * 2);
452 | 
453 | 	if (_mouseLeft && Event->button() == Qt::MouseButton::LeftButton)
454 | 	{
455 | 		_mouseLeft = false;
456 | 
457 | 		QVector2D trackerSpace;
458 | 		LiveTracker* tracker = _GetTracker(Event->localPos().x(), Event->localPos().y(), &trackerSpace);
459 | 
460
| if (!moved) 461 | { 462 | _DeselectTrackers(); 463 | 464 | if (tracker) 465 | { 466 | tracker->selected = true; 467 | main->selectTracker(tracker); 468 | } 469 | } 470 | } 471 | else if (_mouseRight && Event->button() == Qt::MouseButton::RightButton) 472 | { 473 | _mouseRight = false; 474 | 475 | if (!moved) 476 | { 477 | LiveTracker* tracker = _GetTracker(Event->localPos().x(), Event->localPos().y()); 478 | 479 | if (tracker) 480 | { 481 | QMenu contextMenu("Tracker View", this); 482 | 483 | QAction* active = contextMenu.addAction("Active"); 484 | active->setCheckable(true); 485 | active->setChecked(tracker->active); 486 | 487 | contextMenu.addSeparator(); 488 | QAction* video = contextMenu.addAction("Video"); 489 | video->setCheckable(true); 490 | video->setChecked(false); 491 | 492 | QAction* markers = contextMenu.addAction("Markers"); 493 | markers->setCheckable(true); 494 | markers->setChecked(false); 495 | 496 | contextMenu.addSeparator(); 497 | QAction* interactNone = contextMenu.addAction("None"); 498 | interactNone->setCheckable(true); 499 | interactNone->setChecked(tracker->interactMode == 0); 500 | 501 | QAction* interactMasks = contextMenu.addAction("Edit Mask"); 502 | interactMasks->setCheckable(true); 503 | interactMasks->setChecked(tracker->interactMode == 1); 504 | 505 | QAction* interactMarkers = contextMenu.addAction("Select Markers"); 506 | interactMarkers->setCheckable(true); 507 | interactMarkers->setChecked(tracker->interactMode == 2); 508 | 509 | QAction* result = contextMenu.exec(Event->globalPos()); 510 | 511 | if (result == video) 512 | { 513 | main->viewFeed(tracker->id, 1); 514 | } 515 | else if (result == markers) 516 | { 517 | main->viewFeed(tracker->id, 2); 518 | } 519 | else if (result == interactNone) 520 | { 521 | tracker->interactMode = 0; 522 | } 523 | else if (result == interactMasks) 524 | { 525 | tracker->interactMode = 1; 526 | } 527 | else if (result == interactMarkers) 528 | { 529 | tracker->interactMode = 2; 530 | } 531 | } 532 | } 533 | } 534 | } 535 | 536 | void CameraView::mouseMoveEvent(QMouseEvent* Event) 537 | { 538 | QPointF delta = Event->localPos() - _mouseMovedPos; 539 | _mouseMovedPos = Event->localPos(); 540 | 541 | if (_mouseLeft) 542 | { 543 | QVector2D trackerSpace; 544 | LiveTracker* tracker = _GetTracker(Event->localPos().x(), Event->localPos().y(), &trackerSpace); 545 | 546 | if (tracker) 547 | { 548 | if (tracker->id == _mouseDownTrackerId) 549 | { 550 | int mX = trackerSpace.x() * 64; 551 | int mY = trackerSpace.y() * 44; 552 | 553 | if (mX >= 0 && mX < 64 && mY >= 0 && mY < 44) 554 | { 555 | if (tracker->interactMode == 1 && _editMaskMode == 1) 556 | _ChangeMask(tracker, mX, mY, false); 557 | else if (tracker->interactMode == 1 && _editMaskMode == 2) 558 | _ChangeMask(tracker, mX, mY, true); 559 | } 560 | } 561 | } 562 | } 563 | else if (_mouseRight) 564 | { 565 | viewTranslate.setX(viewTranslate.x() + delta.x() / viewZoom); 566 | viewTranslate.setY(viewTranslate.y() + delta.y() / viewZoom); 567 | update(); 568 | } 569 | else 570 | { 571 | int newHoveredId = -1; 572 | LiveTracker* tracker = _GetTracker(_mouseMovedPos.x(), _mouseMovedPos.y()); 573 | 574 | if (tracker) 575 | { 576 | newHoveredId = tracker->id; 577 | } 578 | 579 | if (hoveredId != newHoveredId) 580 | { 581 | hoveredId = newHoveredId; 582 | update(); 583 | } 584 | } 585 | } 586 | 587 | void CameraView::_ChangeMask(LiveTracker* Tracker, int X, int Y, bool Value) 588 | { 589 | if (Tracker->getMask(X, Y) != Value) 590 | { 591 | Tracker->changeMask(X, Y, Value); 592 
| main->changeMask(Tracker); 593 | update(); 594 | } 595 | } 596 | 597 | void CameraView::wheelEvent(QWheelEvent* Event) 598 | { 599 | viewZoom += Event->angleDelta().y() * 0.002f * viewZoom; 600 | 601 | if (viewZoom < 0.1) 602 | viewZoom = 0.1f; 603 | 604 | //viewTranslate.setX(viewTranslate.x() + (viewTranslate.x() - (_mouseMovedPos.x() * viewZoom))); 605 | //viewTranslate.setY(viewTranslate.y() + (viewTranslate.y() - (_mouseMovedPos.y() * viewZoom))); 606 | 607 | update(); 608 | } 609 | 610 | LiveTracker* CameraView::_GetTracker(int X, int Y, QVector2D* TrackerSpace) 611 | { 612 | int trackerCount = 0; 613 | for (std::map::iterator it = take->liveTrackers.begin(); it != take->liveTrackers.end(); ++it) 614 | { 615 | LiveTracker* tracker = it->second; 616 | float tX = trackerCount * (VID_W + 10); 617 | 618 | QPoint min = _GetVPoint(tX - 0.5f, -0.5f); 619 | QPoint max = _GetVPoint(tX - 0.5f + VID_W + 1.0f, -0.5f + VID_H + 1.0f); 620 | 621 | if (X >= min.x() && X <= max.x() && Y >= min.y() && Y <= max.y()) 622 | { 623 | if (TrackerSpace) 624 | { 625 | TrackerSpace->setX(((float)X - min.x()) / (max.x() - min.x())); 626 | TrackerSpace->setY(((float)Y - min.y()) / (max.y() - min.y())); 627 | } 628 | 629 | return tracker; 630 | } 631 | 632 | trackerCount++; 633 | } 634 | 635 | return 0; 636 | } 637 | 638 | void CameraView::_DeselectTrackers() 639 | { 640 | int trackerCount = 0; 641 | for (std::map::iterator it = take->liveTrackers.begin(); it != take->liveTrackers.end(); ++it) 642 | { 643 | LiveTracker* tracker = it->second; 644 | tracker->selected = false; 645 | 646 | trackerCount++; 647 | } 648 | } -------------------------------------------------------------------------------- /VectorSynth/cameraView.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include 4 | 5 | #include "take.h" 6 | #include "liveTracker.h" 7 | 8 | QT_USE_NAMESPACE 9 | 10 | class MainWindow; 11 | 12 | class CameraView : public QWidget 13 | { 14 | Q_OBJECT 15 | 16 | public: 17 | 18 | CameraView(QWidget* Parent, MainWindow* Main); 19 | 20 | MainWindow* main; 21 | 22 | int mode; 23 | 24 | QImage camImage; 25 | 26 | // Take view. 27 | Take* take; 28 | int timelineFrame; 29 | 30 | // Global view controls. 
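	// viewTranslate is in video-pixel units and viewZoom is a uniform scale; paintEvent composes them
	// into the _vt transform used for both drawing and mouse picking.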
31 | QVector2D viewTranslate; 32 | float viewZoom; 33 | 34 | int hoveredId; 35 | 36 | bool showMask; 37 | int markerMode; 38 | bool showPixelGrid; 39 | bool showCamRays; 40 | bool showDistortedMarkers; 41 | bool showUndistortedMarkers; 42 | bool showReprojectedMarkers; 43 | bool alignHorizontal; 44 | 45 | void setMask(bool Visible); 46 | void setMarkerMode(int Mode); 47 | void setPixelGrid(bool Visible); 48 | 49 | protected: 50 | 51 | void paintEvent(QPaintEvent* Event); 52 | void mousePressEvent(QMouseEvent* Event); 53 | void mouseMoveEvent(QMouseEvent* Event); 54 | void mouseReleaseEvent(QMouseEvent* Event); 55 | void wheelEvent(QWheelEvent* Event); 56 | 57 | private: 58 | 59 | QFont _mainFont; 60 | QFont _detailFont; 61 | QFont _largeFont; 62 | QFont _medFont; 63 | 64 | bool _mouseLeft; 65 | bool _mouseRight; 66 | QPointF _mouseDownPos; 67 | QPointF _mouseMovedPos; 68 | int _mouseDownTrackerId; 69 | int _editMaskMode; 70 | 71 | QTransform _vt; 72 | 73 | QPointF _GetVPointF(float X, float Y); 74 | QPoint _GetVPoint(float X, float Y); 75 | 76 | LiveTracker* _GetTracker(int X, int Y, QVector2D* TrackerSpace = 0); 77 | void _DeselectTrackers(); 78 | 79 | void _ChangeMask(LiveTracker* Tracker, int X, int Y, bool Value); 80 | }; -------------------------------------------------------------------------------- /VectorSynth/decoder.cpp: -------------------------------------------------------------------------------- 1 | #include "decoder.h" 2 | #include 3 | #include 4 | 5 | using namespace cv; 6 | 7 | Decoder::Decoder() 8 | { 9 | frameSkip = 0; 10 | _frameLimit = 0; 11 | dataRecvBytes = 0; 12 | _calibrating = false; 13 | findCalibrationSheet = false; 14 | drawUndistorted = false; 15 | newFrames = 0; 16 | destWidth = VID_W * 1; 17 | destHeight = VID_H * 1; 18 | _parsedFrames = 0; 19 | 20 | for (int i = 0; i < sizeof(frameMaskData); ++i) 21 | frameMaskData[i] = 1; 22 | 23 | colMat = Mat(VID_H, VID_W, CV_8UC3); 24 | _initOpenCV(); 25 | 26 | avcodec_register_all(); 27 | pkt = av_packet_alloc(); 28 | 29 | memset(inbuf + INBUF_SIZE, 0, AV_INPUT_BUFFER_PADDING_SIZE); 30 | codec = avcodec_find_decoder(AV_CODEC_ID_H264); 31 | 32 | parser = av_parser_init(codec->id); 33 | if (!parser) { 34 | qDebug() << "parser not found"; 35 | exit(1); 36 | } 37 | 38 | c = avcodec_alloc_context3(codec); 39 | if (!c) { 40 | qDebug() << "Could not allocate video codec context"; 41 | exit(1); 42 | } 43 | 44 | /* For some codecs, such as msmpeg4 and mpeg4, width and height 45 | MUST be initialized there because this information is not 46 | available in the bitstream. 
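	For the fixed-resolution H264 stream used here the dimensions are known up front, so they are set explicitly on the context below.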
*/ 47 | /* open it */ 48 | 49 | c->width = VID_W; 50 | c->height = VID_H; 51 | c->framerate = { 80, 1 }; 52 | //c->flags |= CODEC_FLAG_LOW_DELAY; 53 | if (avcodec_open2(c, codec, NULL) < 0) { 54 | qDebug() << "Could not open codec"; 55 | exit(1); 56 | } 57 | 58 | frame = av_frame_alloc(); 59 | 60 | if (!frame) { 61 | qDebug() << "Could not allocate video frame"; 62 | exit(1); 63 | } 64 | 65 | frameRGB = av_frame_alloc(); 66 | 67 | int numBgrFrameBytes = avpicture_get_size(AV_PIX_FMT_GRAY8, destWidth, destHeight); 68 | 69 | uint8_t* bgrBuffer = (uint8_t *)av_malloc(numBgrFrameBytes); 70 | 71 | avpicture_fill((AVPicture *)frameRGB, bgrBuffer, AV_PIX_FMT_GRAY8, destWidth, destHeight); 72 | 73 | //sws = sws_getContext(VID_W, VID_H, AV_PIX_FMT_YUV420P, destWidth, destHeight, AV_PIX_FMT_RGB24, 0, 0, 0, 0); 74 | sws = sws_getContext(VID_W, VID_H, AV_PIX_FMT_YUV420P, destWidth, destHeight, AV_PIX_FMT_GRAY8, 0, 0, 0, 0); 75 | } 76 | 77 | Decoder::~Decoder() 78 | { 79 | av_parser_close(parser); 80 | avcodec_free_context(&c); 81 | av_frame_free(&frame); 82 | av_packet_free(&pkt); 83 | } 84 | 85 | void Decoder::_initOpenCV() 86 | { 87 | SimpleBlobDetector::Params blobDetectorParams; 88 | blobDetectorParams.filterByCircularity = false; 89 | blobDetectorParams.filterByConvexity = false; 90 | blobDetectorParams.filterByInertia = false; 91 | 92 | blobDetectorParams.filterByArea = false; 93 | blobDetectorParams.minArea = 600; 94 | blobDetectorParams.maxArea = 60000; 95 | 96 | blobDetectorParams.filterByColor = true; 97 | blobDetectorParams.blobColor = 255; 98 | 99 | blobDetectorParams.minThreshold = 0; 100 | blobDetectorParams.maxThreshold = 110; 101 | blobDetectorParams.thresholdStep = 100; 102 | 103 | blobDetector = cv::SimpleBlobDetector::create(blobDetectorParams); 104 | } 105 | 106 | bool Decoder::DoDecode(uint8_t* Data, int Len) 107 | { 108 | uint8_t *data; 109 | size_t data_size; 110 | int ret; 111 | 112 | dataRecvBytes += Len; 113 | 114 | data = Data; 115 | data_size = Len; 116 | 117 | bool decodedFrame = false; 118 | while (data_size > 0) 119 | { 120 | ret = av_parser_parse2(parser, c, &pkt->data, &pkt->size, data, data_size, AV_NOPTS_VALUE, AV_NOPTS_VALUE, 0); 121 | 122 | if (ret < 0) 123 | { 124 | qDebug() << "Error while parsing"; 125 | exit(1); 126 | } 127 | 128 | data += ret; 129 | data_size -= ret; 130 | 131 | if (pkt->size) 132 | { 133 | if (_Decode()) 134 | { 135 | ProcessFrameLive(); 136 | decodedFrame = true; 137 | } 138 | } 139 | } 140 | 141 | return decodedFrame; 142 | } 143 | 144 | bool Decoder::DoDecodeSingleFrame(uint8_t* Data, int Len, int* Consumed) 145 | { 146 | uint8_t *data; 147 | size_t data_size; 148 | int ret; 149 | 150 | data = Data; 151 | data_size = Len; 152 | 153 | bool decodedFrame = false; 154 | while (true) 155 | { 156 | //qDebug() << "Decode" << data_size; 157 | ret = av_parser_parse2(parser, c, &pkt->data, &pkt->size, data, data_size, AV_NOPTS_VALUE, AV_NOPTS_VALUE, 0); 158 | //qDebug() << "Ret" << ret << pkt->size; 159 | 160 | if (ret < 0) 161 | { 162 | qDebug() << "Error while parsing"; 163 | exit(1); 164 | } 165 | 166 | data += ret; 167 | data_size -= ret; 168 | *Consumed = *Consumed + ret; 169 | 170 | if (pkt->size) 171 | { 172 | //qDebug() << "Got Packet"; 173 | if (_Decode()) 174 | { 175 | decodedFrame = true; 176 | return true; 177 | } 178 | } 179 | else 180 | { 181 | continue; 182 | } 183 | 184 | if (data_size <= 0) 185 | break; 186 | } 187 | 188 | return false; 189 | } 190 | 191 | void Decoder::_CreateFrameGray() 192 | { 193 | QElapsedTimer t; 194 | 
t.start(); 195 | 196 | int lineSize = frame->linesize[0]; 197 | uint8_t* lumBuffer = frame->data[0]; 198 | 199 | #if 1 200 | for (int iY = 0; iY < VID_H; ++iY) 201 | { 202 | for (int iX = 0; iX < VID_W; ++iX) 203 | { 204 | //int dataIndex = iY * VID_W + iX; 205 | //_postFrameData[dataIndex] = lumBuffer[iY * lineSize + iX]; 206 | 207 | //uint8_t lum = frameRGB->data[0][iY * VID_W + iX]; 208 | 209 | // NOTE: H264 YUV from PI after decode seems to be in the range of 16 - 254. 210 | // NOTE: Ringing undershoots, and overshoots? 211 | uint8_t lum = lumBuffer[iY * lineSize + iX]; 212 | float l = ((float)lum - 16.0f) / (255.0f - 16.0f); 213 | 214 | //if (l < camThreshold) 215 | //l = 0.0f; 216 | 217 | //l = l * camSensitivity * 10; 218 | 219 | if (l < 0.0f) l = 0.0f; 220 | else if (l > 1.0f) l = 1.0f; 221 | 222 | int dataIndex = iY * VID_W + iX; 223 | //_postFrameData[dataIndex] = (uint8_t)(l * 255) * frameMaskData[(iY / 8) * 128 + (iX / 8)]; 224 | _postFrameData[dataIndex] = (uint8_t)(l * 255); 225 | } 226 | } 227 | #else 228 | float invByteRange = 1.0f / (255.0f - 16.0f); 229 | 230 | __m128i v16 = _mm_set1_epi8(16); 231 | 232 | qDebug() << "Aligned Data:" << ((unsigned long)(_postFrameData) & 15); 233 | 234 | // NOTE: Data must be 16byte aligned and both array dimensions must be divisible by 32. 235 | for (int iY = 0; iY < VID_H; ++iY) 236 | { 237 | for (int iX = 0; iX < VID_W; iX += 16) 238 | { 239 | __m128i lum = _mm_loadu_si128((__m128i const*)&lumBuffer[iY * lineSize + iX]); 240 | __m128i lumSub16 = _mm_subs_epu8(lum, v16); 241 | _mm_store_si128((__m128i*)(&_postFrameData[iY * VID_W + iX]), lumSub16); 242 | 243 | /* 244 | uint8_t lum0 = lumBuffer[iY * lineSize + iX + 0]; 245 | uint8_t lum1 = lumBuffer[iY * lineSize + iX + 1]; 246 | uint8_t lum2 = lumBuffer[iY * lineSize + iX + 2]; 247 | uint8_t lum3 = lumBuffer[iY * lineSize + iX + 3]; 248 | 249 | float l0 = ((float)lum0 - 16.0f) * invByteRange; 250 | float l1 = ((float)lum1 - 16.0f) * invByteRange; 251 | float l2 = ((float)lum2 - 16.0f) * invByteRange; 252 | float l3 = ((float)lum3 - 16.0f) * invByteRange; 253 | 254 | if (l0 < 0.0f) l0 = 0.0f; else if (l0 > 1.0f) l0 = 1.0f; 255 | if (l1 < 0.0f) l1 = 0.0f; else if (l1 > 1.0f) l1 = 1.0f; 256 | if (l2 < 0.0f) l2 = 0.0f; else if (l2 > 1.0f) l2 = 1.0f; 257 | if (l3 < 0.0f) l3 = 0.0f; else if (l3 > 1.0f) l3 = 1.0f; 258 | 259 | int dataIndex = iY * VID_W + iX + 0; 260 | _postFrameData[dataIndex] = (uint8_t)(l0 * 255) * _frameMaskData[dataIndex]; 261 | dataIndex = iY * VID_W + iX + 1; 262 | _postFrameData[dataIndex] = (uint8_t)(l1 * 255) * _frameMaskData[dataIndex]; 263 | dataIndex = iY * VID_W + iX + 2; 264 | _postFrameData[dataIndex] = (uint8_t)(l2 * 255) * _frameMaskData[dataIndex]; 265 | dataIndex = iY * VID_W + iX + 3; 266 | _postFrameData[dataIndex] = (uint8_t)(l3 * 255) * _frameMaskData[dataIndex]; 267 | */ 268 | } 269 | } 270 | 271 | for (int iX = 0; iX < VID_W * VID_H; iX += 16) 272 | { 273 | __m128i lum = _mm_loadu_si128((__m128i const*)&lumBuffer[iX]); 274 | __m128i lumSub16 = _mm_subs_epu8(lum, v16); 275 | _mm_store_si128((__m128i*)(&_postFrameData[iX]), lumSub16); 276 | } 277 | #endif 278 | 279 | cvFrame = Mat(VID_H, VID_W, CV_8UC1, _postFrameData); 280 | 281 | //float t1 = (t.nsecsElapsed() / 1000) / 1000.0; 282 | //qDebug() << "Gray Pass:" << t1; 283 | } 284 | 285 | void Decoder::ProcessFrameLive() 286 | { 287 | if (_frameLimit++ < frameSkip) 288 | return; 289 | 290 | _frameLimit = 0; 291 | 292 | _CreateFrameGray(); 293 | 294 | if (drawUndistorted) 295 | { 296 | //undistort(cvFrame, 
undistortMat, calibCameraMatrix, calibDistCoeffs, optCamMat);
297 | 		cv::cvtColor(undistortMat, colMat, cv::COLOR_GRAY2RGB);
298 | 	}
299 | 	else
300 | 	{
301 | 		cv::cvtColor(cvFrame, colMat, cv::COLOR_GRAY2RGB);
302 | 	}
303 | 
304 | 	if (findCalibrationSheet)
305 | 	{
306 | 		if (!_calibrating)
307 | 		{
308 | 			qDebug() << "Calibrating!";
309 | 			calibMutex.lock();
310 | 			_calibrating = true;
311 | 			calibImageCount = 0;
312 | 			calibImagePoints.clear();
313 | 			calibMutex.unlock();
314 | 		}
315 | 
316 | 		_findCalibrationSheet();
317 | 	}
318 | 	else
319 | 	{
320 | 		if (_calibrating)
321 | 		{
322 | 			calibMutex.lock();
323 | 			_calibrating = false;
324 | 			calibMutex.unlock();
325 | 		}
326 | 	}
327 | }
328 | 
329 | void Decoder::ProcessFrame()
330 | {
331 | 	_CreateFrameGray();
332 | 
333 | 	//undistort(cvFrame, undistortMat, calibCameraMatrix, calibDistCoeffs, optCamMat);
334 | 	//undistort(cvFrame, undistortMat, refK, refD, refOptK);
335 | 	//cv::cvtColor(undistortMat, colMat, cv::COLOR_GRAY2RGB);
336 | 
337 | 	// No undistortion.
338 | 	cv::cvtColor(cvFrame, colMat, cv::COLOR_GRAY2RGB);
339 | 
340 | 	/*
341 | 	std::vector<Point3f> worldPoints;
342 | 	worldPoints.push_back(Point3f(0, 0, 0));
343 | 
344 | 	std::vector<Point2f> imgPoints;
345 | 
346 | 	cv::projectPoints(worldPoints, Mat(Point3f(0, 0, 0)), Mat(Point3f(0, 0, 0)), optCamMat, _calibDistCoeffs, imgPoints);
347 | 
348 | 	for (int i = 0; i < imgPoints.size(); ++i)
349 | 	{
350 | 		cv::circle(colMat, imgPoints[i], 4, Scalar(255, 0, 0), -1, CV_AA);
351 | 	}
352 | 	*/
353 | 
354 | 	//cv::circle(colMat, Point2f(500, 500), 4, Scalar(255, 0, 0), 1, 8);
355 | 
356 | 	return;
357 | }
358 | 
359 | struct blob
360 | {
361 | 	float minX;
362 | 	float minY;
363 | 	float maxX;
364 | 	float maxY;
365 | };
366 | 
367 | float distSq(float X1, float Y1, float X2, float Y2)
368 | {
369 | 	return (X2 - X1) * (X2 - X1) + (Y2 - Y1) * (Y2 - Y1);
370 | }
371 | 
372 | QList<Marker2D> Decoder::ProcessFrameNewMarkers()
373 | {
374 | 	QList<Marker2D> markers;
375 | 
376 | 	if (blankFrame)
377 | 		return markers;
378 | 
379 | 	_CreateFrameGray();
380 | 
381 | 	QElapsedTimer t;
382 | 	t.start();
383 | 
384 | 	blob blobs[1024];
385 | 	int blobCount = 0;
386 | 
387 | 	const int width = VID_W;
388 | 	const int height = VID_H;
389 | 	const int pixelCount = width * height;
390 | 
391 | 	//uint8_t buffer[pixelCount];
392 | 
393 | 	int brightPixels = 0;
394 | 
395 | 	blobCount = 0;
396 | 
397 | 	for (int y = 0; y < height; ++y)
398 | 	{
399 | 		for (int x = 0; x < width; ++x)
400 | 		{
401 | 			//if (buffer[y * width + x] > 95 && buffer[y * width + x] < 189)
402 | 			if (_postFrameData[y * width + x] > 20)
403 | 			{
404 | 				++brightPixels;
405 | 
406 | 				bool blobFound = false;
407 | 
408 | 				// TODO: Acceleration structure for closest blob search?
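				// One possible scheme (untested sketch): bucket blob indices into a coarse uniform grid,
				// e.g. cell = (y / 8) * (width / 8) + (x / 8), and test only blobs registered in the
				// neighbouring cells instead of scanning the whole list as below.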
409 | 410 | for (int b = 0; b < blobCount; ++b) 411 | { 412 | blob* cb = blobs + b; 413 | 414 | float cx = (cb->minX + cb->maxX) / 2; 415 | float cy = (cb->minY + cb->maxY) / 2; 416 | float d = distSq(x, y, cx, cy); 417 | 418 | if (d < 8 * 8) 419 | { 420 | if (x < cb->minX) cb->minX = x; 421 | if (y < cb->minY) cb->minY = y; 422 | if (x > cb->maxX) cb->maxX = x; 423 | if (y > cb->maxY) cb->maxY = y; 424 | 425 | blobFound = true; 426 | break; 427 | } 428 | } 429 | 430 | if (!blobFound) 431 | { 432 | blobs[blobCount].minX = x; 433 | blobs[blobCount].minY = y; 434 | blobs[blobCount].maxX = x; 435 | blobs[blobCount].maxY = y; 436 | 437 | ++blobCount; 438 | } 439 | } 440 | } 441 | } 442 | 443 | for (int i = 0; i < blobCount; ++i) 444 | { 445 | if (blobs[i].maxX - blobs[i].minX < 2 || blobs[i].maxY - blobs[i].minY < 2) 446 | continue; 447 | 448 | Marker2D m = {}; 449 | m.pos = QVector2D((blobs[i].maxX - blobs[i].minX) * 0.5f + blobs[i].minX, (blobs[i].maxY - blobs[i].minY) * 0.5f + blobs[i].minY); 450 | m.bounds = QVector4D(blobs[i].minX, blobs[i].minY, blobs[i].maxX, blobs[i].maxY); 451 | 452 | m.pos = QVector2D(0, 0); 453 | float totalWeight = 0.0f; 454 | // Count total weight 455 | for (int y = blobs[i].minY; y < blobs[i].maxY + 1; ++y) 456 | { 457 | for (int x = blobs[i].minX; x < blobs[i].maxX + 1; ++x) 458 | { 459 | //if (buffer[y * width + x] > 95 && buffer[y * width + x] < 189) 460 | if (_postFrameData[y * width + x] > 20) 461 | { 462 | totalWeight += _postFrameData[y * width + x]; 463 | } 464 | } 465 | } 466 | 467 | for (int y = blobs[i].minY; y < blobs[i].maxY + 1; ++y) 468 | { 469 | for (int x = blobs[i].minX; x < blobs[i].maxX + 1; ++x) 470 | { 471 | //if (buffer[y * width + x] > 95 && buffer[y * width + x] < 189) 472 | if (_postFrameData[y * width + x] > 20) 473 | { 474 | float pixelV = _postFrameData[y * width + x]; 475 | 476 | m.pos += QVector2D(x, y) * (pixelV / totalWeight); 477 | } 478 | } 479 | } 480 | 481 | m.distPos = m.pos; 482 | markers.push_back(m); 483 | } 484 | 485 | /* 486 | // Undistort markers with default mats. 487 | if (markers.size() > 0) 488 | { 489 | cv::Mat_ matPoint(1, markers.size()); 490 | for (int i = 0; i < markers.size(); ++i) 491 | matPoint(i) = Point2f(markers[i].distPos.x(), markers[i].distPos.y()); 492 | 493 | cv::Mat matOutPoints; 494 | //cv::undistortPoints(matPoint, matOutPoints, _calibCameraMatrix, _calibDistCoeffs, cv::noArray(), optCamMat); 495 | cv::undistortPoints(matPoint, matOutPoints, optCamMat, calibDistCoeffs, cv::noArray(), optCamMat); 496 | 497 | //markers.clear(); 498 | 499 | // Clip markers. 500 | for (int i = 0; i < matOutPoints.size().width; ++i) 501 | { 502 | Point2f p = matOutPoints.at(i); 503 | 504 | if (p.x >= 0 && p.x < VID_W && p.y >= 0 && p.y < VID_H) 505 | { 506 | markers[i].pos = QVector2D(p.x, p.y); 507 | } 508 | } 509 | } 510 | 511 | /* 512 | // Undistort markers with refined mats. 513 | if (markers.size() > 0) 514 | { 515 | cv::Mat_ matPoint(1, markers.size()); 516 | for (int i = 0; i < markers.size(); ++i) 517 | matPoint(i) = Point2f(markers[i].distPos.x(), markers[i].distPos.y()); 518 | 519 | cv::Mat matOutPoints; 520 | cv::undistortPoints(matPoint, matOutPoints, refK, refD, cv::noArray(), refOptK); 521 | 522 | //markers.clear(); 523 | 524 | // Clip markers. 
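	// Points that fall outside the frame after undistortion are skipped, leaving refPos unset.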
525 |         for (int i = 0; i < matOutPoints.size().width; ++i)
526 |         {
527 |             Point2f p = matOutPoints.at<Point2f>(i);
528 | 
529 |             if (p.x >= 0 && p.x < VID_W && p.y >= 0 && p.y < VID_H)
530 |             {
531 |                 markers[i].refPos = QVector2D(p.x, p.y);
532 |             }
533 |         }
534 |     }
535 |     //*/
536 | 
537 |     //qDebug() << "Custom:" << ((t.nsecsElapsed() / 1000) / 1000.0) << "ms";
538 | 
539 |     return markers;
540 | }
541 | 
542 | bool Decoder::_Decode()
543 | {
544 |     int ret;
545 |     ret = avcodec_send_packet(c, pkt);
546 |     if (ret < 0)
547 |     {
548 |         qDebug() << "Error sending a packet for decoding";
549 |         exit(1);
550 |     }
551 | 
552 |     bool decodedFrame = false;
553 | 
554 |     // TODO: Check if we really only ever process a single frame here.
555 |     //while (ret >= 0)
556 |     {
557 |         ret = avcodec_receive_frame(c, frame);
558 |         if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
559 |         {
560 |             return decodedFrame;
561 |         }
562 |         else if (ret < 0)
563 |         {
564 |             qDebug() << "Error during decoding";
565 |             exit(1);
566 |         }
567 | 
568 |         ++_parsedFrames;
569 |         ++newFrames;
570 | 
571 |         decodedFrame = true;
572 |     }
573 | 
574 |     return decodedFrame;
575 | }
576 | 
577 | void Decoder::TransferFrameToBuffer(uint8_t* Data)
578 | {
579 |     //memcpy(Data, _postFrameData, sizeof(_postFrameData));
580 |     memcpy(Data, colMat.data, VID_W * VID_H * 3);
581 | }
582 | 
583 | void Decoder::_findCalibrationSheet()
584 | {
585 |     SimpleBlobDetector::Params blobDetectorParams;
586 | 
587 |     blobDetectorParams.filterByCircularity = false;
588 |     blobDetectorParams.filterByConvexity = false;
589 |     blobDetectorParams.filterByInertia = false;
590 | 
591 |     blobDetectorParams.filterByArea = true;
592 |     blobDetectorParams.minArea = 50;
593 |     blobDetectorParams.maxArea = 2000;
594 | 
595 |     blobDetectorParams.filterByColor = true;
596 |     blobDetectorParams.blobColor = 0;
597 | 
598 |     blobDetectorParams.minThreshold = 80;
599 |     blobDetectorParams.maxThreshold = 255;
600 | 
601 |     Ptr<SimpleBlobDetector> blobDetector = SimpleBlobDetector::create(blobDetectorParams);
602 | 
603 |     std::vector<Point2f> pointBuf;
604 |     bool found = findCirclesGrid(cvFrame, Size(4, 11), pointBuf, CALIB_CB_ASYMMETRIC_GRID, blobDetector);
605 | 
606 |     if (found)
607 |     {
608 |         calibImagePoints.push_back(pointBuf);
609 |         ++calibImageCount;
610 |         drawChessboardCorners(colMat, Size(4, 11), Mat(pointBuf), found);
611 |     }
612 |     else
613 |     {
614 |         std::vector<KeyPoint> keypoints;
615 |         blobDetector->detect(cvFrame, keypoints);
616 | 
617 |         for (int k = 0; k < keypoints.size(); ++k)
618 |         {
619 |             circle(colMat, keypoints[k].pt, keypoints[k].size * 0.5f + 4.0f, Scalar(255, 0, 0), 1, 8);
620 |             drawMarker(colMat, keypoints[k].pt, Scalar(255, 0, 255), cv::MarkerTypes::MARKER_CROSS, 40, 1);
621 |         }
622 |     }
623 | }
624 | 
625 | void Decoder::ShowBlankFrame()
626 | {
627 |     colMat = cv::Scalar(80, 0, 0);
628 | }
--------------------------------------------------------------------------------
/VectorSynth/decoder.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 | 
3 | #include <stdint.h>
4 | 
5 | #include <opencv2/opencv.hpp>
6 | #include <opencv2/features2d.hpp>
7 | #include <opencv2/calib3d.hpp>
8 | 
9 | #include <QDebug>
10 | #include <QElapsedTimer>
11 | #include <QList>
12 | #include <QMutex>
13 | #include <QVector2D>
14 | #include <QVector3D>
15 | #include <QVector4D>
16 | 
17 | extern "C"
18 | {
19 | #include <libavcodec/avcodec.h>
20 | #include <libavformat/avformat.h>
21 | #include <libavutil/avutil.h>
22 | #include <libavutil/common.h>
23 | #include <libavutil/frame.h>
24 | #include <libavutil/imgutils.h>
25 | #include <libavutil/mathematics.h>
26 | #include <libavutil/opt.h>
27 | #include <libswscale/swscale.h>
28 | }
29 | 
30 | #define VID_W 1024
31 | #define VID_H 704
32 | #define INBUF_SIZE 8192
33 | 
34 | class Marker2D
35 | {
36 | public:
37 | 
38 |     int markerId;
39 |     int trackerId;
40 |     QVector2D pos;
41 |     QVector2D distPos;
42 |     QVector4D bounds;
43 |     QVector3D worldRayD;
44 | };
45 | 
46 | class Decoder
47 | {
48 | public:
49 |     Decoder();
50 |     ~Decoder();
51 | 
52 |     int frameSkip;
53 |     bool drawUndistorted;
54 |     volatile bool findCalibrationSheet;
55 |     bool blankFrame;
56 |     int dataRecvBytes;
57 |     int newFrames;
58 |     uint8_t frameMaskData[64 * 44];
59 | 
60 |     QMutex calibMutex;
61 |     int calibImageCount;
62 |     std::vector<std::vector<cv::Point2f>> calibImagePoints;
63 | 
64 |     bool DoDecode(uint8_t* Data, int Len);
65 |     bool DoDecodeSingleFrame(uint8_t* Data, int Len, int* Consumed);
66 |     bool isCalibrating() { return _calibrating; }
67 |     uint8_t* GetFrameMatData() { return colMat.data; }
68 |     void TransferFrameToBuffer(uint8_t* Data);
69 |     void ShowBlankFrame();
70 |     void ProcessFrameLive();
71 |     void ProcessFrame();
72 |     QList<Marker2D> ProcessFrameNewMarkers();
73 | 
74 | private:
75 | 
76 |     // LibAV
77 |     const AVCodec* codec;
78 |     AVCodecParserContext* parser;
79 |     AVCodecContext* c = NULL;
80 |     AVFrame* frame;
81 |     AVFrame* frameRGB;
82 |     uint8_t inbuf[INBUF_SIZE + AV_INPUT_BUFFER_PADDING_SIZE];
83 |     AVPacket* pkt;
84 |     SwsContext* sws;
85 | 
86 |     // OpenCV
87 |     cv::Ptr<cv::SimpleBlobDetector> blobDetector;
88 |     cv::Mat cvFrame;
89 |     cv::Mat colMat;
90 |     cv::Mat undistortMat;
91 |     cv::Mat maskMat;
92 | 
93 |     // Calibration
94 |     bool _calibrating;
95 | 
96 |     // Frame parsing.
97 |     int destWidth;
98 |     int destHeight;
99 |     int _parsedFrames;
100 |     int _frameLimit;
101 |     uint8_t _postFrameData[VID_W * VID_H];
102 | 
103 |     bool _Decode();
104 |     void _initOpenCV();
105 |     void _findCalibrationSheet();
106 |     void _CreateFrameGray();
107 | };
--------------------------------------------------------------------------------
/VectorSynth/font_sheet.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Rob2048/VectorONE/5a2229cdf3a5bceff6a830617d5b518536d73958/VectorSynth/font_sheet.png
--------------------------------------------------------------------------------
/VectorSynth/icons8-back.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Rob2048/VectorONE/5a2229cdf3a5bceff6a830617d5b518536d73958/VectorSynth/icons8-back.png
--------------------------------------------------------------------------------
/VectorSynth/icons8-coordinate-system.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Rob2048/VectorONE/5a2229cdf3a5bceff6a830617d5b518536d73958/VectorSynth/icons8-coordinate-system.png
--------------------------------------------------------------------------------
/VectorSynth/icons8-dashboard.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Rob2048/VectorONE/5a2229cdf3a5bceff6a830617d5b518536d73958/VectorSynth/icons8-dashboard.png
--------------------------------------------------------------------------------
/VectorSynth/icons8-day-camera.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Rob2048/VectorONE/5a2229cdf3a5bceff6a830617d5b518536d73958/VectorSynth/icons8-day-camera.png
--------------------------------------------------------------------------------
/VectorSynth/icons8-exercise.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Rob2048/VectorONE/5a2229cdf3a5bceff6a830617d5b518536d73958/VectorSynth/icons8-exercise.png
--------------------------------------------------------------------------------
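Editor's note: the blob grouping in Decoder::ProcessFrameNewMarkers above is a single-pass greedy clustering. Each bright pixel either grows the first existing blob whose current bounding-box center lies within 8 pixels (compared as a squared distance, so no square root is needed) or starts a new single-pixel blob. A minimal standalone sketch of that rule, with hypothetical names (Blob, addPixel) and the 8-pixel radius lifted from the code:

#include <vector>

struct Blob { float minX, minY, maxX, maxY; };

static float distSq(float x1, float y1, float x2, float y2)
{
    return (x2 - x1) * (x2 - x1) + (y2 - y1) * (y2 - y1);
}

// Grow the first blob whose center is within `radius` of the pixel,
// otherwise start a new single-pixel blob.
static void addPixel(std::vector<Blob>& blobs, int x, int y, float radius = 8.0f)
{
    for (Blob& b : blobs)
    {
        float cx = (b.minX + b.maxX) * 0.5f;
        float cy = (b.minY + b.maxY) * 0.5f;

        if (distSq((float)x, (float)y, cx, cy) < radius * radius)
        {
            // Expand the blob's bounding box to cover the new pixel.
            if (x < b.minX) b.minX = (float)x;
            if (y < b.minY) b.minY = (float)y;
            if (x > b.maxX) b.maxX = (float)x;
            if (y > b.maxY) b.maxY = (float)y;
            return;
        }
    }

    blobs.push_back({ (float)x, (float)y, (float)x, (float)y });
}

Because only the first blob within range wins and centers shift as boxes grow, the result depends on scan order; the TODO about an acceleration structure in the code refers to the O(pixels x blobs) cost of this linear search.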
/VectorSynth/icons8-expand.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Rob2048/VectorONE/5a2229cdf3a5bceff6a830617d5b518536d73958/VectorSynth/icons8-expand.png
--------------------------------------------------------------------------------
/VectorSynth/icons8-fast-forward.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Rob2048/VectorONE/5a2229cdf3a5bceff6a830617d5b518536d73958/VectorSynth/icons8-fast-forward.png
--------------------------------------------------------------------------------
/VectorSynth/icons8-first-quarter.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Rob2048/VectorONE/5a2229cdf3a5bceff6a830617d5b518536d73958/VectorSynth/icons8-first-quarter.png
--------------------------------------------------------------------------------
/VectorSynth/icons8-full-moon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Rob2048/VectorONE/5a2229cdf3a5bceff6a830617d5b518536d73958/VectorSynth/icons8-full-moon.png
--------------------------------------------------------------------------------
/VectorSynth/icons8-grid.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Rob2048/VectorONE/5a2229cdf3a5bceff6a830617d5b518536d73958/VectorSynth/icons8-grid.png
--------------------------------------------------------------------------------
/VectorSynth/icons8-height.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Rob2048/VectorONE/5a2229cdf3a5bceff6a830617d5b518536d73958/VectorSynth/icons8-height.png
--------------------------------------------------------------------------------
/VectorSynth/icons8-iron-man.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Rob2048/VectorONE/5a2229cdf3a5bceff6a830617d5b518536d73958/VectorSynth/icons8-iron-man.png
--------------------------------------------------------------------------------
/VectorSynth/icons8-less-than.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Rob2048/VectorONE/5a2229cdf3a5bceff6a830617d5b518536d73958/VectorSynth/icons8-less-than.png
--------------------------------------------------------------------------------
/VectorSynth/icons8-more-than.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Rob2048/VectorONE/5a2229cdf3a5bceff6a830617d5b518536d73958/VectorSynth/icons8-more-than.png
--------------------------------------------------------------------------------
/VectorSynth/icons8-new-moon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Rob2048/VectorONE/5a2229cdf3a5bceff6a830617d5b518536d73958/VectorSynth/icons8-new-moon.png
--------------------------------------------------------------------------------
/VectorSynth/icons8-open.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Rob2048/VectorONE/5a2229cdf3a5bceff6a830617d5b518536d73958/VectorSynth/icons8-open.png
--------------------------------------------------------------------------------
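Editor's note: the sub-pixel marker position computed in ProcessFrameNewMarkers above is an intensity-weighted centroid: every pixel over the threshold contributes its coordinates weighted by its brightness relative to the blob's total brightness. A self-contained sketch of the same two-pass computation (the function name is hypothetical; the threshold of 20 and the row-major 8-bit frame layout are taken from the code):

#include <cstdint>
#include <utility>

// Intensity-weighted centroid of the thresholded pixels inside a bounding box.
static std::pair<float, float> weightedCentroid(const uint8_t* frame, int width,
    int minX, int minY, int maxX, int maxY, uint8_t threshold = 20)
{
    float totalWeight = 0.0f;

    // Pass 1: total brightness of all contributing pixels.
    for (int y = minY; y <= maxY; ++y)
        for (int x = minX; x <= maxX; ++x)
            if (frame[y * width + x] > threshold)
                totalWeight += frame[y * width + x];

    if (totalWeight <= 0.0f)
        return { 0.0f, 0.0f };

    // Pass 2: accumulate coordinates weighted by relative brightness.
    float cx = 0.0f, cy = 0.0f;
    for (int y = minY; y <= maxY; ++y)
        for (int x = minX; x <= maxX; ++x)
            if (frame[y * width + x] > threshold)
            {
                float w = frame[y * width + x] / totalWeight;
                cx += x * w;
                cy += y * w;
            }

    return { cx, cy };
}

Weighting by brightness rather than taking the box center gives sub-pixel precision, which is why the code overwrites the box-center m.pos with the weighted sum.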
/VectorSynth/icons8-play.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Rob2048/VectorONE/5a2229cdf3a5bceff6a830617d5b518536d73958/VectorSynth/icons8-play.png
--------------------------------------------------------------------------------
/VectorSynth/icons8-rename.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Rob2048/VectorONE/5a2229cdf3a5bceff6a830617d5b518536d73958/VectorSynth/icons8-rename.png
--------------------------------------------------------------------------------
/VectorSynth/icons8-rewind.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Rob2048/VectorONE/5a2229cdf3a5bceff6a830617d5b518536d73958/VectorSynth/icons8-rewind.png
--------------------------------------------------------------------------------
/VectorSynth/icons8-save.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Rob2048/VectorONE/5a2229cdf3a5bceff6a830617d5b518536d73958/VectorSynth/icons8-save.png
--------------------------------------------------------------------------------
/VectorSynth/icons8-sun.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Rob2048/VectorONE/5a2229cdf3a5bceff6a830617d5b518536d73958/VectorSynth/icons8-sun.png
--------------------------------------------------------------------------------
/VectorSynth/icons8-wall-mount-camera.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Rob2048/VectorONE/5a2229cdf3a5bceff6a830617d5b518536d73958/VectorSynth/icons8-wall-mount-camera.png
--------------------------------------------------------------------------------
/VectorSynth/icons8-waste.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Rob2048/VectorONE/5a2229cdf3a5bceff6a830617d5b518536d73958/VectorSynth/icons8-waste.png
--------------------------------------------------------------------------------
/VectorSynth/icons8-width.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Rob2048/VectorONE/5a2229cdf3a5bceff6a830617d5b518536d73958/VectorSynth/icons8-width.png
--------------------------------------------------------------------------------
/VectorSynth/liveTracker.cpp:
--------------------------------------------------------------------------------
1 | #include "liveTracker.h"
2 | 
3 | LiveTracker::LiveTracker() :
4 |     frames(0),
5 |     data(0),
6 |     fps(0),
7 |     dataRecv(0),
8 |     selected(false),
9 |     active(true),
10 |     connected(false),
11 |     loaded(false),
12 |     interactMode(0),
13 |     markerDataSize(0)
14 | {
15 |     for (int i = 0; i < 64 * 44; ++i)
16 |     {
17 |         maskVisualData[i] = { 0, 0, 0, 0 };
18 |         maskData[i] = 1;
19 |     }
20 | }
21 | 
22 | LiveTracker::~LiveTracker()
23 | {
24 | }
25 | 
26 | void LiveTracker::updateStats()
27 | {
28 |     // NOTE: Called at 1Hz.
29 |     fps = frames;
30 |     dataRecv = data / 1024.0f;
31 |     frames = 0;
32 |     data = 0;
33 | }
34 | 
35 | void LiveTracker::setMask(uint8_t* Data)
36 | {
37 |     memcpy(maskData, Data, sizeof(maskData));
38 | 
39 |     for (int i = 0; i < 64 * 44; ++i)
40 |     {
41 |         if (maskData[i] == 0)
42 |             maskVisualData[i] = { 255, 0, 0, 128 };
43 |         else
44 |             maskVisualData[i] = { 0, 0, 0, 0 };
45 |     }
46 | }
47 | 
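// Note (editor's comment; layout inferred from the 64 * 44 arrays and the
// VID_W/VID_H constants): the mask is a 64x44 grid in which each cell covers
// a 16x16 pixel block of the 1024x704 frame, so a frame pixel (px, py) maps
// to mask cell (px / 16, py / 16), i.e. index (py / 16) * 64 + (px / 16).
// changeMask() and getMask() below index cells directly; generateMask() scans
// each cell's block, padded by one cell on every side, for lit pixels.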
48 | void LiveTracker::changeMask(int X, int Y, bool Value)
49 | {
50 |     if (Value)
51 |     {
52 |         maskData[Y * 64 + X] = 0;
53 |         maskVisualData[Y * 64 + X] = { 255, 0, 0, 128 };
54 |     }
55 |     else
56 |     {
57 |         maskData[Y * 64 + X] = 1;
58 |         maskVisualData[Y * 64 + X] = { 0, 0, 0, 0 };
59 |     }
60 | }
61 | 
62 | bool LiveTracker::getMask(int X, int Y)
63 | {
64 |     return (maskData[Y * 64 + X] == 0);
65 | }
66 | 
67 | void LiveTracker::generateMask()
68 | {
69 |     for (int iY = 0; iY < 44; ++iY)
70 |     {
71 |         for (int iX = 0; iX < 64; ++iX)
72 |         {
73 |             bool masked = false;
74 | 
75 |             int kXStart = (iX * 16) - 16;
76 |             int kXEnd = (iX * 16) + 32;
77 |             int kYStart = (iY * 16) - 16;
78 |             int kYEnd = (iY * 16) + 32;
79 | 
80 |             if (kXStart < 0) kXStart = 0;
81 |             if (kXEnd > VID_W) kXEnd = VID_W;
82 |             if (kYStart < 0) kYStart = 0;
83 |             if (kYEnd > VID_H) kYEnd = VID_H;
84 | 
85 |             for (int kY = kYStart; kY < kYEnd; ++kY)
86 |             {
87 |                 for (int kX = kXStart; kX < kXEnd; ++kX)
88 |                 {
89 |                     if (frameData[(kY * VID_W + kX) * 3] > 0)
90 |                     {
91 |                         masked = true;
92 |                         break;
93 |                     }
94 |                 }
95 | 
96 |                 if (masked)
97 |                     break;
98 |             }
99 | 
100 |             changeMask(iX, iY, masked);
101 |         }
102 |     }
103 | }
--------------------------------------------------------------------------------
/VectorSynth/liveTracker.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 | 
3 | #include <QString>
4 | #include "decoder.h"
5 | 
6 | struct blob
7 | {
8 |     float minX;
9 |     float minY;
10 |     float maxX;
11 |     float maxY;
12 |     float cX;
13 |     float cY;
14 | };
15 | 
16 | struct region
17 | {
18 |     uint8_t id;
19 |     int minX;
20 |     int minY;
21 |     int maxX;
22 |     int maxY;
23 |     int width;
24 |     int height;
25 |     int pixelIdx;
26 |     int pixelCount;
27 |     uint8_t maxLum;
28 | };
29 | 
30 | struct blobDataHeader
31 | {
32 |     int64_t frameId;
33 |     float avgMasterOffset;
34 |     int blobCount;
35 |     int regionCount;
36 |     int foundRegionCount;
37 |     int totalTime;
38 | };
39 | 
40 | struct MaskElement
41 | {
42 |     uint8_t r;
43 |     uint8_t g;
44 |     uint8_t b;
45 |     uint8_t a;
46 | };
47 | 
48 | class LiveTracker
49 | {
50 | public:
51 | 
52 |     LiveTracker();
53 |     ~LiveTracker();
54 | 
55 |     int id;
56 |     uint32_t serial;
57 |     QString name;
58 |     int version;
59 | 
60 |     int exposure;
61 |     int iso;
62 |     int threshold;
63 |     int sensitivity;
64 |     int frameDuration;
65 | 
66 |     float frames;
67 |     float data;
68 |     float fps;
69 |     float dataRecv;
70 |     float avgMasterOffset;
71 |     int64_t latestFrameId;
72 | 
73 |     bool selected;
74 |     bool active;
75 |     bool connected;
76 |     bool loaded;
77 |     int interactMode;
78 | 
79 |     uint8_t frameData[VID_W * VID_H * 3];
80 |     MaskElement maskVisualData[64 * 44];
81 |     uint8_t maskData[64 * 44];
82 |     uint8_t markerData[1024 * 10];
83 |     int markerDataSize;
84 | 
85 |     void updateStats();
86 | 
87 |     void changeMask(int X, int Y, bool Value);
88 |     bool getMask(int X, int Y);
89 |     void generateMask();
90 |     void setMask(uint8_t* Data);
91 | 
92 | private:
93 | 
94 | };
--------------------------------------------------------------------------------
/VectorSynth/main.cpp:
--------------------------------------------------------------------------------
1 | #define WIN32_LEAN_AND_MEAN
2 | #include <windows.h>
3 | 
4 | #include "mainwindow.h"
5 | #include <winsock2.h>
6 | #include <QApplication>
7 | #include <stdint.h>
8 | 
9 | HANDLE timeSyncThreadHandle;
10 | int64_t timeFrequency;
11 | int64_t timeCounterStart;
12 | 
13 | double startTime;
14 | 
15 | struct pingCamEntry
16 | {
17 |     unsigned long ip;
18 |     float avgTime;
19 | };
20 | 
21 | pingCamEntry pingEntries[64];
22 | int pingEntryCount = 0;
23 | 
24 | inline pingCamEntry* getPingEntry(unsigned long Ip)
25 | {
26 |     for (int i = 0; i < pingEntryCount; ++i)
27 |     {
28 |         if (pingEntries[i].ip == Ip)
29 |             return &pingEntries[i];
30 |     }
31 | 
32 |     pingCamEntry* result = &pingEntries[pingEntryCount++];
33 |     result->ip = Ip;
34 |     result->avgTime = 0.0f;
35 | 
36 |     return result;
37 | }
38 | 
39 | double GetTime()
40 | {
41 |     LARGE_INTEGER counter;
42 |     QueryPerformanceCounter(&counter);
43 |     int64_t time = counter.QuadPart - timeCounterStart;
44 |     double result = (double)time / ((double)timeFrequency);
45 | 
46 |     return result;
47 | }
48 | 
49 | DWORD WINAPI timeSyncThreadProc(LPVOID Parameter)
50 | {
51 |     LARGE_INTEGER freq;
52 |     LARGE_INTEGER counter;
53 |     QueryPerformanceFrequency(&freq);
54 |     QueryPerformanceCounter(&counter);
55 |     timeCounterStart = counter.QuadPart;
56 |     timeFrequency = freq.QuadPart;
57 | 
58 |     startTime = GetTime();
59 | 
60 |     OutputDebugStringA("Time sync thread started\n");
61 | 
62 |     WSADATA wsa;
63 |     WSAStartup(MAKEWORD(2, 2), &wsa);
64 | 
65 |     SOCKET s = socket(AF_INET, SOCK_DGRAM, IPPROTO_UDP);
66 | 
67 |     if (s == INVALID_SOCKET)
68 |     {
69 |         OutputDebugStringA("Invalid socket\n");
70 |     }
71 | 
72 |     SOCKADDR_IN server;
73 | 
74 |     server.sin_family = AF_INET;
75 |     server.sin_addr.s_addr = INADDR_ANY;
76 |     server.sin_port = htons(4894);
77 | 
78 | 
79 |     if (bind(s, (SOCKADDR*)&server, sizeof(server)) == SOCKET_ERROR)
80 |     {
81 |         OutputDebugStringA("Bind failed\n");
82 |         return 1;
83 |     }
84 | 
85 |     OutputDebugStringA("Now we bound boyz\n");
86 | 
87 |     char msg[1500];
88 | 
89 |     while (true)
90 |     {
91 |         SOCKADDR_IN clientAddr;
92 |         int clientLen = sizeof(clientAddr);
93 | 
94 |         int bytesRead = recvfrom(s, msg, sizeof(msg), 0, (SOCKADDR*)&clientAddr, &clientLen);
95 |         int64_t recvTimeUs = (int64_t)((GetTime() - startTime) * 1000000.0);
96 | 
97 |         int64_t guessedTime = *(int64_t*)(msg + 8);
98 |         int64_t diff = recvTimeUs - guessedTime;
99 | 
100 |         pingCamEntry* pce = getPingEntry(clientAddr.sin_addr.s_addr);
101 | 
102 |         if (diff < 10000)
103 |             pce->avgTime = pce->avgTime * 0.8f + (float)diff * 0.2f;
104 |         else
105 |             pce->avgTime = 10000.0f;
106 | 
107 |         *(int64_t*)(msg + 8) = recvTimeUs;
108 |         *(float*)(msg + 16) = pce->avgTime;
109 |         sendto(s, msg, 20, 0, (SOCKADDR*)&clientAddr, clientLen);
110 | 
111 |         //char outMsg[256];
112 |         //sprintf(outMsg, "Got UDP packet (%u) %s:%d - %d %d %d %d %f\n", clientAddr.sin_addr, inet_ntoa(clientAddr.sin_addr), ntohs(clientAddr.sin_port), bytesRead, recvTimeUs, guessedTime, diff, pce->avgTime);
113 |         //OutputDebugStringA(outMsg);
114 |     }
115 | 
116 |     closesocket(s);
117 |     WSACleanup();
118 | 
119 |     return 0;
120 | }
121 | 
122 | int main(int argc, char* argv[])
123 | {
124 |     timeSyncThreadHandle = CreateThread(0, 0, timeSyncThreadProc, NULL, 0, NULL);
125 | 
126 |     QApplication a(argc, argv);
127 |     MainWindow w;
128 |     w.show();
129 | 
130 |     return a.exec();
131 | }
132 | 
--------------------------------------------------------------------------------
/VectorSynth/mainwindow.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 | 
3 | #include <QMainWindow>
4 | #include <QUdpSocket>
5 | #include "decoder.h"
6 | #include "tracker.h"
7 | #include "take.h"
8 | #include "timelineWidget.h"
9 | #include "trackerConnection.h"
10 | #include "serverThreadWorker.h"
11 | #include "liveTracker.h"
12 | 
13 | class CameraView;
14 | class SceneView;
15 | 
16 | namespace Ui
17 | {
18 |     class MainWindow;
19 | }
20 | 
21 | class MainWindow : public QMainWindow
22 | {
23 |     Q_OBJECT
24 | 
25 | public:
26 | 
27 |     explicit MainWindow(QWidget* parent = 0);
28 |     ~MainWindow();
29 | 
30 | signals:
31 | 
32 |     void OnServerStart();
33 |     void OnSendData(int ClientId, QByteArray Data);
34 |     void OnUpdateParams(unsigned int SerialId, QByteArray Props);
35 |     void OnMaskChange(int ClientId, QByteArray Data);
36 |     void OnStartLiveFeed(int ClientId);
37 |     void OnResetFrameIds();
38 |     void OnStartRecording(QString TakeName);
39 |     void OnStopRecording();
40 |     void OnStartCalibrating(int TrackerId);
41 |     void OnStopCalibrating();
42 |     void OnStartViewSteam(int TrackedId, int StreamMode);
43 | 
44 | public slots:
45 | 
46 |     void OnCalibrationStartClick();
47 |     void OnCalibrationStopClick();
48 | 
49 |     void OnResetFrameIdsClick();
50 |     void OnStartRecordingClick();
51 | 
52 |     void OnTimerTick();
53 |     void OnTimelineTimerTick();
54 |     void OnSceneViewTimerTick();
55 | 
56 |     void OnTrackerConnected(int TrackerId);
57 |     void OnTrackerDisconnected(int TrackerId);
58 |     void OnTrackerFrame(int TrackerId);
59 |     void OnTrackerMarkersFrame(int TrackerId);
60 |     void OnTrackerInfoUpdate(int TrackerId);
61 | 
62 |     void OnCamSensitivityChange(int Value);
63 | 
64 |     void OnExposureEditingFinished();
65 |     void OnIsoEditingFinished();
66 |     void OnFpsEditingFinished();
67 | 
68 |     void OnLoadTakeClick();
69 |     void OnSaveTakeClick();
70 |     void OnNextFrameClick();
71 |     void OnNextFrameJumpClick();
72 |     void OnPrevFrameClick();
73 |     void OnPrevFrameJumpClick();
74 |     void OnPlayClick();
75 | 
76 |     void OnTimelineChange(int Value);
77 | 
78 |     void OnBuild3DMarkersClicked();
79 |     void OnAutoLabelClicked();
80 |     void OnBuildFundamentalMatClicked();
81 |     void OnBundleAdjustClicked();
82 |     void OnSelectWorldBasisClicked();
83 |     void OnAssignWorldBasisClicked();
84 |     void OnPushToLiveClicked();
85 | 
86 |     // Tracker view buttons.
87 |     void OnToggleUpdateClicked();
88 |     void OnToggleMaskClicked();
89 |     void OnToggleDistortedMarkersClicked();
90 |     void OnToggleUndistortedMarkersClicked();
91 |     void OnToggleReprojectedMarkersClicked();
92 |     void OnTogglePixelGridClicked();
93 | 
94 |     // Scene view buttons.
95 |     void OnToggleMarkerSourcesClicked();
96 |     void OnToggleRaysClicked();
97 |     void OnToggleExpandedMarkersClicked();
98 |     void OnShowLiveClicked();
99 | 
100 |     void OnPlayTimerTick();
101 | 
102 |     void OnBroadcastRead();
103 | 
104 | private:
105 | 
106 |     Ui::MainWindow* ui;
107 |     TimelineWidget* _timeline;
108 |     CameraView* _cameraView;
109 |     QUdpSocket* _udpSocket;
110 |     QUdpSocket* _recvSocket;
111 |     QTimer* _timer;
112 |     QTimer* _timelineTimer;
113 |     QTimer* _sceneViewTimer;
114 |     QHostAddress _localIp;
115 |     SceneView* _glView;
116 |     ServerThreadWorker* _serverWorker;
117 |     QThread _serverThread;
118 |     QSignalMapper* _deviceListMapper;
119 |     QElapsedTimer _mainTimer;
120 | 
121 |     LiveTake* _liveTake;
122 |     LoadedTake* _loadedTake;
123 |     Take* _take;
124 | 
125 |     int _timelineRequestedFrame;
126 |     int _timelineCurrentFrame;
127 | 
128 |     QTimer* _playTimer;
129 |     float _playFrame;
130 | 
131 |     bool _recording;
132 | 
133 |     void _LoadTakeTracker(int Id);
134 |     void _AdvanceTakeFrame(int Camera, int Frames);
135 | 
136 | public:
137 | 
138 |     void viewFeed(int TrackedId, int StreamMode);
139 |     void selectTracker(LiveTracker* Tracker);
140 |     //LiveTracker* GetTracker(int TrackerId);
141 |     void changeMask(LiveTracker* Tracker);
142 |     void updateTakeList();
143 | };
--------------------------------------------------------------------------------
/VectorSynth/make.bat:
--------------------------------------------------------------------------------
1 | call "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" x86_amd64
2 | E:\Qt\5.8\msvc2015_64\bin\qmake.exe VectorSynth.pro
3 | pause
--------------------------------------------------------------------------------
/VectorSynth/objLoader.cpp:
--------------------------------------------------------------------------------
1 | #include "objLoader.h"
2 | 
3 | #include <stdio.h>
4 | #include <math.h>
5 | #include <string.h>
6 | 
7 | int _ParseInt(char* Buffer, int* Index, int Length)
8 | {
9 |     int part = 0;
10 |     bool neg = false;
11 | 
12 |     int ret;
13 | 
14 |     while (*Index < Length && (Buffer[*Index] > '9' || Buffer[*Index] < '0') && Buffer[*Index] != '-')
15 |         (*Index)++;
16 | 
17 |     // sign
18 |     if (Buffer[*Index] == '-')
19 |     {
20 |         neg = true;
21 |         (*Index)++;
22 |     }
23 | 
24 |     // integer part
25 |     while (*Index < Length && !(Buffer[*Index] > '9' || Buffer[*Index] < '0'))
26 |         part = part * 10 + (Buffer[(*Index)++] - '0');
27 | 
28 |     ret = neg ? (part * -1) : part;
29 |     return ret;
30 | }
31 | 
32 | float _ParseFloat(char* Buffer, int* Index, int Length)
33 | {
34 |     int part = 0;
35 |     bool neg = false;
36 | 
37 |     float ret;
38 | 
39 |     // find start
40 |     while (*Index < Length && (Buffer[*Index] < '0' || Buffer[*Index] > '9') && Buffer[*Index] != '-' && Buffer[*Index] != '.')
41 |         (*Index)++;
42 | 
43 |     // sign
44 |     if (Buffer[*Index] == '-')
45 |     {
46 |         neg = true;
47 |         (*Index)++;
48 |     }
49 | 
50 |     // integer part
51 |     while (*Index < Length && !(Buffer[*Index] > '9' || Buffer[*Index] < '0'))
52 |         part = part * 10 + (Buffer[(*Index)++] - '0');
53 | 
54 |     ret = neg ? (float)(part * -1) : (float)part;
55 | 
56 |     // float part
57 |     if (*Index < Length && Buffer[*Index] == '.')
58 |     {
59 |         (*Index)++;
60 |         double mul = 1;
61 |         part = 0;
62 | 
63 |         while (*Index < Length && !(Buffer[*Index] > '9' || Buffer[*Index] < '0'))
64 |         {
65 |             part = part * 10 + (Buffer[*Index] - '0');
66 |             mul *= 10;
67 |             (*Index)++;
68 |         }
69 | 
70 |         if (neg)
71 |             ret -= (float)part / (float)mul;
72 |         else
73 |             ret += (float)part / (float)mul;
74 | 
75 |     }
76 | 
77 |     // scientific part
78 |     if (*Index < Length && (Buffer[*Index] == 'e' || Buffer[*Index] == 'E'))
79 |     {
80 |         (*Index)++;
81 |         neg = (Buffer[*Index] == '-'); if (neg || Buffer[*Index] == '+') (*Index)++; // Fix: advance past the sign character only; the original '*Index++' incremented the pointer, not the index it points at.
82 |         part = 0;
83 |         while (*Index < Length && !(Buffer[*Index] > '9' || Buffer[*Index] < '0'))
84 |         {
85 |             part = part * 10 + (Buffer[(*Index)++] - '0');
86 |         }
87 | 
88 |         if (neg)
89 |             ret /= (float)pow(10.0, (double)part);
90 |         else
91 |             ret *= (float)pow(10.0, (double)part);
92 |     }
93 | 
94 |     return ret;
95 | }
96 | 
97 | struct HashNode
98 | {
99 |     int vertId;
100 |     HashNode* next;
101 | };
102 | 
103 | HashNode** hashMap = new HashNode*[64007];
104 | HashNode* hashNodes = new HashNode[64007];
105 | int hashNodeCount = 0;
106 | 
107 | vsVertex* verts = new vsVertex[64000];
108 | vec3* tempVerts = new vec3[64000];
109 | vec2* tempUVs = new vec2[64000];
110 | vec3* tempNormals = new vec3[64000];
111 | uint16_t* tris = new uint16_t[192000];
112 | 
113 | int tempVertCount = 0;
114 | int tempUVCount = 0;
115 | int tempNormalCount = 0;
116 | int triCount = 0;
117 | int vertCount = 0;
118 | 
119 | int _GetUniqueVert(char* Buffer, int* Index, int Length)
120 | {
121 |     int vertId = _ParseInt(Buffer, Index, Length); (*Index)++;
122 |     int uvId = _ParseInt(Buffer, Index, Length); (*Index)++;
123 |     int normalId = _ParseInt(Buffer, Index, Length);
124 | 
125 |     uint32_t hash = 0;
126 |     hash += ((int*)(tempVerts + (vertId - 1)))[0];
127 |     hash += ((int*)(tempVerts + (vertId - 1)))[1];
128 |     hash += ((int*)(tempVerts + (vertId - 1)))[2];
129 | 
130 |     hash += ((int*)(tempUVs + (uvId - 1)))[0];
131 |     hash += ((int*)(tempUVs + (uvId - 1)))[1];
132 | 
133 |     hash += ((int*)(tempNormals + (normalId - 1)))[0];
134 |     hash += ((int*)(tempNormals + (normalId - 1)))[1];
135 |     hash += ((int*)(tempNormals + (normalId - 1)))[2];
136 | 
137 |     hash %= 64007;
138 | 
139 |     // See if hash exists
140 |     int vertIndex = -1;
141 |     HashNode* next = hashMap[hash];
142 |     while (next != NULL)
143 |     {
144 |         if (verts[next->vertId].pos == tempVerts[vertId - 1] &&
145 |             verts[next->vertId].uv == tempUVs[uvId - 1] &&
146 |             verts[next->vertId].normal == tempNormals[normalId - 1])
147 |         {
148 |             vertIndex = next->vertId;
149 |             break;
150 |         }
151 |         else
152 |         {
153 |             next = next->next;
154 |         }
155 |     }
156 | 
157 |     if (vertIndex == -1)
158 |     {
159 |         verts[vertCount].pos = tempVerts[vertId - 1];
160 |         verts[vertCount].uv = tempUVs[uvId - 1];
161 |         verts[vertCount].normal = tempNormals[normalId - 1];
162 | 
163 |         HashNode* hashNode = &hashNodes[hashNodeCount++];
164 |         hashNode->next = hashMap[hash];
165 |         hashNode->vertId = vertCount;
166 |         hashMap[hash] = hashNode;
167 | 
168 |         return vertCount++;
169 |     }
170 | 
171 |     return vertIndex;
172 | }
173 | 
174 | vsOBJModel CreateOBJ(const char* FileName)
175 | {
176 |     vsOBJModel result = {};
177 | 
178 |     tempVertCount = 0;
179 |     tempUVCount = 0;
180 |     tempNormalCount = 0;
181 |     triCount = 0;
182 |     vertCount = 0; hashNodeCount = 0; // Also reset the hash node pool between loads.
183 |     memset(hashMap, 0, sizeof(HashNode*) * 64007);
184 | 
185 |     FILE* file = fopen(FileName, "rb");
186 |     fseek(file, 0, SEEK_END);
187 |     int fileLen = ftell(file);
188 |     fseek(file, 0, SEEK_SET);
189 |     char* fileBuffer = new char[fileLen + 1];
190 |     fread(fileBuffer, 1, fileLen, file);
191 |     fileBuffer[fileLen] = 0;
192 | 
193 |     int idx = 0;
194 |     char c = fileBuffer[idx++];
195 | 
196 |     while (c != 0)
197 |     {
198 |         if (c == 'v')
199 |         {
200 |             c = fileBuffer[idx++];
201 | 
202 |             if (c == ' ')
203 |             {
204 |                 vec3 attr;
205 |                 attr.x = _ParseFloat(fileBuffer, &idx, fileLen); idx++;
206 |                 attr.z = _ParseFloat(fileBuffer, &idx, fileLen); idx++; // Note: Y and Z are deliberately swapped on import.
207 |                 attr.y = _ParseFloat(fileBuffer, &idx, fileLen); idx++;
208 |                 c = fileBuffer[idx++];
209 | 
210 |                 if (attr.x == 0.0f) attr.x = 0.0f; // Normalizes -0.0 to +0.0 so the bitwise vertex hash/equality above treats them the same (same below).
211 |                 if (attr.y == 0.0f) attr.y = 0.0f;
212 |                 if (attr.z == 0.0f) attr.z = 0.0f;
213 |                 //attr.x = -attr.x;
214 | 
215 |                 tempVerts[tempVertCount++] = attr;
216 |             }
217 |             else if (c == 't')
218 |             {
219 |                 vec2 attr;
220 |                 attr.x = _ParseFloat(fileBuffer, &idx, fileLen); idx++;
221 |                 attr.y = _ParseFloat(fileBuffer, &idx, fileLen); idx++;
222 |                 c = fileBuffer[idx++];
223 | 
224 |                 if (attr.x == 0.0f) attr.x = 0.0f;
225 |                 if (attr.y == 0.0f) attr.y = 0.0f;
226 | 
227 |                 tempUVs[tempUVCount++] = attr;
228 |             }
229 |             else if (c == 'n')
230 |             {
231 |                 vec3 attr;
232 |                 attr.x = _ParseFloat(fileBuffer, &idx, fileLen); idx++;
233 |                 attr.z = _ParseFloat(fileBuffer, &idx, fileLen); idx++;
234 |                 attr.y = _ParseFloat(fileBuffer, &idx, fileLen); idx++;
235 |                 c = fileBuffer[idx++];
236 | 
237 |                 if (attr.x == 0.0f) attr.x = 0.0f;
238 |                 if (attr.y == 0.0f) attr.y = 0.0f;
239 |                 if (attr.z == 0.0f) attr.z = 0.0f;
240 | 
241 |                 tempNormals[tempNormalCount++] = attr;
242 |             }
243 |         }
244 |         else if (c == 'f')
245 |         {
246 |             c = fileBuffer[idx++];
247 | 
248 |             int rootVertId = _GetUniqueVert(fileBuffer, &idx, fileLen);
249 |             int currVertId = _GetUniqueVert(fileBuffer, &idx, fileLen);
250 |             c = fileBuffer[idx++];
251 | 
252 |             while (c == ' ')
253 |             {
254 |                 int nextVertId = currVertId;
255 |                 currVertId = _GetUniqueVert(fileBuffer, &idx, fileLen);
256 |                 tris[triCount++] = rootVertId;
257 |                 tris[triCount++] = currVertId;
258 |                 tris[triCount++] = nextVertId;
259 |                 c = fileBuffer[idx++];
260 |             }
261 |         }
262 |         else
263 |         {
264 |             while (c != '\n' && c != 0)
265 |                 c = fileBuffer[idx++];
266 | 
267 |             if (c == '\n')
268 |                 c = fileBuffer[idx++];
269 |         }
270 |     }
271 | 
272 |     delete[] fileBuffer;
273 |     fclose(file);
274 | 
275 |     // TODO: The object just contains pointers to our permanent storage.
276 |     result.verts = verts;
277 |     result.indices = tris;
278 |     result.vertCount = vertCount;
279 |     result.indexCount = triCount;
280 | 
281 |     //std::cout << "Loaded OBJ " << FileName << " in " << (time * 1000.0) << "ms Verts: " << vertCount << " Tris: " << (triCount / 3) << "\n";
282 | 
283 |     /*
284 |     // Determine how good the hash table usage is.
285 |     int maxCollisions = 0;
286 |     int emptyBuckets = 0;
287 | 
288 |     int entryCount[5] = {};
289 | 
290 |     for (int i = 0; i < 64007; ++i)
291 |     {
292 |         if (hashMap[i] == NULL)
293 |         {
294 |             ++emptyBuckets;
295 |             entryCount[0]++;
296 |         }
297 |         else
298 |         {
299 |             int entries = 0;
300 |             HashNode *next = hashMap[i];
301 |             while (next != NULL)
302 |             {
303 |                 ++entries;
304 |                 next = next->next;
305 |             }
306 | 
307 |             if (entries > maxCollisions)
308 |                 maxCollisions = entries;
309 | 
310 |             entryCount[entries]++;
311 |         }
312 |     }
313 | 
314 |     std::cout << "Hash stats - Empty Buckets: " << emptyBuckets << " Max Entries: " << maxCollisions << "\n";
315 | 
316 |     for (int i = 0; i < 5; ++i)
317 |     {
318 |         std::cout << "Entries[" << i << "]: " << entryCount[i] << "\n";
319 |     }
320 |     //*/
321 | 
322 |     return result;
323 | }
--------------------------------------------------------------------------------
/VectorSynth/objLoader.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 | 
3 | #include <stdint.h>
4 | 
5 | typedef uint16_t u16;
6 | 
7 | struct vec2
8 | {
9 |     float x;
10 |     float y;
11 | 
12 |     bool operator==(const vec2& R) const;
13 | };
14 | 
15 | inline bool vec2::operator==(const vec2& R) const
16 | {
17 |     return (x == R.x && y == R.y);
18 | }
19 | 
20 | struct vec3
21 | {
22 |     float x;
23 |     float y;
24 |     float z;
25 | 
26 |     bool operator==(const vec3& R) const;
27 | };
28 | 
29 | inline bool vec3::operator==(const vec3& R) const
30 | {
31 |     return (x == R.x && y == R.y && z == R.z);
32 | }
33 | 
34 | struct vec4
35 | {
36 |     float x;
37 |     float y;
38 |     float z;
39 |     float w;
40 | };
41 | 
42 | struct vsTexVertex
43 | {
44 |     vec3 pos;
45 |     vec2 uv;
46 | };
47 | 
48 | struct vsVertex
49 | {
50 |     vec3 pos;
51 |     vec2 uv;
52 |     vec3 normal;
53 | };
54 | 
55 | struct vsOBJModel
56 | {
57 |     vsVertex* verts;
58 |     u16* indices;
59 |     int vertCount;
60 |     int indexCount;
61 | };
62 | 
63 | vsOBJModel CreateOBJ(const char* FileName);
64 | 
--------------------------------------------------------------------------------
/VectorSynth/sceneView.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 | 
3 | #include <QOpenGLWidget>
4 | #include <QOpenGLFunctions>
5 | #include <QOpenGLShaderProgram>
6 | #include <QOpenGLBuffer>
7 | #include <QOpenGLTexture>
8 | 
9 | #include <QMatrix4x4>
10 | #include <QMouseEvent>
11 | #include <QWheelEvent>
12 | #include <QElapsedTimer>
13 | 
14 | #include "take.h"
15 | 
16 | struct VertexData
17 | {
18 |     QVector3D position;
19 |     QVector3D color;
20 | };
21 | 
22 | struct VertexDataShaded
23 | {
24 |     QVector3D position;
25 |     QVector3D color;
26 |     QVector3D normal;
27 | };
28 | 
29 | class Model
30 | {
31 | public:
32 | 
33 |     QOpenGLBuffer vertBuffer;
34 |     QOpenGLBuffer indexBuffer;
35 |     int indexCount;
36 | 
37 |     Model();
38 |     void createFromOBJ(QString Filename);
39 | };
40 | 
41 | struct FontGlyph
42 | {
43 |     QRect bounds;
44 |     float advance;
45 |     float uvX1;
46 |     float uvY1;
47 |     float uvX2;
48 |     float uvY2;
49 | };
50 | 
51 | struct FontVertex
52 | {
53 |     QVector3D pos;
54 |     QVector2D uv;
55 |     QVector4D col;
56 | };
57 | 
58 | class SceneView : public QOpenGLWidget, protected QOpenGLFunctions
59 | {
60 | public:
61 | 
62 |     Take* take;
63 |     int timelineFrame;
64 |     bool selectWorldBasis;
65 |     QVector3D _lastSelectedPos[3];
66 |     bool showMarkerSources;
67 |     bool showRays;
68 |     bool showExpandedMarkers;
69 | 
70 |     SceneView(QWidget *Parent = 0);
71 | 
72 |     void tick();
73 | 
74 |     QVector3D backColor;
75 | 
76 |     void gizmoClear();
77 |     void gizmoPush(VertexData Vert);
78 | 
79 | protected:
80 | 
81 |     void initializeGL();
82 |     void resizeGL(int W, int H);
83 |     void paintGL();
84 |     void mousePressEvent(QMouseEvent* Event);
85 |     void mouseMoveEvent(QMouseEvent* Event);
86 |     void mouseReleaseEvent(QMouseEvent* Event);
87 |     void wheelEvent(QWheelEvent* Event);
88 | 
89 | private:
90 | 
91 |     enum TextAlignment
92 |     {
93 |         SVTA_LEFT,
94 |         SVTA_MIDDLE,
95 |         SVTA_RIGHT,
96 |     };
97 | 
98 |     QImage* _fontSheet;
99 |     FontGlyph _fontGlyphs[256];
100 |     QOpenGLTexture* _fontTexture;
101 |     QOpenGLShaderProgram _fontShader;
102 |     int _fontSpriteMax;
103 |     int _fontSpriteCount;
104 |     QOpenGLBuffer _fontVertBuf;
105 |     FontVertex* _fontVerts;
106 | 
107 |     bool _initialized;
108 | 
109 |     QOpenGLShaderProgram _minimalShader;
110 |     QOpenGLShaderProgram _basicShader;
111 |     QOpenGLShaderProgram _shadedShader;
112 | 
113 |     int _gizmoBufferSize;
114 |     int _gizmoIndex;
115 |     QOpenGLBuffer _gizmoBuffer;
116 |     VertexData *_gizmoData;
117 | 
118 |     /*
119 |     QOpenGLBuffer _arrayBuf;
120 |     QOpenGLBuffer _indexBuf;
121 |     int _indexCount;
122 |     */
123 | 
124 |     QOpenGLBuffer _gridArrayBuffer;
125 |     int _gridPrimCount;
126 | 
127 |     QMatrix4x4 _projMat;
128 | 
129 |     QElapsedTimer _time;
130 |     int64_t _lastTime;
131 | 
132 |     bool _mouseLeft;
133 |     bool _mouseRight;
134 |     QPointF _mouseDownPos;
135 |     QPointF _mouseMovedPos;
136 | 
137 |     QMatrix4x4 _camViewMat;
138 |     float _camX;
139 |     float _camY;
140 |     float _camZoom;
141 |     QVector3D _camTranslate;
142 | 
143 |     Model _sphereModel;
144 | 
145 |     QVector3D _pickPos;
146 |     QVector3D _pickDir;
147 | 
148 |     int _selectedIdx;
149 | 
150 |     void _mousePick(QVector2D ScreenPos);
151 |     void _drawText(int X, int Y, QString Text, QVector4D Color, float Scale, TextAlignment Alignment);
152 | 
153 |     bool _projectToScreen(QVector3D Pos, QVector2D& ScreenPoint);
154 | };
--------------------------------------------------------------------------------
/VectorSynth/serverThreadWorker.cpp:
--------------------------------------------------------------------------------
1 | #include "serverThreadWorker.h"
2 | 
3 | void ServerThreadWorker::OnStart()
4 | {
5 |     qDebug() << "Starting Server Thread Worker" << QThread::currentThreadId();
6 | 
7 |     takeStartFrameId = 0;
8 | 
9 |     _tcpServer = new QTcpServer(this);
10 |     connect(_tcpServer, &QTcpServer::newConnection, this, &ServerThreadWorker::OnTcpServerConnectionAvailable);
11 |     _tcpServer->listen(QHostAddress::Any, 8000);
12 | }
13 | 
14 | TrackerConnection* ServerThreadWorker::LockConnection(int TrackerId)
15 | {
16 |     _connectionMutex.lock();
17 | 
18 |     TrackerConnection* tracker = _GetTracker(TrackerId);
19 | 
20 |     if (!tracker)
21 |     {
22 |         _connectionMutex.unlock();
23 |         return 0;
24 |     }
25 | 
26 |     tracker->Lock();
27 | 
28 |     return tracker;
29 | }
30 | 
31 | void ServerThreadWorker::UnlockConnection(TrackerConnection* Tracker)
32 | {
33 |     if (!Tracker)
34 |         return;
35 | 
36 |     Tracker->Unlock();
37 |     _connectionMutex.unlock();
38 | }
39 | 
40 | void ServerThreadWorker::OnTcpServerConnectionAvailable()
41 | {
42 |     QTcpSocket* tcpSocket = _tcpServer->nextPendingConnection();
43 | 
44 |     if (tcpSocket)
45 |     {
46 |         _connectionMutex.lock();
47 |         qDebug() << "New TCP Client" << QThread::currentThreadId();
48 |         tcpSocket->setReadBufferSize(1024 * 1024);
49 |         TrackerConnection* nc = new TrackerConnection(++_nextConnectionId, tcpSocket, this);
50 |         _connections[_nextConnectionId] = nc;
51 |         connect(nc, &TrackerConnection::OnNewFrame, this, &ServerThreadWorker::OnNewFrame);
52 |         connect(nc, &TrackerConnection::OnNewMarkersFrame, this, &ServerThreadWorker::OnNewMarkersFrame);
53 |         connect(nc, &TrackerConnection::OnDisconnected, this, &ServerThreadWorker::OnDisconnected);
54 |         connect(nc, &TrackerConnection::OnInfoUpdate, this, &ServerThreadWorker::OnInfoUpdate);
55 |         _connectionMutex.unlock();
56 | 
57 |         emit OnTrackerConnected(_nextConnectionId);
58 |     }
59 | }
60 | 
61 | void ServerThreadWorker::OnDisconnected(TrackerConnection* Tracker)
62 | {
63 |     qDebug() << "Tracker Disconnected";
64 | 
65 |     _connectionMutex.lock();
66 |     int trackerId = Tracker->id;
67 |     _connections.erase(trackerId);
68 |     delete Tracker;
69 |     _connectionMutex.unlock();
70 | 
71 |     emit OnTrackerDisconnected(trackerId);
72 | }
73 | void ServerThreadWorker::OnInfoUpdate(TrackerConnection* Tracker)
74 | {
75 |     emit OnTrackerInfoUpdate(Tracker->id);
76 | }
77 | 
78 | void ServerThreadWorker::OnNewFrame(TrackerConnection* Tracker)
79 | {
80 |     emit OnTrackerFrame(Tracker->id);
81 | }
82 | 
83 | void ServerThreadWorker::OnNewMarkersFrame(TrackerConnection* Tracker)
84 | {
85 |     emit OnTrackerMarkersFrame(Tracker->id);
86 | }
87 | 
88 | void ServerThreadWorker::OnMaskChange(int ClientId, QByteArray Data)
89 | {
90 |     TrackerConnection* tracker = _GetTracker(ClientId);
91 | 
92 |     if (tracker)
93 |     {
94 |         memcpy(tracker->props.maskData, Data.data(), sizeof(tracker->props.maskData));
95 | 
96 |         char asciiMask[64 * 44];
97 | 
98 |         for (int i = 0; i < 64 * 44; ++i)
99 |         {
100 |             asciiMask[i] = tracker->props.maskData[i] + '0';
101 |         }
102 | 
103 |         //QByteArray mask((char*)tracker->maskData, sizeof(tracker->maskData));
104 | 
105 |         QString cmd = QString("sm,") + QString::fromLatin1(asciiMask, sizeof(tracker->props.maskData)) + QString("\n");
106 |         QByteArray data(cmd.toLatin1());
107 | 
108 |         tracker->socket->write(data);
109 |     }
110 | }
111 | 
112 | void ServerThreadWorker::OnSendData(int ClientId, QByteArray Data)
113 | {
114 |     if (ClientId == -1)
115 |     {
116 |         for (std::map<int, TrackerConnection*>::iterator it = _connections.begin(); it != _connections.end(); ++it)
117 |             it->second->socket->write(Data);
118 |     }
119 |     else
120 |     {
121 |         TrackerConnection* tracker = _GetTracker(ClientId);
122 |         if (tracker)
123 |         {
124 |             tracker->socket->write(Data);
125 |         }
126 |     }
127 | }
128 | 
129 | void ServerThreadWorker::OnUpdateTracker(uint32_t SerialId, QByteArray Props)
130 | {
131 |     for (std::map<int, TrackerConnection*>::iterator it = _connections.begin(); it != _connections.end(); ++it)
132 |     {
133 |         TrackerConnection* t = it->second;
134 |         if (t->serial == SerialId)
135 |         {
136 |             t->UpdateProperties(Props);
137 |         }
138 |     }
139 | }
140 | 
141 | void ServerThreadWorker::OnViewFeed(int ClientId, int StreamMode)
142 | {
143 |     //qDebug() << "View Feed" << ClientId << QThread::currentThreadId();
144 |     //_StopAllTrackerCams();
145 | 
146 |     TrackerConnection* tracker = _GetTracker(ClientId);
147 |     if (tracker)
148 |     {
149 |         if (StreamMode == 0)
150 |         {
151 |             tracker->socket->write("ec\n");
152 |             tracker->streamMode = 0;
153 |             tracker->streaming = false;
154 |         }
155 |         else if (StreamMode == 1)
156 |         {
157 |             tracker->socket->write("cm,0\n");
158 |             tracker->socket->write("sc\n");
159 |             tracker->streamMode = 1;
160 |             tracker->streaming = true;
161 |         }
162 |         else if (StreamMode == 2)
163 |         {
164 |             tracker->socket->write("cm,1\n");
165 |             tracker->socket->write("sc\n");
166 |             tracker->streamMode = 2;
167 |             tracker->streaming = true;
168 |         }
169 |     }
170 | }
171 | 
172 | void ServerThreadWorker::InternalRecordingStart()
173 | {
174 |     for (std::map<int, TrackerConnection*>::iterator it = _connections.begin(); it != _connections.end(); ++it)
175 |     {
176 |         it->second->socket->write("sc\n");
177 |     }
178 | }
179 | 
180 | void ServerThreadWorker::OnResetFrameIds()
181 | {
182 |     takeStartFrameId = 0;
183 | }
184 | 
185 | void ServerThreadWorker::OnStartRecording(QString TakeName)
186 | {
187 |     qDebug() << "Start Recording";
188 | 
189 |     takeStartFrameId = 0;
190 | 
191 |     for (std::map<int, TrackerConnection*>::iterator it = _connections.begin(); it != _connections.end(); ++it)
192 |     {
193 |         //it->second->socket->write("ec\n");
194 |         //it->second->streaming = false;
195 |         //it->second->recording = false;
196 |         it->second->StartRecording(TakeName);
197 |     }
198 | }
199 | 
200 | void ServerThreadWorker::OnStopRecording()
201 | {
202 |     qDebug() << "Stop Recording";
203 |     for (std::map<int, TrackerConnection*>::iterator it = _connections.begin(); it != _connections.end(); ++it)
204 |     {
205 |         //it->second->socket->write("ec\n");
206 |         it->second->StopRecording();
207 |     }
208 | }
209 | 
210 | void ServerThreadWorker::OnStartCalibrating(int TrackerId)
211 | {
212 |     for (std::map<int, TrackerConnection*>::iterator it = _connections.begin(); it != _connections.end(); ++it)
213 |     {
214 |         it->second->decoder->findCalibrationSheet = false;
215 |     }
216 | 
217 |     TrackerConnection* tracker = _GetTracker(TrackerId);
218 |     qDebug() << "Start calibrating tracker " << TrackerId;
219 |     if (tracker)
220 |     {
221 |         tracker->decoder->findCalibrationSheet = true;
222 |     }
223 | }
224 | 
225 | void ServerThreadWorker::OnStopCalibrating()
226 | {
227 |     qDebug() << "Stop Calibrating";
228 |     for (std::map<int, TrackerConnection*>::iterator it = _connections.begin(); it != _connections.end(); ++it)
229 |     {
230 |         it->second->decoder->findCalibrationSheet = false;
231 |     }
232 | }
233 | 
234 | TrackerConnection* ServerThreadWorker::_GetTracker(int ClientId)
235 | {
236 |     if (_connections.find(ClientId) == _connections.end())
237 |     {
238 |         return 0;
239 |     }
240 | 
241 |     return _connections[ClientId];
242 | }
243 | 
244 | void ServerThreadWorker::_StopAllTrackerCams()
245 | {
246 |     for (std::map<int, TrackerConnection*>::iterator it = _connections.begin(); it != _connections.end(); ++it)
247 |     {
248 |         it->second->socket->write("ec\n");
249 |         it->second->streaming = false;
250 |         it->second->recording = false;
251 |     }
252 | }
--------------------------------------------------------------------------------
/VectorSynth/serverThreadWorker.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 | 
3 | #include <QTcpServer>
4 | #include <QThread>
5 | #include "trackerConnection.h"
6 | 
7 | QT_USE_NAMESPACE
8 | 
9 | class ServerThreadWorker : public QThread
10 | {
11 |     Q_OBJECT
12 | 
13 | signals:
14 | 
15 |     void OnTrackerConnected(int TrackedId);
16 |     void OnTrackerDisconnected(int TrackedId);
17 |     void OnTrackerFrame(int TrackedId);
18 |     void OnTrackerMarkersFrame(int TrackedId);
19 |     void OnTrackerInfoUpdate(int TrackerId);
20 | 
21 | public slots:
22 | 
23 |     void OnStart();
24 |     void OnTcpServerConnectionAvailable();
25 |     void OnDisconnected(TrackerConnection* Tracker);
26 |     void OnNewFrame(TrackerConnection* Tracker);
27 |     void OnNewMarkersFrame(TrackerConnection* Tracker);
28 |     void OnInfoUpdate(TrackerConnection* Tracker);
29 |     void OnSendData(int ClientId, QByteArray Data);
30 |     void OnUpdateTracker(unsigned int SerialId, QByteArray Props);
31 |     void OnMaskChange(int ClientId, QByteArray Data);
32 |     void OnViewFeed(int ClientId, int StreamMode);
33 |     void InternalRecordingStart();
34 |     void OnResetFrameIds();
35 |     void OnStartRecording(QString TakeName);
36 |     void OnStopRecording();
37 |     void OnStartCalibrating(int TrackerId);
38 |     void OnStopCalibrating();
39 | 
40 | public:
41 | 
42 |     int64_t takeStartFrameId;
43 | 
44 |     TrackerConnection* LockConnection(int TrackerId);
45 |     void UnlockConnection(TrackerConnection* Tracker);
46 | 
47 | private:
48 | 
49 |     QMutex _connectionMutex;
50 | 
51 |     std::map<int, TrackerConnection*> _connections;
52 | 
53 |     //bool _recording = false;
54 |     int _nextConnectionId = 0;
55 |     QTcpServer* _tcpServer;
56 | 
57 |     void _StopAllTrackerCams();
58 |     TrackerConnection* _GetTracker(int ClientId);
59 | };
--------------------------------------------------------------------------------
/VectorSynth/take.h:
--------------------------------------------------------------------------------
1 | #pragma once
2 | 
3 | #include <QString>
4 | #include <QVector3D>
5 | #include <QList>
6 | #include "decoder.h"
7 | #include "liveTracker.h"
8 | #include "takeTracker.h"
9 | 
10 | class MarkerCalib
11 | {
12 | public:
13 | 
14 |     QVector3D pos;
15 |     int frame;
16 | };
17 | 
18 | class Marker3D
19 | {
20 | public:
21 | 
22 |     enum Type
23 |     {
24 |         T_UNLABLED,
25 |         T_LABLED,
26 |         T_GHOST,
27 |     };
28 | 
29 |     QVector3D pos;
30 |     QList<Marker2D> sources;
31 |     int id;
32 |     QVector3D velocity;
33 |     QVector3D bVelocity;
34 |     Type type;
35 | };
36 | 
37 | class MarkerGroup
38 | {
39 | public:
40 | 
41 |     QVector3D pos;
42 |     QVector3D avgPos;
43 |     int count;
44 |     QList<Marker2D> sources;
45 | };
46 | 
47 | class MarkerLabel
48 | {
49 | public:
50 | 
51 |     QString name;
52 | };
53 | 
54 | class Take
55 | {
56 | public:
57 | 
58 |     QString name;
59 | 
60 |     QList<TakeTracker*> trackers;
61 |     std::map<int, LiveTracker*> liveTrackers;
62 |     int selectedTracker;
63 | 
64 |     int timeEnd;
65 |     int timeFrames;
66 |     int frameDuration;
67 |     bool isLive;
68 |     bool isRecording;
69 | 
70 |     std::vector<std::vector<Marker3D>> markers;
71 |     std::map<int, MarkerLabel> labels;
72 |     std::vector<MarkerCalib> refMarkers;
73 | 
74 |     Take();
75 |     ~Take();
76 | 
77 |     void Destroy();
78 |     void LoadTake(QString Name);
79 |     void Save();
80 | 
81 |     void SetFrame(int TimelineFrame);
82 |     void Build2DMarkers(int StartFrame, int EndFrame);
83 |     void Build3DMarkers(int StartFrame, int EndFrame);
84 |     void BuildLabels(int StartFrame, int EndFrame);
85 |     void BuildExtrinsics(int StartFrame, int EndFrame);
86 |     void BundleAdjust(int StartFrame, int EndFrame);
87 |     void SaveSSBAFile();
88 | 
89 | protected:
90 | 
91 |     void _AdjustRuntime();
92 |     void _ClosestPointsLines(QVector3D P1, QVector3D D1, QVector3D P2, QVector3D D2, QVector3D* C1, QVector3D* C2);
93 |     void _BuildPose(int StartFrame, int EndFrame, TakeTracker* Root, TakeTracker* Tracker, std::vector<Marker2D>& Markers, cv::Mat& Pose);
94 | };
95 | 
96 | class LiveTake : public Take
97 | {
98 | public:
99 | 
100 |     LiveTake();
101 | };
102 | 
103 | class LoadedTake : public Take
104 | {
105 | public:
106 | 
107 |     LoadedTake();
108 | };
--------------------------------------------------------------------------------
/VectorSynth/takeTracker.cpp:
--------------------------------------------------------------------------------
1 | #include "takeTracker.h"
2 | #include <QFile>
3 | #include <QJsonObject>
4 | #include <QJsonDocument>
5 | #include <QJsonArray>
6 | #include <QDebug>
7 | #include <sstream>
8 | #include "sceneView.h"
9 | 
10 | QMatrix4x4 TakeTracker::WorldFromPose(cv::Mat Pose)
11 | {
12 |     QMatrix4x4 result;
13 | 
14 |     cv::Mat camR = cv::Mat(3, 3, CV_64F);
15 |     cv::Mat camT = cv::Mat(3, 1, CV_64F);
16 | 
17 |     for (int iY = 0; iY < 3; ++iY)
18 |     {
19 |         for (int iX = 0; iX < 3; ++iX)
20 |         {
21 |             camR.at<double>(iX, iY) = Pose.at<double>(iX, iY);
22 |         }
23 |     }
24 | 
25 |     for (int iX = 0; iX < 3; ++iX)
26 |     {
27 |         camT.at<double>(iX, 0) = Pose.at<double>(iX, 3);
28 |     }
29 | 
30 |     cv::Mat trueT = -(camR).t() * camT;
31 | 
32 |     result(0, 0) = camR.at<double>(0, 0);
33 |     result(1, 0) = camR.at<double>(0, 1);
34 |     result(2, 0) = camR.at<double>(0, 2);
35 |     result(3, 0) = 0;
36 | 
37 |     result(0, 1) = camR.at<double>(1, 0);
38 |     result(1, 1) = camR.at<double>(1, 1);
39 |     result(2, 1) = camR.at<double>(1, 2);
40 |     result(3, 1) = 0;
41 | 
42 |     result(0, 2) = camR.at<double>(2, 0);
43 |     result(1, 2) = camR.at<double>(2, 1);
44 |     result(2, 2) = camR.at<double>(2, 2);
45 |     result(3, 2) = 0;
46 | 
47 |     result(0, 3) = trueT.at<double>(0);
48 |     result(1, 3) = trueT.at<double>(1);
49 |     result(2, 3) = trueT.at<double>(2);
50 |     result(3, 3) = 1;
51 | 
52 |     qDebug() << "OrigT:" << Pose.at<double>(0, 3) << Pose.at<double>(1, 3) << Pose.at<double>(2, 3);
53 |     qDebug() << "TrueT:" << trueT.at<double>(0) << trueT.at<double>(1) << trueT.at<double>(2);
54 | 
55 |     trueT = -(camR) * trueT;
56 | 
57 |     qDebug() << "RestT:" << trueT.at<double>(0) << trueT.at<double>(1) << trueT.at<double>(2);
58 | 
59 | 
60 |     return result;
61 | }
62 | 
63 | cv::Mat TakeTracker::PoseFromWorld(QMatrix4x4 World)
64 | {
65 |     cv::Mat pose = cv::Mat::eye(3, 4, CV_64F);
66 |     pose.at<double>(0, 0) = World(0, 0);
67 |     pose.at<double>(0, 1) = World(1, 0);
68 |     pose.at<double>(0, 2) = World(2, 0);
69 |     //pose.at<double>(0, 3) = 0;
70 |     pose.at<double>(1, 0) = World(0, 1);
71 |     pose.at<double>(1, 1) = World(1, 1);
72 |     pose.at<double>(1, 2) = World(2, 1);
73 |     //pose.at<double>(1, 3) = 0;
74 |     pose.at<double>(2, 0) = World(0, 2);
75 |     pose.at<double>(2, 1) = World(1, 2);
76 |     pose.at<double>(2, 2) = World(2, 2);
77 |     //pose.at<double>(2, 3) = 0;
78 | 
79 |     cv::Mat trueT = cv::Mat(3, 1, CV_64F);
80 |     trueT.at<double>(0) = World(0, 3);
81 |     trueT.at<double>(1) = World(1, 3);
82 |     trueT.at<double>(2) = World(2, 3);
83 | 
84 |     cv::Mat camR = cv::Mat(3, 3, CV_64F);
85 |     for (int iY = 0; iY < 3; ++iY)
86 |     {
87 |         for (int iX = 0; iX < 3; ++iX)
88 |         {
89 |             camR.at<double>(iX, iY) = pose.at<double>(iX, iY);
90 |         }
91 |     }
92 | 
93 |     cv::Mat camT = -(camR) * trueT;
94 | 
95 |     pose.at<double>(0, 3) = camT.at<double>(0);
96 |     pose.at<double>(1, 3) = camT.at<double>(1);
97 |     pose.at<double>(2, 3) = camT.at<double>(2);
98 |     //pose.at<double>(3, 3) = 1;
99 | 
100 |     return pose;
101 | }
102 | 
103 | TakeTracker* TakeTracker::Create(int Id, QString TakeName, uint32_t Serial, QString FilePath, LiveTracker* LiveTracker)
104 | {
105 |     QFile file(FilePath);
106 | 
107 |     qDebug() << "Tracker: Load" << Id << TakeName << Serial << FilePath;
108 | 
109 |     if (!file.open(QIODevice::ReadOnly | QIODevice::Text))
110 |     {
111 |         qDebug() << "Tracker: Load file failed";
112 |         return 0;
113 |     }
114 | 
115 |     QByteArray fileData = file.readAll();
116 |     file.close();
117 |     QJsonObject trackerObj = QJsonDocument::fromJson(fileData).object();
118 | 
119 |     TakeTracker* tracker = new TakeTracker();
120 |     tracker->id = Id;
121 |     tracker->takeName = TakeName;
122 |     tracker->serial = Serial;
123 |     tracker->liveTracker = LiveTracker;
124 |     tracker->name = trackerObj["name"].toString();
125 |     tracker->exposure = trackerObj["exposure"].toInt();
126 |     tracker->iso = trackerObj["iso"].toInt();
127 |     tracker->frameCount = 0;
128 |     tracker->vidPlaybackFrame = 0;
129 |     tracker->mode = 0;
130 | 
131 |     LiveTracker->loaded = true;
132 |     LiveTracker->id = tracker->id;
133 |     LiveTracker->name = tracker->name;
134 |     LiveTracker->serial = tracker->serial;
135 | 
136 |     tracker->decoder = new Decoder();
137 | 
138 |     QJsonArray jsonIntrinMat = trackerObj["intrinsic"].toObject()["camera"].toArray();
139 |     QJsonArray jsonIntrinDist = trackerObj["intrinsic"].toObject()["distortion"].toArray();
140 |     QJsonArray jsonExtrinProj = trackerObj["extrinsic"].toObject()["pose"].toArray();
141 | 
142 |     // Default cam calibration.
143 |     /*
144 |     calibDistCoeffs.at<double>(0) = -0.332945;
145 |     calibDistCoeffs.at<double>(1) = 0.12465;
146 |     calibDistCoeffs.at<double>(2) = 0.0020142;
147 |     calibDistCoeffs.at<double>(3) = 0.000755178;
148 |     calibDistCoeffs.at<double>(4) = -0.029228;
149 | 
150 |     calibCameraMatrix.at<double>(0, 0) = 740;
151 |     calibCameraMatrix.at<double>(1, 0) = 0;
152 |     calibCameraMatrix.at<double>(2, 0) = 0;
153 |     calibCameraMatrix.at<double>(0, 1) = 0;
154 |     calibCameraMatrix.at<double>(1, 1) = 740;
155 |     calibCameraMatrix.at<double>(2, 1) = 0;
156 |     calibCameraMatrix.at<double>(0, 2) = 512;
157 |     calibCameraMatrix.at<double>(1, 2) = 352;
158 |     calibCameraMatrix.at<double>(2, 2) = 1;
159 |     */
160 | 
161 |     tracker->distCoefs = cv::Mat::zeros(5, 1, CV_64F);
162 |     tracker->distCoefs.at<double>(0) = jsonIntrinDist[0].toDouble();
163 |     tracker->distCoefs.at<double>(1) = jsonIntrinDist[1].toDouble();
164 |     tracker->distCoefs.at<double>(2) = jsonIntrinDist[2].toDouble();
165 |     tracker->distCoefs.at<double>(3) = jsonIntrinDist[3].toDouble();
166 |     tracker->distCoefs.at<double>(4) = jsonIntrinDist[4].toDouble();
167 | 
168 |     tracker->camMat = cv::Mat::eye(3, 3, CV_64F);
169 |     tracker->camMat.at<double>(0, 0) = jsonIntrinMat[0].toDouble();
170 |     tracker->camMat.at<double>(1, 0) = jsonIntrinMat[1].toDouble();
171 |     tracker->camMat.at<double>(2, 0) = jsonIntrinMat[2].toDouble();
172 |     tracker->camMat.at<double>(0, 1) = jsonIntrinMat[3].toDouble();
173 |     tracker->camMat.at<double>(1, 1) = jsonIntrinMat[4].toDouble();
174 |     tracker->camMat.at<double>(2, 1) = jsonIntrinMat[5].toDouble();
175 |     tracker->camMat.at<double>(0, 2) = jsonIntrinMat[6].toDouble();
176 |     tracker->camMat.at<double>(1, 2) = jsonIntrinMat[7].toDouble();
177 |     tracker->camMat.at<double>(2, 2) = jsonIntrinMat[8].toDouble();
178 | 
179 |     cv::Mat pose = cv::Mat::eye(3, 4, CV_64F);
180 |     for (int iY = 0; iY < 4; ++iY)
181 |     {
182 |         for (int iX = 0; iX < 3; ++iX)
183 |         {
184 |             pose.at<double>(iX, iY) = jsonExtrinProj[iY * 3 + iX].toDouble();
185 |         }
186 |     }
187 | 
188 |     QString maskStr = trackerObj["mask"].toString();
189 |     for (int i = 0; i < maskStr.size(); ++i)
190 |     {
191 |         tracker->mask[i] = maskStr[i].toLatin1() - '0';
192 |     }
193 |     LiveTracker->setMask(tracker->mask);
194 |     memcpy(tracker->decoder->frameMaskData, tracker->mask, sizeof(mask));
195 | 
196 |     //optCamMat = getOptimalNewCameraMatrix(calibCameraMatrix, calibDistCoeffs, Size(VID_W, VID_H), 0.0, Size(VID_W, VID_H), NULL, false);
197 |     tracker->camMatOpt = cv::getOptimalNewCameraMatrix(tracker->camMat, tracker->distCoefs, cv::Size(VID_W, VID_H), 0.0, cv::Size(VID_W, VID_H), NULL, true);
198 |     tracker->SetPose(pose);
199 | 
200 |     QString infoFilePath = "project/" + TakeName + "/" + QString::number(Serial) + ".trakvid";
201 |     FILE* vidFile = fopen(infoFilePath.toLatin1(), "rb");
202 | 
203 |     if (vidFile)
204 |     {
205 |         tracker->mode = 1;
206 | 
207 |         fseek(vidFile, 0, SEEK_END);
208 |         int dataSize = ftell(vidFile);
209 |         fseek(vidFile, 0, SEEK_SET);
210 |         tracker->takeClipData = new uint8_t[dataSize];
211 |         fread(tracker->takeClipData, dataSize, 1, vidFile);
212 |         fclose(vidFile);
213 | 
214 |         int wp = 0;
215 |         int rp = 0;
216 |         bool prevFrameWasData = false;
217 |         int prevWritePtr = 0;
218 |         int prevSize = 0;
219 | 
220 |         tracker->vidFrameData.clear();
221 | 
222 |         while (rp < dataSize)
223 |         {
224 |             if (rp >= dataSize - 20)
225 |                 break;
226 | 
227 |             uint8_t* md = tracker->takeClipData + rp;
228 |             int size = *(int*)&md[0];
229 |             int type = *(int*)&md[4];
230 |             float avgMasterOffset = *(float*)&md[8];
231 |             int64_t frameId = *(int64_t*)&md[12];
232 |             rp += 20;
233 | 
234 |             if (rp + size >= dataSize)
235 |                 break;
236 | 
237 |             memcpy(tracker->takeClipData + wp, tracker->takeClipData + rp, size);
238 |             rp += size;
239 | 
(type == 2) 241 | { 242 | // Skip data frame, but remember wp for next frame start. 243 | prevFrameWasData = true; 244 | prevWritePtr = wp; 245 | prevSize = size; 246 | } 247 | else 248 | { 249 | while (tracker->frameCount < frameId) 250 | { 251 | // Dummy Frame. 252 | VidFrameData vfdDummy = {}; 253 | vfdDummy.type = 3; 254 | vfdDummy.index = tracker->frameCount++; 255 | vfdDummy.size = 0; 256 | vfdDummy.bufferPosition = 0; 257 | tracker->vidFrameData.push_back(vfdDummy); 258 | } 259 | 260 | VidFrameData vfd = {}; 261 | vfd.type = type; 262 | vfd.index = tracker->frameCount++; 263 | vfd.size = size; 264 | vfd.bufferPosition = wp; 265 | 266 | if (prevFrameWasData) 267 | { 268 | prevFrameWasData = false; 269 | vfd.bufferPosition = prevWritePtr; 270 | vfd.size += prevSize; 271 | } 272 | 273 | tracker->vidFrameData.push_back(vfd); 274 | } 275 | 276 | wp += size; 277 | } 278 | } 279 | 280 | infoFilePath = "project/" + TakeName + "/" + QString::number(Serial) + ".trakblobs"; 281 | FILE* blobFile = fopen(infoFilePath.toLatin1(), "rb"); 282 | 283 | if (blobFile) 284 | { 285 | tracker->mode = 2; 286 | 287 | while (!feof(blobFile)) 288 | { 289 | int frameSize; 290 | fread(&frameSize, sizeof(frameSize), 1, blobFile); 291 | 292 | blobDataHeader header; 293 | fread(&header, sizeof(header), 1, blobFile); 294 | 295 | while (tracker->frameCount < header.frameId) 296 | { 297 | // Dummy Frame. 298 | VidFrameData vfdDummy = {}; 299 | vfdDummy.type = 3; 300 | vfdDummy.index = tracker->frameCount++; 301 | vfdDummy.size = 0; 302 | vfdDummy.bufferPosition = 0; 303 | tracker->vidFrameData.push_back(vfdDummy); 304 | } 305 | 306 | VidFrameData vfd = {}; 307 | vfd.type = 0; 308 | vfd.index = tracker->frameCount++; 309 | vfd.size = 0; 310 | vfd.bufferPosition = 0; 311 | 312 | for (int i = 0; i < header.blobCount; ++i) 313 | { 314 | blob b; 315 | fread(&b, sizeof(b), 1, blobFile); 316 | 317 | Marker2D marker = {}; 318 | marker.pos = QVector2D(b.cX, b.cY); 319 | marker.bounds = QVector4D(b.minX, b.minY, b.maxX, b.maxY); 320 | marker.distPos = QVector2D(b.cX, b.cY); 321 | marker.trackerId = Id; 322 | marker.markerId = vfd.newMarkers.size(); 323 | vfd.newMarkers.push_back(marker); 324 | } 325 | 326 | tracker->vidFrameData.push_back(vfd); 327 | 328 | for (int i = 0; i < header.foundRegionCount; ++i) 329 | { 330 | region r; 331 | fread(&r, sizeof(r), 1, blobFile); 332 | } 333 | 334 | //qDebug() << "TRAKBLOBS" << header.frameId << header.blobCount << header.foundRegionCount; 335 | } 336 | 337 | fclose(blobFile); 338 | } 339 | else 340 | { 341 | qDebug() << "No blobs file"; 342 | } 343 | 344 | qDebug() << "Tracker: Loaded" << Serial; 345 | 346 | //* 347 | for (int i = 0; i < tracker->vidFrameData.count(); ++i) 348 | { 349 | VidFrameData* vfdp = &tracker->vidFrameData[i]; 350 | qDebug() << "Post - Index:" << i << "Frame:" << vfdp->index << "Type:" << vfdp->type; 351 | } 352 | //*/ 353 | 354 | return tracker; 355 | } 356 | 357 | TakeTracker::TakeTracker() 358 | { 359 | drawMarkerFrameIndex = 0; 360 | takeClipData = 0; 361 | } 362 | 363 | TakeTracker::~TakeTracker() 364 | { 365 | } 366 | 367 | void TakeTracker::SetCamDist(cv::Mat Cam, cv::Mat Dist) 368 | { 369 | camMat = Cam.clone(); 370 | distCoefs = Dist.clone(); 371 | 372 | camMatOpt = cv::getOptimalNewCameraMatrix(camMat, distCoefs, cv::Size(VID_W, VID_H), 0.0, cv::Size(VID_W, VID_H), NULL, true); 373 | 374 | std::stringstream s; 375 | s << camMat << "\n\n" << camMatOpt; 376 | qDebug() << "New cam/dist:" << s.str().c_str(); 377 | 378 | projMat = camMatOpt * rtMat; 379 | } 
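// NOTE: projMat = camMatOpt * rtMat is the full 3x4 pinhole projection
// P = K_opt[R|t]; a homogeneous world point X maps to the image as x ~ P * X
// with pixel = (x0 / x2, x1 / x2). A minimal sketch of that mapping (kept as
// a comment; it mirrors ProjectPoint() below and the variable names here are
// illustrative only, not part of the original source):
//
//   cv::Mat X = (cv::Mat_<double>(4, 1) << wx, wy, wz, 1.0);
//   cv::Mat x = projMat * X;  // 3x1 homogeneous image point.
//   double px = x.at<double>(0) / x.at<double>(2);
//   double py = x.at<double>(1) / x.at<double>(2);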
380 | 381 | void TakeTracker::SetPose(cv::Mat Pose) 382 | { 383 | rtMat = Pose.clone(); 384 | worldMat = WorldFromPose(rtMat); 385 | 386 | QVector4D hwPos = worldMat * QVector4D(0, 0, 0, 1); 387 | worldPos.setX(hwPos.x() / hwPos.w()); 388 | worldPos.setY(hwPos.y() / hwPos.w()); 389 | worldPos.setZ(hwPos.z() / hwPos.w()); 390 | 391 | projMat = camMatOpt * rtMat; 392 | 393 | // Rebuild rays for 2D markers. 394 | } 395 | 396 | void TakeTracker::DecodeFrame(int FrameIndex, int KeyFrameIndex) 397 | { 398 | int frameCount = FrameIndex - KeyFrameIndex + 1; 399 | int procFrame = KeyFrameIndex; 400 | 401 | QElapsedTimer pt; 402 | pt.start(); 403 | 404 | while (frameCount-- > 0) 405 | { 406 | if (procFrame < 0 || procFrame >= vidFrameData.count()) 407 | { 408 | qDebug() << "Out of Range Local Frame Decode"; 409 | return; 410 | } 411 | 412 | VidFrameData* vfd = &vidFrameData[procFrame++]; 413 | 414 | if (vfd->type == 3) 415 | { 416 | decoder->ShowBlankFrame(); 417 | decoder->blankFrame = true; 418 | continue; 419 | } 420 | 421 | int consumed = 0; 422 | bool frame = decoder->DoDecodeSingleFrame(takeClipData + vfd->bufferPosition, vfd->size, &consumed); 423 | decoder->blankFrame = false; 424 | 425 | if (!frame) 426 | qDebug() << "Failed to Decode Frame"; 427 | } 428 | 429 | _currentDecodeFrameIndex = procFrame; 430 | int t = pt.elapsed(); 431 | 432 | //qDebug() << "Perf" << t; 433 | } 434 | 435 | void TakeTracker::AdvanceFrame(int FrameCount) 436 | { 437 | DecodeFrame(_currentDecodeFrameIndex + FrameCount - 1, _currentDecodeFrameIndex); 438 | } 439 | 440 | /* 441 | void TakeTracker::AdvanceFrame(int FrameCount) 442 | { 443 | int frameCount = FrameCount; 444 | int procFrame = currentFrameIndex; 445 | 446 | QElapsedTimer pt; 447 | pt.start(); 448 | 449 | while (frameCount > 0) 450 | { 451 | int consumed = 0; 452 | 453 | VidFrameData* vfd = &vidFrameData[procFrame++]; 454 | bool frame = decoder->DoDecodeSingleFrame(takeClipData + vfd->bufferPosition, vfd->size, &consumed); 455 | 456 | if (procFrame >= vidFrameData.count()) 457 | return; 458 | 459 | if (frame) 460 | qDebug() << "SHOULD NEVER BE TRUE"; 461 | 462 | if (!frame && vfd->type != 2) 463 | { 464 | VidFrameData* vfdn = &vidFrameData[procFrame]; 465 | frame = decoder->DoDecodeSingleFrame(takeClipData + vfdn->bufferPosition, vfdn->size, &consumed); 466 | frameCount--; 467 | } 468 | 469 | //qDebug() << "Decode Frame" << frame << consumed; 470 | } 471 | 472 | currentFrameIndex = procFrame; 473 | int t = pt.elapsed(); 474 | 475 | //qDebug() << "Perf" << t; 476 | } 477 | */ 478 | 479 | void TakeTracker::Save() 480 | { 481 | qDebug() << "Tracker: Save" << id << takeName << name; 482 | 483 | QByteArray jsonBytes = GetProps().GetJSON(true); 484 | QFile file("project/" + takeName + "/" + QString::number(serial) + ".tracker"); 485 | 486 | if (!file.open(QIODevice::WriteOnly | QIODevice::Text)) 487 | { 488 | qDebug() << "Tracker: Save file failed"; 489 | return; 490 | } 491 | 492 | file.write(jsonBytes); 493 | file.close(); 494 | } 495 | 496 | void TakeTracker::Build2DMarkers(int StartFrame, int EndFrame) 497 | { 498 | qDebug() << "Build 2D Markers" << name; 499 | 500 | int localStartFrame = StartFrame; 501 | int localEndFrame = EndFrame; 502 | 503 | if (localStartFrame < 0) 504 | localStartFrame = 0; 505 | else if (localStartFrame >= vidFrameData.count()) 506 | localStartFrame = vidFrameData.count(); 507 | 508 | if (localEndFrame < 0) 509 | localEndFrame = 0; 510 | else if (localEndFrame >= vidFrameData.count()) 511 | localEndFrame = vidFrameData.count(); 512 
| 513 | // Find actual start frame. 514 | for (int i = localStartFrame; i <= localEndFrame; ++i) 515 | { 516 | if (vidFrameData[i].type != 3) 517 | break; 518 | else 519 | localStartFrame++; 520 | } 521 | 522 | // Find keyframe for start frame. 523 | int keyFrameIndex = localStartFrame; 524 | while (keyFrameIndex >= 0) 525 | { 526 | if (vidFrameData[keyFrameIndex].type == 1) 527 | { 528 | break; 529 | } 530 | --keyFrameIndex; 531 | } 532 | 533 | if (keyFrameIndex == -1) 534 | { 535 | qDebug() << "Can't find frames to process"; 536 | return; 537 | } 538 | 539 | DecodeFrame(localStartFrame, keyFrameIndex); 540 | 541 | for (int i = localStartFrame + 1; i <= localEndFrame; ++i) 542 | { 543 | QElapsedTimer tmr; 544 | tmr.start(); 545 | AdvanceFrame(1); 546 | qint64 t1 = tmr.nsecsElapsed(); 547 | vidFrameData[i].newMarkers = decoder->ProcessFrameNewMarkers(); 548 | qint64 t2 = tmr.nsecsElapsed(); 549 | 550 | float tTotal = (t2 / 1000) / 1000.0; 551 | float tDecode = (t1 / 1000) / 1000.0; 552 | float tCentroids = ((t2 - t1) / 1000) / 1000.0; 553 | 554 | qDebug() << i << "Markers:" << vidFrameData[i].newMarkers.count() << "Time:" << tTotal << tDecode << tCentroids; 555 | } 556 | 557 | UndistortMarkers(StartFrame, EndFrame); 558 | } 559 | 560 | void TakeTracker::UndistortMarkers(int StartFrame, int EndFrame) 561 | { 562 | int localStartFrame = StartFrame; 563 | int localEndFrame = EndFrame; 564 | 565 | if (localStartFrame < 0) 566 | localStartFrame = 0; 567 | else if (localStartFrame >= vidFrameData.count()) 568 | localStartFrame = vidFrameData.count(); 569 | 570 | if (localEndFrame < 0) 571 | localEndFrame = 0; 572 | else if (localEndFrame >= vidFrameData.count()) 573 | localEndFrame = vidFrameData.count(); 574 | 575 | for (int i = localStartFrame; i <= localEndFrame; ++i) 576 | { 577 | // Undistort 578 | if (vidFrameData[i].newMarkers.size() > 0) 579 | { 580 | cv::Mat_<cv::Point2f> matPoint(1, vidFrameData[i].newMarkers.size()); 581 | for (int j = 0; j < vidFrameData[i].newMarkers.size(); ++j) 582 | matPoint(j) = cv::Point2f(vidFrameData[i].newMarkers[j].distPos.x(), vidFrameData[i].newMarkers[j].distPos.y()); 583 | 584 | cv::Mat matOutPoints; 585 | // NOTE: Just use the opt calib matrix. 586 | cv::undistortPoints(matPoint, matOutPoints, camMat, distCoefs, cv::noArray(), camMatOpt); 587 | //cv::undistortPoints(matPoint, matOutPoints, camMatOpt, decoder->_calibDistCoeffs, cv::noArray(), camMatOpt); 588 | 589 | // TODO: Clip markers. 590 | for (int j = 0; j < matOutPoints.size().width; ++j) 591 | { 592 | cv::Point2f p = matOutPoints.at<cv::Point2f>(j); 593 | 594 | // TODO: Check for bounds clipping, need to remove marker.
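// NOTE: cv::undistortPoints() above normalizes each point with camMat,
// inverts the D(k1, k2, p1, p2, k3) distortion model, then reprojects with
// camMatOpt, so p is already back in (undistorted) pixel coordinates.
// One hedged way to resolve the clipping TODOs: record the indices of points
// that land outside [0, VID_W) x [0, VID_H) here, then erase those entries
// from newMarkers after this loop, so the matOutPoints/newMarkers indices
// stay in sync while iterating.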
595 | //if (p.x >= 0 && p.x < VID_W && p.y >= 0 && p.y < VID_H) 596 | { 597 | vidFrameData[i].newMarkers[j].pos = QVector2D(p.x, p.y); 598 | } 599 | } 600 | } 601 | } 602 | } 603 | 604 | void TakeTracker::BuildRays(int StartFrame, int EndFrame) 605 | { 606 | UndistortMarkers(StartFrame, EndFrame); 607 | 608 | int localStartFrame = StartFrame; 609 | int localEndFrame = EndFrame; 610 | 611 | if (localStartFrame < 0) 612 | localStartFrame = 0; 613 | else if (localStartFrame >= vidFrameData.count()) 614 | localStartFrame = vidFrameData.count(); 615 | 616 | if (localEndFrame < 0) 617 | localEndFrame = 0; 618 | else if (localEndFrame >= vidFrameData.count()) 619 | localEndFrame = vidFrameData.count(); 620 | 621 | std::vector<cv::Point2f> points; 622 | std::vector<cv::Vec3f> elines; 623 | 624 | cv::Matx33d m33((double*)camMatOpt.ptr()); 625 | cv::Matx33d m33Inv = m33.inv(); 626 | 627 | for (int i = localStartFrame; i <= localEndFrame; ++i) 628 | { 629 | // Project rays. 630 | for (int j = 0; j < vidFrameData[i].newMarkers.count(); ++j) 631 | { 632 | Marker2D* m = &vidFrameData[i].newMarkers[j]; 633 | 634 | cv::Matx31d imgPt(m->pos.x(), m->pos.y(), 1); 635 | imgPt = m33Inv * imgPt; 636 | QVector3D d((float)imgPt(0, 0), (float)imgPt(1, 0), (float)imgPt(2, 0)); 637 | d.normalize(); 638 | 639 | m->worldRayD = (worldMat * QVector4D(d, 0)).toVector3D(); 640 | } 641 | } 642 | } 643 | 644 | bool TakeTracker::ConvertTimelineToFrame(int TimelineFrame, int* KeyFrameIndex, int* FrameIndex) 645 | { 646 | int keyFrameIndex = -1; 647 | int frameIndex = -1; 648 | int l = 0; 649 | int r = vidFrameData.count() - 1; 650 | 651 | *KeyFrameIndex = keyFrameIndex; 652 | *FrameIndex = frameIndex; 653 | 654 | while (l <= r) 655 | { 656 | int m = l + (r - l) / 2; 657 | 658 | int mIdx = vidFrameData[m].index; 659 | 660 | if (mIdx == TimelineFrame) 661 | { 662 | frameIndex = m; 663 | break; 664 | } 665 | 666 | if (mIdx < TimelineFrame) 667 | { 668 | l = m + 1; 669 | } 670 | else 671 | { 672 | r = m - 1; 673 | } 674 | } 675 | 676 | keyFrameIndex = frameIndex; 677 | 678 | //qDebug() << "Search:" << TimelineFrame << "FrameIndex:" << frameIndex; 679 | 680 | while (keyFrameIndex >= 0) 681 | { 682 | if (vidFrameData[keyFrameIndex].type == 1) 683 | { 684 | break; 685 | } 686 | --keyFrameIndex; 687 | } 688 | 689 | if (frameIndex != -1 && keyFrameIndex != -1) 690 | { 691 | //if (vidFrameData[frameIndex].type == 3) 692 | //return false; 693 | 694 | *KeyFrameIndex = keyFrameIndex; 695 | *FrameIndex = frameIndex; 696 | return true; 697 | } 698 | 699 | return false; 700 | } 701 | 702 | VidFrameData* TakeTracker::GetLocalFrame(int TimelineFrame) 703 | { 704 | int localFrame = TimelineFrame; 705 | 706 | if (localFrame < 0 || localFrame >= vidFrameData.count()) 707 | return 0; 708 | 709 | return &vidFrameData[localFrame]; 710 | } 711 | 712 | QVector2D TakeTracker::ProjectPoint(QVector3D P) 713 | { 714 | cv::Mat wp(4, 1, CV_64F); 715 | wp.at<double>(0) = P.x(); 716 | wp.at<double>(1) = P.y(); 717 | wp.at<double>(2) = P.z(); 718 | wp.at<double>(3) = 1.0; 719 | 720 | cv::Mat imgPt = projMat * wp; 721 | 722 | QVector2D result; 723 | result.setX(imgPt.at<double>(0) / imgPt.at<double>(2)); 724 | result.setY(imgPt.at<double>(1) / imgPt.at<double>(2)); 725 | 726 | return result; 727 | } 728 | 729 | TrackerProperties TakeTracker::GetProps() 730 | { 731 | TrackerProperties result; 732 | 733 | result.name = name; 734 | memcpy(result.maskData, mask, sizeof(mask)); 735 | result.exposure = exposure; 736 | result.iso = iso; 737 | result.distCoefs = distCoefs.clone(); 738 | result.camMat = camMat.clone(); 739 | result.rtMat = rtMat.clone();
740 | 741 | return result; 742 | } -------------------------------------------------------------------------------- /VectorSynth/takeTracker.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include 4 | #include 5 | #include 6 | #include "decoder.h" 7 | #include "liveTracker.h" 8 | #include "trackerConnection.h" 9 | 10 | class VidFrameData 11 | { 12 | public: 13 | 14 | int type; 15 | int index; 16 | int size; 17 | int bufferPosition; 18 | QList newMarkers; 19 | }; 20 | 21 | class TakeTracker 22 | { 23 | public: 24 | 25 | static QMatrix4x4 WorldFromPose(cv::Mat Pose); 26 | static cv::Mat PoseFromWorld(QMatrix4x4 World); 27 | 28 | static TakeTracker* Create(int Id, QString TakeName, uint32_t Serial, QString FilePath, LiveTracker* LiveTracker); 29 | 30 | // Take. 31 | QString takeName; 32 | LiveTracker* liveTracker; 33 | int drawMarkerFrameIndex; 34 | int vidPlaybackFrame; 35 | 36 | // Tracker params. 37 | int id; 38 | uint32_t serial; 39 | 40 | QString name; 41 | int exposure; 42 | int iso; 43 | uint8_t mask[64 * 44]; 44 | cv::Mat distCoefs; 45 | cv::Mat camMatOpt; 46 | cv::Mat camMat; 47 | cv::Mat projMat; // camMatOpt * [R|t] 48 | cv::Mat rtMat; // [R|t] 49 | QMatrix4x4 worldMat; 50 | QVector3D worldPos; 51 | 52 | // Frame data. 53 | Decoder* decoder; 54 | uint8_t* takeClipData; 55 | int frameCount; 56 | QList vidFrameData; 57 | int mode; 58 | 59 | TakeTracker(); 60 | ~TakeTracker(); 61 | 62 | void DecodeFrame(int FrameIndex, int KeyFrameIndex); 63 | void AdvanceFrame(int FrameCount); 64 | void Save(); 65 | void UndistortMarkers(int StartFrame, int EndFrame); 66 | void Build2DMarkers(int StartFrame, int EndFrame); 67 | void BuildRays(int StartFrame, int EndFrame); 68 | bool ConvertTimelineToFrame(int TimelineFrame, int* KeyFrameIndex, int* FrameIndex); 69 | VidFrameData* GetLocalFrame(int TimelineFrame); 70 | void SetPose(cv::Mat Pose); 71 | void SetCamDist(cv::Mat Cam, cv::Mat Dist); 72 | QVector2D ProjectPoint(QVector3D P); 73 | TrackerProperties GetProps(); 74 | 75 | private: 76 | 77 | int _currentDecodeFrameIndex; 78 | }; -------------------------------------------------------------------------------- /VectorSynth/timelineWidget.cpp: -------------------------------------------------------------------------------- 1 | #include "timelineWidget.h" 2 | #include 3 | #include 4 | #include 5 | 6 | QT_USE_NAMESPACE 7 | 8 | TimelineWidget::TimelineWidget(QWidget* Parent) : 9 | QWidget(Parent) 10 | { 11 | setBackgroundRole(QPalette::Base); 12 | setAutoFillBackground(true); 13 | setMouseTracking(true); 14 | 15 | setMinimumSize(256, 40); 16 | 17 | _mainFont = QFont("Arial", 8); 18 | 19 | setSizePolicy(QSizePolicy::MinimumExpanding, QSizePolicy::Preferred); 20 | 21 | totalFrames = 1000; 22 | selectedFrame = 0; 23 | rangeStartFrame = 50; 24 | rangeEndFrame = 200; 25 | } 26 | 27 | void TimelineWidget::paintEvent(QPaintEvent* Event) 28 | { 29 | Q_UNUSED(Event); 30 | 31 | QPainter qp(this); 32 | qp.fillRect(0, 0, width(), height(), QColor(28, 30, 32)); 33 | qp.fillRect(0, 25, width(), 40 - 25, QColor(43, 45, 51)); 34 | 35 | int markCount = width() / 64; 36 | 37 | if (markCount > totalFrames) 38 | markCount = totalFrames; 39 | 40 | float spacing = (float)width() / markCount; 41 | 42 | int markValueInc = totalFrames / markCount; 43 | 44 | qp.setBrush(Qt::BrushStyle::NoBrush); 45 | qp.setPen(QPen(QColor::fromRgb(100, 100, 100))); 46 | 47 | for (int i = 0; i < markCount; ++i) 48 | { 49 | int x = i * spacing; 50 | qp.drawLine(x, 15, x, 24); 51 | 
qp.drawText(QPointF(x, 14), QString::number(markValueInc * i)); 52 | } 53 | 54 | qp.setRenderHint(QPainter::RenderHint::Antialiasing, true); 55 | 56 | int runnerX = (int)(((float)width() / totalFrames) * selectedFrame); 57 | 58 | int startRunnerX = (int)(((float)width() / totalFrames) * rangeStartFrame); 59 | int endRunnerX = (int)(((float)width() / totalFrames) * rangeEndFrame); 60 | 61 | qp.setBrush(QBrush(QColor::fromRgb(255, 255, 255, 100))); 62 | qp.setPen(Qt::PenStyle::NoPen); 63 | qp.drawRoundedRect(runnerX, 0, 10, 25, 4, 4); 64 | 65 | qp.setBrush(QBrush(QColor::fromRgb(100, 100, 100, 255))); 66 | qp.setPen(Qt::PenStyle::NoPen); 67 | 68 | qp.drawRect(startRunnerX + 5, 25, endRunnerX - startRunnerX, 5); 69 | 70 | qp.setPen(QPen(QColor::fromRgb(43, 45, 51))); 71 | qp.drawRoundedRect(startRunnerX, 23, 10, 10, 4, 4); 72 | qp.drawRoundedRect(endRunnerX, 23, 10, 10, 4, 4); 73 | 74 | qp.setRenderHint(QPainter::RenderHint::Antialiasing, false); 75 | } 76 | 77 | void TimelineWidget::mousePressEvent(QMouseEvent* Event) 78 | { 79 | if (Event->button() == Qt::MouseButton::LeftButton) 80 | { 81 | QPointF pos = Event->localPos(); 82 | _grabbedComponent = 0; 83 | 84 | if (pos.y() < 25) 85 | { 86 | _grabbedComponent = 1; 87 | } 88 | else 89 | { 90 | int startRunnerX = (int)(((float)width() / totalFrames) * rangeStartFrame); 91 | int endRunnerX = (int)(((float)width() / totalFrames) * rangeEndFrame); 92 | 93 | if (pos.x() >= startRunnerX && pos.x() <= startRunnerX + 10) 94 | _grabbedComponent = 2; 95 | else if (pos.x() >= endRunnerX && pos.x() <= endRunnerX + 10) 96 | _grabbedComponent = 3; 97 | } 98 | 99 | _updateInteract(pos.x()); 100 | } 101 | } 102 | 103 | void TimelineWidget::mouseMoveEvent(QMouseEvent* Event) 104 | { 105 | if (Event->buttons() & Qt::MouseButton::LeftButton) 106 | { 107 | QPointF pos = Event->localPos(); 108 | _updateInteract(pos.x()); 109 | } 110 | } 111 | 112 | void TimelineWidget::_updateInteract(int LocalXPixel) 113 | { 114 | int frame = _getFrame(LocalXPixel); 115 | 116 | if (_grabbedComponent == 1) 117 | { 118 | selectedFrame = frame; 119 | emit valueChanged(selectedFrame); 120 | } 121 | else if (_grabbedComponent == 2) 122 | { 123 | if (frame >= rangeEndFrame) 124 | frame = rangeEndFrame - 1; 125 | 126 | rangeStartFrame = frame; 127 | } 128 | else if (_grabbedComponent == 3) 129 | { 130 | if (frame <= rangeStartFrame) 131 | frame = rangeStartFrame + 1; 132 | 133 | rangeEndFrame = frame; 134 | } 135 | 136 | update(); 137 | } 138 | 139 | int TimelineWidget::_getFrame(int LocalXPixel) 140 | { 141 | if (LocalXPixel < 0) 142 | LocalXPixel = 0; 143 | else if (LocalXPixel >= width()) 144 | LocalXPixel = width() - 1; 145 | 146 | float t = (float)LocalXPixel / (width() - 1); 147 | 148 | return t * (totalFrames - 1); 149 | } 150 | 151 | void TimelineWidget::setParams(int TotalFrames) 152 | { 153 | totalFrames = TotalFrames; 154 | selectedFrame = 0; 155 | rangeStartFrame = 0; 156 | rangeEndFrame = TotalFrames - 10; 157 | update(); 158 | } 159 | 160 | void TimelineWidget::adjustSelectedFrame(int Value) 161 | { 162 | setFrame(selectedFrame + Value); 163 | } 164 | 165 | void TimelineWidget::setFrame(int Value) 166 | { 167 | selectedFrame = Value; 168 | 169 | if (selectedFrame < 0) 170 | selectedFrame = 0; 171 | else if (selectedFrame >= totalFrames) 172 | selectedFrame = totalFrames - 1; 173 | 174 | emit valueChanged(selectedFrame); 175 | 176 | update(); 177 | } -------------------------------------------------------------------------------- /VectorSynth/timelineWidget.h: 
-------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include 4 | 5 | QT_USE_NAMESPACE 6 | 7 | class TimelineWidget : public QWidget 8 | { 9 | Q_OBJECT 10 | signals: 11 | void valueChanged(int Value); 12 | 13 | public: 14 | int totalFrames; 15 | int selectedFrame; 16 | int rangeStartFrame; 17 | int rangeEndFrame; 18 | 19 | TimelineWidget(QWidget* Parent = 0); 20 | void setParams(int TotalFrames); 21 | void adjustSelectedFrame(int Value); 22 | void setFrame(int Value); 23 | 24 | protected: 25 | void paintEvent(QPaintEvent* Event); 26 | void mousePressEvent(QMouseEvent* Event); 27 | void mouseMoveEvent(QMouseEvent* Event); 28 | //void mouseReleaseEvent(QMouseEvent* Event); 29 | 30 | private: 31 | QFont _mainFont; 32 | int _grabbedComponent; 33 | 34 | void _updateInteract(int LocalXPixel); 35 | int _getFrame(int LocalXPixel); 36 | }; -------------------------------------------------------------------------------- /VectorSynth/tracker.cpp: -------------------------------------------------------------------------------- 1 | #include "tracker.h" 2 | 3 | Tracker::Tracker() 4 | { 5 | 6 | } 7 | 8 | Tracker::~Tracker() 9 | { 10 | } -------------------------------------------------------------------------------- /VectorSynth/tracker.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | class Tracker 4 | { 5 | public: 6 | 7 | Tracker(); 8 | ~Tracker(); 9 | 10 | private: 11 | 12 | }; -------------------------------------------------------------------------------- /VectorSynth/trackerConnection.cpp: -------------------------------------------------------------------------------- 1 | #include "trackerConnection.h" 2 | #include "serverThreadWorker.h" 3 | #include 4 | #include 5 | #include 6 | #include 7 | 8 | void TrackerProperties::Populate(QByteArray JSON) 9 | { 10 | QJsonObject trackerObj = QJsonDocument::fromJson(JSON).object(); 11 | 12 | name = trackerObj["name"].toString(); 13 | exposure = trackerObj["exposure"].toInt(); 14 | iso = trackerObj["iso"].toInt(); 15 | 16 | QJsonArray jsonIntrinMat = trackerObj["intrinsic"].toObject()["camera"].toArray(); 17 | QJsonArray jsonIntrinDist = trackerObj["intrinsic"].toObject()["distortion"].toArray(); 18 | QJsonArray jsonExtrinProj = trackerObj["extrinsic"].toObject()["pose"].toArray(); 19 | 20 | distCoefs = cv::Mat::zeros(5, 1, CV_64F); 21 | distCoefs.at(0) = jsonIntrinDist[0].toDouble(); 22 | distCoefs.at(1) = jsonIntrinDist[1].toDouble(); 23 | distCoefs.at(2) = jsonIntrinDist[2].toDouble(); 24 | distCoefs.at(3) = jsonIntrinDist[3].toDouble(); 25 | distCoefs.at(4) = jsonIntrinDist[4].toDouble(); 26 | 27 | camMat = cv::Mat::eye(3, 3, CV_64F); 28 | camMat.at(0, 0) = jsonIntrinMat[0].toDouble(); 29 | camMat.at(1, 0) = jsonIntrinMat[1].toDouble(); 30 | camMat.at(2, 0) = jsonIntrinMat[2].toDouble(); 31 | camMat.at(0, 1) = jsonIntrinMat[3].toDouble(); 32 | camMat.at(1, 1) = jsonIntrinMat[4].toDouble(); 33 | camMat.at(2, 1) = jsonIntrinMat[5].toDouble(); 34 | camMat.at(0, 2) = jsonIntrinMat[6].toDouble(); 35 | camMat.at(1, 2) = jsonIntrinMat[7].toDouble(); 36 | camMat.at(2, 2) = jsonIntrinMat[8].toDouble(); 37 | 38 | rtMat = cv::Mat::eye(3, 4, CV_64F); 39 | for (int iY = 0; iY < 4; ++iY) 40 | { 41 | for (int iX = 0; iX < 3; ++iX) 42 | { 43 | rtMat.at(iX, iY) = jsonExtrinProj[iY * 3 + iX].toDouble(); 44 | } 45 | } 46 | 47 | QString maskStr = trackerObj["mask"].toString(); 48 | for (int i = 0; i < maskStr.size(); ++i) 49 | { 50 | maskData[i] = 
maskStr[i].toLatin1() - 48; 51 | } 52 | } 53 | 54 | QByteArray TrackerProperties::GetJSON(bool Pretty) 55 | { 56 | QJsonObject jsonObj; 57 | jsonObj["name"] = name; 58 | 59 | QString maskString; 60 | for (int i = 0; i < sizeof(maskData); ++i) 61 | { 62 | maskString += maskData[i] + 48; 63 | } 64 | jsonObj["mask"] = maskString; 65 | 66 | QJsonArray jsonIntrinMat; 67 | jsonIntrinMat.append(camMat.at(0, 0)); 68 | jsonIntrinMat.append(camMat.at(1, 0)); 69 | jsonIntrinMat.append(camMat.at(2, 0)); 70 | jsonIntrinMat.append(camMat.at(0, 1)); 71 | jsonIntrinMat.append(camMat.at(1, 1)); 72 | jsonIntrinMat.append(camMat.at(2, 1)); 73 | jsonIntrinMat.append(camMat.at(0, 2)); 74 | jsonIntrinMat.append(camMat.at(1, 2)); 75 | jsonIntrinMat.append(camMat.at(2, 2)); 76 | 77 | QJsonArray jsonIntrinDist; 78 | jsonIntrinDist.append(distCoefs.at(0)); 79 | jsonIntrinDist.append(distCoefs.at(1)); 80 | jsonIntrinDist.append(distCoefs.at(2)); 81 | jsonIntrinDist.append(distCoefs.at(3)); 82 | jsonIntrinDist.append(distCoefs.at(4)); 83 | 84 | QJsonObject jsonIntrin; 85 | jsonIntrin["camera"] = jsonIntrinMat; 86 | jsonIntrin["distortion"] = jsonIntrinDist; 87 | 88 | jsonObj["intrinsic"] = jsonIntrin; 89 | 90 | QJsonArray pose; 91 | for (int iY = 0; iY < 4; ++iY) 92 | { 93 | for (int iX = 0; iX < 3; ++iX) 94 | { 95 | pose.append(rtMat.at(iX, iY)); 96 | } 97 | } 98 | 99 | QJsonObject jsonExtrin; 100 | jsonExtrin["pose"] = pose; 101 | 102 | jsonObj["extrinsic"] = jsonExtrin; 103 | 104 | QJsonDocument jsonDoc(jsonObj); 105 | QByteArray jsonBytes; 106 | 107 | if (Pretty) 108 | jsonBytes = jsonDoc.toJson(QJsonDocument::JsonFormat::Indented); 109 | else 110 | jsonBytes = jsonDoc.toJson(QJsonDocument::JsonFormat::Compact); 111 | 112 | return jsonBytes; 113 | } 114 | 115 | TrackerConnection::TrackerConnection(int Id, QTcpSocket* Socket, QObject* Parent) : 116 | QObject(Parent), 117 | id(Id), 118 | accepted(false), 119 | streamMode(0), 120 | streaming(false), 121 | recording(false), 122 | recordFile(0), 123 | socket(Socket), 124 | _recvLength(0), 125 | _recvState(0), 126 | _recvPacketId(0), 127 | _serverThreadWorker((ServerThreadWorker*)Parent) 128 | { 129 | memset(props.maskData, 1, sizeof(props.maskData)); 130 | decoder = new Decoder(); 131 | 132 | connect(Socket, &QTcpSocket::readyRead, this, &TrackerConnection::OnTcpSocketReadyRead); 133 | connect(Socket, &QTcpSocket::disconnected, this, &TrackerConnection::OnTcpSocketDisconnected); 134 | 135 | Socket->write("gi\n"); 136 | } 137 | 138 | TrackerConnection::~TrackerConnection() 139 | { 140 | StopRecording(); 141 | } 142 | 143 | void TrackerConnection::Lock() 144 | { 145 | _mutex.lock(); 146 | } 147 | 148 | void TrackerConnection::Unlock() 149 | { 150 | _mutex.unlock(); 151 | } 152 | 153 | void TrackerConnection::StartRecording(QString TakeName) 154 | { 155 | StopRecording(); 156 | 157 | if (streamMode == 1 || streamMode == 2) 158 | { 159 | QByteArray jsonBytes = props.GetJSON(true); 160 | QFile file("project/" + TakeName + "/" + QString::number(serial) + ".tracker"); 161 | 162 | if (!file.open(QIODevice::WriteOnly | QIODevice::Text)) 163 | { 164 | qDebug() << "Tracker: Save file failed"; 165 | return; 166 | } 167 | 168 | file.write(jsonBytes); 169 | file.close(); 170 | } 171 | 172 | char fileName[256]; 173 | 174 | if (streamMode == 1) 175 | { 176 | sprintf(fileName, "project\\%s\\%u.trakvid", TakeName.toLatin1().data(), serial); 177 | recordFile = fopen(fileName, "wb"); 178 | recording = true; 179 | _gotDataFrame = false; 180 | } 181 | else if (streamMode == 2) 182 | { 183 
| sprintf(fileName, "project\\%s\\%u.trakblobs", TakeName.toLatin1().data(), serial); 184 | recordFile = fopen(fileName, "wb"); 185 | recording = true; 186 | } 187 | } 188 | 189 | void TrackerConnection::StopRecording() 190 | { 191 | if (recordFile) 192 | fclose(recordFile); 193 | 194 | recordFile = 0; 195 | recording = false; 196 | _gotDataFrame = false; 197 | } 198 | 199 | void TrackerConnection::RecordData(uint8_t* Data, int Len) 200 | { 201 | fwrite(Data, Len, 1, recordFile); 202 | //fflush(recordFile); 203 | } 204 | 205 | void TrackerConnection::UpdateProperties(QByteArray Props) 206 | { 207 | props.Populate(Props); 208 | QString cmd = "sp," + Props + "\n"; 209 | 210 | qDebug() << "Update:" << cmd; 211 | socket->write(cmd.toUtf8()); 212 | } 213 | 214 | void TrackerConnection::OnTcpSocketDisconnected() 215 | { 216 | qDebug() << "Client" << id << "Disconnected"; 217 | emit OnDisconnected(this); 218 | } 219 | 220 | void TrackerConnection::OnTcpSocketReadyRead() 221 | { 222 | while (socket->bytesAvailable()) 223 | { 224 | // NOTE: Socket buffer (socket->bytesAvailable()) only updates on next Qt gui loop. 225 | 226 | if (_recvState == 0) 227 | { 228 | _recvLength = 0; 229 | _recvPacketId = 0; 230 | 231 | if (socket->bytesAvailable() >= 4) 232 | { 233 | socket->read((char*)_recvBuffer, 4); 234 | _recvPacketId = _recvBuffer[0]; 235 | _recvState = 1; 236 | } 237 | else 238 | { 239 | break; 240 | } 241 | } 242 | else if (_recvPacketId == 3) 243 | { 244 | if (socket->bytesAvailable() >= 20) 245 | { 246 | socket->read((char*)_recvBuffer, 20); 247 | 248 | _recvFrameSize = *(int*)&_recvBuffer[0]; 249 | int frameType = *(int*)&_recvBuffer[4]; 250 | avgMasterOffset = *(float*)&_recvBuffer[8]; 251 | int64_t tempLatestFrameId = *(int64_t*)&_recvBuffer[12]; 252 | 253 | if (tempLatestFrameId > 0) 254 | latestFrameId = tempLatestFrameId; 255 | 256 | if (frameType == 2) 257 | { 258 | _gotDataFrame = true; 259 | if (_serverThreadWorker->takeStartFrameId == 0) 260 | { 261 | _serverThreadWorker->takeStartFrameId = latestFrameId + 1; 262 | } 263 | } 264 | 265 | if (tempLatestFrameId > 0) 266 | { 267 | latestFrameId -= _serverThreadWorker->takeStartFrameId; 268 | *(int64_t*)&_recvBuffer[12] = latestFrameId; 269 | } 270 | 271 | if (recording && _gotDataFrame) 272 | { 273 | RecordData(_recvBuffer, 20); 274 | } 275 | 276 | _recvState = 2; 277 | _recvPacketId = 0; 278 | } 279 | else 280 | { 281 | break; 282 | } 283 | } 284 | else if (_recvPacketId == 4) 285 | { 286 | if (socket->bytesAvailable() >= 12) 287 | { 288 | qDebug() << "Got GI"; 289 | 290 | socket->read((char*)_recvBuffer, 12); 291 | version = *(int*)&_recvBuffer[0]; 292 | serial = *(int*)&_recvBuffer[4]; 293 | _recvFrameSize = *(int*)&_recvBuffer[8]; 294 | _recvState = 4; 295 | _recvPacketId = 0; 296 | } 297 | else 298 | { 299 | break; 300 | } 301 | } 302 | else if (_recvPacketId == 5) 303 | { 304 | // NOTE: Realtime blob packet header. 
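// NOTE: The 16-byte header read below decodes as:
//   bytes 0..3    int32   payload size
//   bytes 4..11   int64   frame id
//   bytes 12..15  float   avg master offset
// An equivalent struct view of the same layout (illustrative only; assumes
// packed, little-endian data as sent by the tracker):
//
//   #pragma pack(push, 1)
//   struct BlobPacketHeader
//   {
//       int32_t frameSize;
//       int64_t frameId;
//       float avgMasterOffset;
//   };
//   #pragma pack(pop)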
305 | 306 | if (socket->bytesAvailable() >= 16) 307 | { 308 | socket->read((char*)_recvBuffer, 16); 309 | _recvFrameSize = *(int*)&_recvBuffer[0]; 310 | latestFrameId = *(int64_t*)&_recvBuffer[4]; 311 | avgMasterOffset = *(float*)&_recvBuffer[12]; 312 | 313 | if (_serverThreadWorker->takeStartFrameId == 0) 314 | { 315 | _serverThreadWorker->takeStartFrameId = latestFrameId; 316 | } 317 | 318 | latestFrameId -= _serverThreadWorker->takeStartFrameId; 319 | *(int64_t*)&_recvBuffer[4] = latestFrameId; 320 | 321 | _recvState = 3; 322 | _recvPacketId = 0; 323 | _recvLength = 16; 324 | } 325 | else 326 | { 327 | break; 328 | } 329 | } 330 | else if (_recvState == 2) 331 | { 332 | // NOTE: Recv realtime image stream. 333 | 334 | int readBytes = socket->read((char*)_recvBuffer, _recvFrameSize); 335 | _recvFrameSize -= readBytes; 336 | 337 | if (recording && _gotDataFrame) 338 | { 339 | RecordData(_recvBuffer, readBytes); 340 | } 341 | 342 | //* 343 | if (streaming) 344 | { 345 | if (decoder->DoDecode(_recvBuffer, readBytes)) 346 | { 347 | Lock(); 348 | decoder->TransferFrameToBuffer(postFrameData); 349 | Unlock(); 350 | 351 | emit OnNewFrame(this); 352 | } 353 | } 354 | //*/ 355 | 356 | if (_recvFrameSize == 0) 357 | _recvState = 0; 358 | } 359 | else if (_recvState == 3) 360 | { 361 | // NOTE: Recv realtime blob stream. 362 | 363 | //qDebug() << "Test Avail" << socket->bytesAvailable(); 364 | if (socket->bytesAvailable() > 0 && _recvLength < _recvFrameSize + 4) 365 | { 366 | int readBytes = socket->read((char*)_recvBuffer + _recvLength, (_recvFrameSize + 4) - _recvLength); 367 | _recvLength += readBytes; 368 | 369 | //qDebug() << "Total" << _recvLength << "/" << _recvFrameSize; 370 | 371 | if (_recvLength == _recvFrameSize + 4) 372 | { 373 | ++decoder->newFrames; 374 | decoder->dataRecvBytes += _recvFrameSize + 4; 375 | 376 | if (recording) 377 | { 378 | RecordData(_recvBuffer, _recvFrameSize + 4); 379 | } 380 | else if (streaming) 381 | { 382 | Lock(); 383 | memcpy(markerData, _recvBuffer + 4, _recvFrameSize); 384 | markerDataSize = _recvFrameSize; 385 | Unlock(); 386 | 387 | emit OnNewMarkersFrame(this); 388 | } 389 | 390 | _recvState = 0; 391 | } 392 | } 393 | else 394 | { 395 | break; 396 | } 397 | } 398 | else if (_recvState == 4) 399 | { 400 | if (socket->bytesAvailable() >= _recvFrameSize) 401 | { 402 | socket->read((char*)_recvBuffer, _recvFrameSize); 403 | 404 | QByteArray jsonData((char*)_recvBuffer, _recvFrameSize); 405 | //qDebug() << _recvFrameSize << QString::fromUtf8(jsonData); 406 | 407 | props.Populate(jsonData); 408 | 409 | /* 410 | props.name = "Unnamed"; 411 | memcpy(props.maskData, _recvBuffer + 8, sizeof(props.maskData)); 412 | 413 | for (int i = 0; i < sizeof(props.maskData); ++i) 414 | { 415 | props.maskData[i] -= 48; 416 | } 417 | */ 418 | 419 | accepted = true; 420 | _recvState = 0; 421 | 422 | emit OnInfoUpdate(this); 423 | } 424 | else 425 | { 426 | break; 427 | } 428 | } 429 | } 430 | } -------------------------------------------------------------------------------- /VectorSynth/trackerConnection.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include 4 | #include 5 | #include "decoder.h" 6 | #include 7 | 8 | QT_USE_NAMESPACE 9 | 10 | class ServerThreadWorker; 11 | 12 | class TrackerProperties 13 | { 14 | public: 15 | 16 | QString name; 17 | uint8_t maskData[64 * 44]; 18 | int exposure; 19 | int iso; 20 | cv::Mat distCoefs; 21 | cv::Mat camMat; 22 | cv::Mat rtMat; 23 | 24 | void Populate(QByteArray JSON); 25 
| QByteArray GetJSON(bool Pretty); 26 | }; 27 | 28 | class TrackerConnection : public QObject 29 | { 30 | Q_OBJECT 31 | 32 | public: 33 | 34 | // Connection. 35 | bool accepted; 36 | bool streaming; 37 | bool recording; 38 | int streamMode; 39 | FILE* recordFile; 40 | QTcpSocket* socket; 41 | 42 | // Tracker params. 43 | uint32_t id; 44 | int version; 45 | uint32_t serial; 46 | 47 | //QString name; 48 | //uint8_t maskData[64 * 44]; 49 | TrackerProperties props; 50 | 51 | // Frame data. 52 | Decoder* decoder; 53 | float avgMasterOffset; 54 | int64_t latestFrameId; 55 | uint8_t postFrameData[VID_W * VID_H * 3]; 56 | uint8_t markerData[1024 * 10]; 57 | int markerDataSize; 58 | 59 | TrackerConnection(int Id, QTcpSocket* Socket, QObject* Parent); 60 | ~TrackerConnection(); 61 | 62 | void StartRecording(QString TakeName); 63 | void StopRecording(); 64 | 65 | void RecordData(uint8_t* Data, int Len); 66 | void UpdateProperties(QByteArray Props); 67 | 68 | signals: 69 | 70 | void OnDisconnected(TrackerConnection* Tracker); 71 | void OnNewFrame(TrackerConnection* Tracker); 72 | void OnNewMarkersFrame(TrackerConnection* Tracker); 73 | void OnInfoUpdate(TrackerConnection* Tracker); 74 | 75 | public slots: 76 | 77 | void OnTcpSocketDisconnected(); 78 | void OnTcpSocketReadyRead(); 79 | 80 | void Lock(); 81 | void Unlock(); 82 | 83 | private: 84 | 85 | QMutex _mutex; 86 | uint8_t _recvBuffer[1024 * 1024]; 87 | int _recvLength; 88 | int _recvState; 89 | int _recvPacketId; 90 | int _recvFrameSize; 91 | bool _gotDataFrame; 92 | 93 | ServerThreadWorker* _serverThreadWorker; 94 | }; -------------------------------------------------------------------------------- /timesync/Makefile: -------------------------------------------------------------------------------- 1 | output: ./main.o 2 | g++ ./main.o -o ./timesync -lrt 3 | 4 | ./main.o: main.cpp 5 | g++ -O2 -c main.cpp -o ./main.o 6 | 7 | clean: 8 | rm ./*.o 9 | -------------------------------------------------------------------------------- /timesync/main.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | #include 11 | #include 12 | #include 13 | #include 14 | 15 | typedef unsigned char u8; 16 | typedef unsigned short u16; 17 | typedef unsigned int u32; 18 | typedef unsigned long long u64; 19 | typedef char i8; 20 | typedef short i16; 21 | typedef int i32; 22 | typedef long long i64; 23 | typedef float f32; 24 | typedef double f64; 25 | 26 | i64 rttBuffer[100]; 27 | i64 rttBufferSorted[100]; 28 | i32 rttBufferIdx = 0; 29 | i64 rttMedian = 0; 30 | float rttMedianFiltered = 0.0f; 31 | 32 | i64 offsetBuffer[100]; 33 | i64 offsetBufferSorted[100]; 34 | i32 offsetBufferIdx = 0; 35 | i64 offsetMedian = 0; 36 | float offsetMedianFiltered = 0.0f; 37 | 38 | struct timeEntry 39 | { 40 | i64 localTime; 41 | i64 masterTime; 42 | }; 43 | 44 | timeEntry timeEntries[1000]; 45 | i32 timeEntryIdx = 0; 46 | 47 | //------------------------------------------------------------------------------------------------------------ 48 | // Utility functions.
49 | //------------------------------------------------------------------------------------------------------------ 50 | inline u64 GetUS() 51 | { 52 | //struct timespec t1, t2; 53 | //clock_gettime(CLOCK_MONOTONIC, &t1); 54 | 55 | timespec time; 56 | clock_gettime(CLOCK_REALTIME, &time); 57 | 58 | return time.tv_sec * 1000000 + time.tv_nsec / 1000; 59 | } 60 | 61 | inline u32 GetMS() 62 | { 63 | return (GetUS() / 1000); 64 | } 65 | 66 | //------------------------------------------------------------------------------------------------------------ 67 | // Application entry. 68 | //------------------------------------------------------------------------------------------------------------ 69 | int cmpfunc(const void* a, const void* b) 70 | { 71 | return (*(i64*)a - *(i64*)b); 72 | } 73 | 74 | int main(int argc, char** argv) 75 | { 76 | std::cout << "timesync running...\n"; 77 | 78 | int s = socket(AF_INET, SOCK_DGRAM, IPPROTO_UDP); 79 | 80 | struct timeval tv; 81 | tv.tv_sec = 0; 82 | tv.tv_usec = 100000; 83 | if (setsockopt(s, SOL_SOCKET, SO_RCVTIMEO,&tv,sizeof(tv)) < 0) { 84 | perror("Error"); 85 | } 86 | 87 | std::cout << s << "\n"; 88 | 89 | struct sockaddr_in hostAddr = {}; 90 | memset((char *) &hostAddr, 0, sizeof(hostAddr)); 91 | hostAddr.sin_family = AF_INET; 92 | hostAddr.sin_port = htons(4894); 93 | 94 | std::cout << inet_aton("192.168.1.107", &hostAddr.sin_addr) << "\n"; 95 | 96 | u64 startTimeUs = GetUS(); 97 | i64 startMasterUs = 0; 98 | 99 | FILE* f = fopen("log.txt", "w"); 100 | 101 | memset(rttBuffer, 0, sizeof(rttBuffer)); 102 | memset(rttBufferSorted, 0, sizeof(rttBuffer)); 103 | 104 | while (1) 105 | { 106 | char msg[1500]; 107 | 108 | *(i64*)(msg + 8) = (GetUS() - startTimeUs) + offsetMedianFiltered + startMasterUs + (i64)(rttMedianFiltered * 0.5f); 109 | *(i64*)msg = GetUS() - startTimeUs; 110 | 111 | int res = sendto(s, msg, 16, 0, (struct sockaddr*)&hostAddr, sizeof(hostAddr)); 112 | //std::cout << "Send " << res << "\n"; 113 | 114 | struct sockaddr_in recvAddr; 115 | unsigned int recvAddrLen = sizeof(recvAddr); 116 | res = recvfrom(s, msg, sizeof(msg), 0, (struct sockaddr*)&recvAddr, &recvAddrLen); 117 | 118 | i64 recvTimeUs = GetUS() - startTimeUs; 119 | 120 | if (res == -1) 121 | { 122 | std::cout << "TIME OUT BITCHES\n"; 123 | continue; 124 | } 125 | 126 | i64 sendTimeUs = *(i64*)msg; 127 | i64 rttUs = recvTimeUs - sendTimeUs; 128 | i64 masterTimeUs = *(i64*)(msg + 8) - startMasterUs - (i64)(rttMedianFiltered * 0.5f); 129 | 130 | if (startMasterUs == 0) 131 | startMasterUs = masterTimeUs; 132 | 133 | std::cout << "Recv " << res << ": " << (u64)rttUs << " " << masterTimeUs << " " << (masterTimeUs - recvTimeUs) << "\n"; 134 | fprintf(f, "%f %f %f\n", ((float)(recvTimeUs) / 1000.0f), ((float)(masterTimeUs) / 1000.0f), ((float)(rttUs) / 1000.0f)); 135 | fflush(f); 136 | 137 | rttBuffer[rttBufferIdx++] = rttUs; 138 | if (rttBufferIdx == 100) 139 | rttBufferIdx = 0; 140 | 141 | memcpy(rttBufferSorted, rttBuffer, sizeof(rttBuffer)); 142 | 143 | u64 t = GetUS(); 144 | qsort(rttBufferSorted, 100, sizeof(i64), cmpfunc); 145 | t = GetUS() - t; 146 | //std::cout << "Sort time: " << (i64)t << "\n"; 147 | 148 | rttMedian = rttBufferSorted[49]; 149 | rttMedianFiltered = rttMedianFiltered * 0.95f + rttMedian * 0.05f; 150 | 151 | std::cout << "Median rtt: " << rttMedian << " " << rttMedianFiltered << "\n"; 152 | 153 | offsetBuffer[offsetBufferIdx++] = masterTimeUs - recvTimeUs; 154 | if (offsetBufferIdx == 100) 155 | offsetBufferIdx = 0; 156 | 157 | memcpy(offsetBufferSorted, 
offsetBuffer, sizeof(offsetBuffer)); 158 | 159 | qsort(offsetBufferSorted, 100, sizeof(i64), cmpfunc); 160 | 161 | offsetMedian = offsetBufferSorted[49]; 162 | offsetMedianFiltered = offsetMedianFiltered * 0.95f + offsetMedian * 0.05f; 163 | 164 | std::cout << "Median offset: " << offsetMedian << " " << offsetMedianFiltered << "\n"; 165 | 166 | 167 | 168 | 169 | usleep(100000); 170 | } 171 | 172 | close(s); 173 | 174 | return 0; 175 | } -------------------------------------------------------------------------------- /timing.js: -------------------------------------------------------------------------------- 1 | const dgram = require('dgram'); 2 | const NS_PER_SEC = 1e9; 3 | 4 | let socket = dgram.createSocket('udp4'); 5 | socket.bind(45455); 6 | 7 | let prevTime = 0; 8 | let startTime = getUS(); 9 | let prevIndex = 0; 10 | 11 | function getUS() 12 | { 13 | let pt = process.hrtime(); 14 | return Math.floor((pt[0] * NS_PER_SEC + pt[1]) / 1000000); 15 | } 16 | 17 | socket.on('listening', () => { 18 | 19 | }); 20 | 21 | let timeMap = {}; 22 | 23 | socket.on('message', (message, remote) => { 24 | let t = getUS(); 25 | //let d = t - prevTime; 26 | //prevTime = t; 27 | let d = t - startTime; 28 | 29 | /* 30 | if (remote.address == '192.168.1.106') { 31 | prevIndex = parseInt(message); 32 | } else { 33 | let indexD = parseInt(message) - prevIndex; 34 | console.log(indexD); 35 | } 36 | */ 37 | 38 | timeMap[remote.address] = parseInt(message); 39 | 40 | //console.log(d + ' ' + message + ' ' + remote.address); 41 | }); 42 | 43 | setInterval(() => { 44 | 45 | console.log('-------------------------------------------'); 46 | 47 | let zeroTime = -1; 48 | 49 | for (var key in timeMap) { 50 | 51 | let t = timeMap[key]; 52 | 53 | if (zeroTime == -1) { 54 | zeroTime = t; 55 | } 56 | 57 | console.log(key, t - zeroTime); 58 | } 59 | 60 | }, 500); -------------------------------------------------------------------------------- /volume calibration process.txt: -------------------------------------------------------------------------------- 1 | Volume & camera calibration. 2 | --------------------------------------------------------------------------------------- 3 | 4 | Build 2D markers. 5 | --------------------------------------------------------------------------------------- 6 | - Undistort with pre calibrated K, D(k1, k2, p1, p2). 7 | - Clip to view bounds. (Maybe not required? - depends on distortion issues). 8 | 9 | 10 | Phase 1 - Build rough extrinsic parameters. 11 | --------------------------------------------------------------------------------------- 12 | 13 | - For each camera & root camera pair: 14 | - Find corresponding points. 15 | - Look for frames that have a single 2D marker across all cameras and assume a match. 16 | (Closest point match should remove outliers) 17 | - Find fundamental matrix using Levenberg-Marquardt. 18 | - Epiline errors may be useful to minimize sync video error. 19 | - Essential matrix = Kt * fMat * K. 20 | - Recover R,t for second camera in pair relative to first. 21 | - Uses faux focal length and principal point. 22 | - t is unit vector, needs to be scaled. 23 | - Calculate camera matrices. 24 | - Pairwise triangulate 3D markers. 25 | - Build scale value by comparing 3D markers to root pair markers. 26 | - Scale t by scale value. 27 | - Calculate camera matrices. 28 | 29 | - Output: Need R, t - relative to world 0,0,0 (Root camera). 30 | 31 | 32 | Phase 2 - Create refined extrinsic & intrinsic parameters. 
33 | --------------------------------------------------------------------------------------- 34 | - Inputs: K, R, t, D(k1, k2, p1, p2) 35 | - Bundle adjust. 36 | - Outputs: K', R', t', D(k1, k2, p1, p2)' 37 | 38 | 39 | Calculate camera matrices. 40 | --------------------------------------------------------------------------------------- 41 | - Inputs: K, R, t 42 | 43 | Cw (Camera world matrix) = [Rt | -(Rt * t)] (Rt = R transposed) 44 | Pu (Camera unit projection matrix) = [R|t] 45 | P (Camera projection matrix) = K[R|t] 46 | 47 | 48 | Build 3D markers. 49 | --------------------------------------------------------------------------------------- 50 | Assumes no distortion. 51 | 52 | - Inputs: K, W 53 | 54 | - Build 2D marker rays in world space. 55 | - CSR (camera space ray direction) = Kinv * vec3(undistorted point pixel coords, 1) 56 | - WSR (world space ray direction) = W * vec4(CSR, 0) 57 | - WSO (world space ray origin) = W * vec4(0, 0, 0, 1) 58 | 59 | - Find corresponding rays. 60 | - A: Match 2D marker rays in undistorted camera space using pixel distance. 61 | - Project ray into camera space. P projects world to undistorted camspace. 62 | - B: Match 2D marker rays in world space using world unit distance. 63 | 64 | - Find closest point between corresponding rays (midpoint of the shortest segment between the two rays; see the sketch below). --------------------------------------------------------------------------------
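A minimal sketch of that final step, the midpoint of the shortest segment between two world-space rays (o1 + t1*d1 and o2 + t2*d2), using Qt's QVector3D as elsewhere in VectorSynth. The function name and the parallel-ray tolerance are illustrative, not part of the source:

#include <QVector3D>
#include <cmath>

// Returns the midpoint of the shortest segment between two rays, which is the
// triangulated 3D marker position for a pair of corresponding marker rays.
QVector3D ClosestPointBetweenRays(QVector3D o1, QVector3D d1, QVector3D o2, QVector3D d2)
{
    // Minimize |(o1 + t1*d1) - (o2 + t2*d2)| in closed form.
    QVector3D w0 = o1 - o2;
    float a = QVector3D::dotProduct(d1, d1);
    float b = QVector3D::dotProduct(d1, d2);
    float c = QVector3D::dotProduct(d2, d2);
    float d = QVector3D::dotProduct(d1, w0);
    float e = QVector3D::dotProduct(d2, w0);
    float denom = a * c - b * b;

    // Rays are (nearly) parallel: no unique closest point.
    if (std::fabs(denom) < 1e-8f)
        return o1;

    float t1 = (b * e - c * d) / denom;
    float t2 = (a * e - b * d) / denom;

    return ((o1 + d1 * t1) + (o2 + d2 * t2)) * 0.5f;
}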