├── .gitignore
├── Images
├── IMG_4391.jpeg
├── IMG_4396.jpeg
├── IMG_4397.jpeg
├── ScreenShot01.jpg
├── ScreenShot02.jpg
├── ScreenShot03.jpg
├── ScreenShot04.jpg
├── ScreenShot05.jpg
└── faceFeatures.png
├── Info.plist
├── Installer
└── LAUWebSetup.iss
├── LAUWebCameraCapture.pro
├── README.md
├── Shaders
├── calTagXYZW.frag
├── calTagXYZW.vert
├── displayCalTag.frag
├── displayCalTag.vert
├── displayRGBVideo.frag
├── displayRGBVideo.vert
├── equalizeHistogramRGBAFilter.frag
├── equalizeHistogramRGBAFilter.vert
├── filterAdaptiveThreshold.frag
├── filterAdaptiveThreshold.vert
├── filterBinaryLookUpTable.frag
├── filterBinaryLookUpTable.vert
├── filterBinaryMedian.frag
├── filterBinaryMedian.vert
├── filterDrawFace.frag
├── filterDrawFace.vert
├── filterHarrisCornersA.frag
├── filterHarrisCornersA.vert
├── filterHarrisCornersB.frag
├── filterHarrisCornersB.vert
├── filterHarrisCornersC.frag
├── filterHarrisCornersC.vert
├── filterHarrisCornersD.frag
├── filterHarrisCornersD.vert
├── filterHarrisCornersE.frag
├── filterHarrisCornersE.vert
├── filterMaxRGBA.frag
├── filterMaxRGBA.vert
├── filterMinRGBA.frag
├── filterMinRGBA.vert
├── filterRGBAtoGray.frag
├── filterRGBAtoGray.vert
├── filterRandomMappingA.frag
├── filterRandomMappingA.vert
├── filterRandomMappingB.frag
├── filterRandomMappingB.vert
├── filterSkewBlur.frag
├── filterSkewBlur.vert
├── filterSobelEdge.frag
├── filterSobelEdge.vert
├── filterXGaussian.frag
├── filterXGaussian.vert
├── filterYGaussian.frag
├── filterYGaussian.vert
├── rgbaToHistogramFilter.frag
├── rgbaToHistogramFilter.geom
└── rgbaToHistogramFilter.vert
├── laufacialfeaturedetectorglwidget.cpp
├── laufacialfeaturedetectorglwidget.h
├── lauffmpegobject.cpp
├── lauffmpegobject.h
├── lauharriscornerdetectorglwidget.cpp
├── lauharriscornerdetectorglwidget.h
├── lauhistogramequalizationglwidget.cpp
├── lauhistogramequalizationglwidget.h
├── laumemoryobject.cpp
├── laumemoryobject.h
├── laurandomizepixelsglwidget.cpp
├── laurandomizepixelsglwidget.h
├── lausobeledgedetectorglwidget.cpp
├── lausobeledgedetectorglwidget.h
├── lautiredetectorglfilter.cpp
├── lautiredetectorglfilter.h
├── lauvideoglwidget.cpp
├── lauvideoglwidget.h
├── lauvideosurface.cpp
├── lauvideosurface.h
├── lauwebcameracapture.qrc
├── lauwebcamerawidget.cpp
├── lauwebcamerawidget.h
└── main.cpp
/.gitignore:
--------------------------------------------------------------------------------
1 | LAUWebCameraCapture.pro.user.4.10-pre1
2 | LAUWebCameraCapture.pro.user.4.9-pre1
3 | LAUWebCameraCapture.pro.user
4 | build/
5 | Installer/LAUWebCameraSetup.exe
6 |
--------------------------------------------------------------------------------
/Images/IMG_4391.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/drhalftone/Qt-OpenCV-OpenGL-FFMPEG/e76550e5adf3eba3381922f3077ff505dbce84f6/Images/IMG_4391.jpeg
--------------------------------------------------------------------------------
/Images/IMG_4396.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/drhalftone/Qt-OpenCV-OpenGL-FFMPEG/e76550e5adf3eba3381922f3077ff505dbce84f6/Images/IMG_4396.jpeg
--------------------------------------------------------------------------------
/Images/IMG_4397.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/drhalftone/Qt-OpenCV-OpenGL-FFMPEG/e76550e5adf3eba3381922f3077ff505dbce84f6/Images/IMG_4397.jpeg
--------------------------------------------------------------------------------
/Images/ScreenShot01.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/drhalftone/Qt-OpenCV-OpenGL-FFMPEG/e76550e5adf3eba3381922f3077ff505dbce84f6/Images/ScreenShot01.jpg
--------------------------------------------------------------------------------
/Images/ScreenShot02.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/drhalftone/Qt-OpenCV-OpenGL-FFMPEG/e76550e5adf3eba3381922f3077ff505dbce84f6/Images/ScreenShot02.jpg
--------------------------------------------------------------------------------
/Images/ScreenShot03.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/drhalftone/Qt-OpenCV-OpenGL-FFMPEG/e76550e5adf3eba3381922f3077ff505dbce84f6/Images/ScreenShot03.jpg
--------------------------------------------------------------------------------
/Images/ScreenShot04.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/drhalftone/Qt-OpenCV-OpenGL-FFMPEG/e76550e5adf3eba3381922f3077ff505dbce84f6/Images/ScreenShot04.jpg
--------------------------------------------------------------------------------
/Images/ScreenShot05.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/drhalftone/Qt-OpenCV-OpenGL-FFMPEG/e76550e5adf3eba3381922f3077ff505dbce84f6/Images/ScreenShot05.jpg
--------------------------------------------------------------------------------
/Images/faceFeatures.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/drhalftone/Qt-OpenCV-OpenGL-FFMPEG/e76550e5adf3eba3381922f3077ff505dbce84f6/Images/faceFeatures.png
--------------------------------------------------------------------------------
/Info.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | CFBundleExecutable
6 | LAUWebCameraCapture
7 | CFBundleGetInfoString
8 | Created by Qt/QMake
9 | CFBundleIconFile
10 |
11 | CFBundleIdentifier
12 | com.drhalftone.com.LAUWebCameraCapture
13 | CFBundlePackageType
14 | APPL
15 | CFBundleSignature
16 | ????
17 | LSMinimumSystemVersion
18 | 10.12
19 | NSMicrophoneUsageDescription
20 | This file was generated by Qt/QMake.
21 | NSCameraUsageDescription
22 | This file was generated by Qt/QMake.
23 | NOTE
24 | This file was generated by Qt/QMake.
25 | NSPrincipalClass
26 | NSApplication
27 | NSSupportsAutomaticGraphicsSwitching
28 |
29 |
30 |
31 |
--------------------------------------------------------------------------------
/Installer/LAUWebSetup.iss:
--------------------------------------------------------------------------------
1 | ; Script generated by the Inno Setup Script Wizard.
2 | ; SEE THE DOCUMENTATION FOR DETAILS ON CREATING INNO SETUP SCRIPT FILES!
3 |
4 | #define MyAppName "LAUWebCameraEffectsWidget"
5 | #define MyAppVersion "0.9"
6 | #define MyAppPublisher "Dr. Daniel L. Lau"
7 | #define MyAppURL "https://www.drhalftone.com"
8 | #define MyAppExeName "LAUWebCameraCapture.exe"
9 |
10 | [Setup]
11 | ; NOTE: The value of AppId uniquely identifies this application. Do not use the same AppId value in installers for other applications.
12 | ; (To generate a new GUID, click Tools | Generate GUID inside the IDE.)
13 | AppId={{C969550E-3DB4-46D2-A642-646441F61D9C}
14 | AppName={#MyAppName}
15 | AppVersion={#MyAppVersion}
16 | ;AppVerName={#MyAppName} {#MyAppVersion}
17 | AppPublisher={#MyAppPublisher}
18 | AppPublisherURL={#MyAppURL}
19 | AppSupportURL={#MyAppURL}
20 | AppUpdatesURL={#MyAppURL}
21 | DefaultDirName={autopf}\{#MyAppName}
22 | DefaultGroupName={#MyAppName}
23 | AllowNoIcons=yes
24 | ; Uncomment the following line to run in non administrative install mode (install for current user only.)
25 | ;PrivilegesRequired=lowest
26 | OutputDir=C:\Users\dllau\Developer\Qt-OpenCV-OpenGL-FFMPEG\Installer
27 | OutputBaseFilename=LAUWebCameraSetup
28 | Compression=lzma
29 | SolidCompression=yes
30 | WizardStyle=modern
31 |
32 | [Languages]
33 | Name: "english"; MessagesFile: "compiler:Default.isl"
34 |
35 | [Tasks]
36 | Name: "desktopicon"; Description: "{cm:CreateDesktopIcon}"; GroupDescription: "{cm:AdditionalIcons}"; Flags: unchecked
37 |
38 | [Files]
39 | Source: "C:\Users\dllau\Developer\Qt-OpenCV-OpenGL-FFMPEG\build\Desktop_Qt_5_15_2_MSVC2019_64bit-Release\release\{#MyAppExeName}"; DestDir: "{app}"; Flags: ignoreversion
40 | Source: "C:\Users\dllau\Developer\Qt-OpenCV-OpenGL-FFMPEG\build\Desktop_Qt_5_15_2_MSVC2019_64bit-Release\release\*"; DestDir: "{app}"; Flags: ignoreversion recursesubdirs createallsubdirs
41 | ; NOTE: Don't use "Flags: ignoreversion" on any shared system files
42 |
43 | [Icons]
44 | Name: "{group}\{#MyAppName}"; Filename: "{app}\{#MyAppExeName}"
45 | Name: "{autodesktop}\{#MyAppName}"; Filename: "{app}\{#MyAppExeName}"; Tasks: desktopicon
46 |
47 | [Run]
48 | Filename: "{app}\{#MyAppExeName}"; Description: "{cm:LaunchProgram,{#StringChange(MyAppName, '&', '&&')}}"; Flags: nowait postinstall skipifsilent
49 |
50 |
--------------------------------------------------------------------------------
/LAUWebCameraCapture.pro:
--------------------------------------------------------------------------------
1 | #-------------------------------------------------
2 | #
3 | # Project created by QtCreator 2017-12-24T17:00:22
4 | #
5 | #-------------------------------------------------
6 |
7 | CONFIG -= visage
8 | CONFIG -= ffmpeg
9 |
10 | QT += core gui multimedia widgets multimediawidgets opengl
11 | TARGET = LAUWebCameraCapture
12 | TEMPLATE = app
13 |
14 | # The following define makes your compiler emit warnings if you use
15 | # any feature of Qt which has been marked as deprecated (the exact warnings
16 | # depend on your compiler). Please consult the documentation of the
17 | # deprecated API in order to know how to port your code away from it.
18 | DEFINES += QT_DEPRECATED_WARNINGS
19 |
20 | # You can also make your code fail to compile if you use deprecated APIs.
21 | # In order to do so, uncomment the following line.
22 | # You can also select to disable deprecated APIs only up to a certain version of Qt.
23 | #DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000 # disables all the APIs deprecated before Qt 6.0.0
24 |
25 | QMAKE_CXXFLAGS += -fdeclspec
26 |
27 | SOURCES += \
28 | main.cpp \
29 | laumemoryobject.cpp \
30 | lauvideosurface.cpp \
31 | lauvideoglwidget.cpp \
32 | lauwebcamerawidget.cpp \
33 | lautiredetectorglfilter.cpp \
34 | laurandomizepixelsglwidget.cpp \
35 | lausobeledgedetectorglwidget.cpp \
36 | lauharriscornerdetectorglwidget.cpp \
37 | lauhistogramequalizationglwidget.cpp \
38 | laufacialfeaturedetectorglwidget.cpp
39 |
40 | HEADERS += \
41 | laumemoryobject.h \
42 | lauvideosurface.h \
43 | lauvideoglwidget.h \
44 | lauwebcamerawidget.h \
45 | lautiredetectorglfilter.h \
46 | laurandomizepixelsglwidget.h \
47 | lausobeledgedetectorglwidget.h \
48 | lauharriscornerdetectorglwidget.h \
49 | lauhistogramequalizationglwidget.h \
50 | laufacialfeaturedetectorglwidget.h
51 |
52 | RESOURCES += lauwebcameracapture.qrc
53 |
54 | ffmpeg {
55 | HEADERS += lauffmpegobject.h
56 | SOURCES += lauffmpegobject.cpp
57 | }
58 |
59 | unix:macx {
60 | CONFIG += c++11
61 | INCLUDEPATH += /usr/local/include/opencv4 /usr/local/include/TIFF
62 | DEPENDPATH += /usr/local/include/opencv4 /usr/local/include/TIFF
63 | LIBS += /usr/local/lib/libopencv_core.dylib /usr/local/lib/libopencv_objdetect.dylib
64 | LIBS += /usr/local/lib/libopencv_imgproc.dylib /usr/local/lib/libopencv_calib3d.dylib
65 | LIBS += /usr/local/lib/libopencv_highgui.dylib /usr/local/lib/libopencv_ml.dylib
66 | LIBS += /usr/local/lib/libopencv_face.dylib /usr/local/lib/libtiff.5.dylib
67 |
68 | QMAKE_CXXFLAGS += -msse2 -msse3 -mssse3 -msse4.1
69 |
70 | QMAKE_INFO_PLIST = Info.plist
71 |
72 | ffmpeg {
73 | INCLUDEPATH += /usr/local/include
74 | DEPENDPATH += /usr/local/include
75 | LIBS += /usr/local/lib/libavcodec.dylib /usr/local/lib/libavdevice.dylib
76 | LIBS += /usr/local/lib/libavfilter.dylib /usr/local/lib/libavformat.dylib
77 | LIBS += /usr/local/lib/libavutil.dylib /usr/local/lib/libopus.dylib
78 | LIBS += /usr/local/lib/libswresample.dylib /usr/local/lib/libswscale.dylib
79 | }
80 |
81 | visage {
82 | DEFINES += USEVISAGE
83 | INCLUDEPATH += $$PWD/../visageSDK-macOS/include
84 | DEPENDPATH += $$PWD/../visageSDK-macOS/include
85 | LIBS += -framework CoreFoundation -framework Foundation -framework AppKit -framework Accelerate -L$$PWD/../visageSDK-macOS/lib -lVisageAnalyser -lVisageGaze -lVisageVision
86 | }
87 | }
88 |
89 | unix:!macx {
90 | CONFIG += c++11
91 | INCLUDEPATH += /usr/local/opt/opencv/include
92 | DEPENDPATH += /usr/local/opt/opencv/include
93 | LIBS += -L/usr/local/lib -lopencv_core -lopencv_objdetect -lopencv_imgproc -lopencv_calib3d -lopencv_highgui -lopencv_ml
94 | }
95 |
96 | win32 {
97 | INCLUDEPATH += $$quote(C:/usr/include)
98 | DEPENDPATH += $$quote(C:/usr/include)
99 | LIBS += -L$$quote(C:/usr/lib) -ltiff -lopengl32
100 | CONFIG += c++11
101 |
102 | INCLUDEPATH += $$quote(C:/usr/opencv/include)
103 | DEPENDPATH += $$quote(C:/usr/opencv/include)
104 | LIBS += -L$$quote(C:/usr/opencv/x64/vc17/lib)
105 | CONFIG(release, debug|release): LIBS += -lopencv_core490 -lopencv_objdetect490 -lopencv_imgproc490 -lopencv_calib3d490 -lopencv_highgui490 -lopencv_ml490 -lopencv_face490 -lopencv_videoio490
106 | CONFIG(debug, debug|release): LIBS += -lopencv_core490d -lopencv_objdetect490d -lopencv_imgproc490d -lopencv_calib3d490d -lopencv_highgui490d -lopencv_ml490d -lopencv_face490d -lopencv_videoio490d
107 |
108 | # GET WINDOWS LIBRARIES FROM https://github.com/mcmtroffaes/ffmpeg-msvc-build
109 | ffmpeg {
110 | INCLUDEPATH += $$quote(C:/usr/ffmpeg/include)
111 | DEPENDPATH += $$quote(C:/usr/ffmpeg/include)
112 | LIBS += -lmf -lmfplat -lmfplay -lmfreadwrite -lmfuuid -lStrmiids -lole32 -luser32 -lBcrypt
113 | LIBS += -L$$quote(C:/usr/ffmpeg/debug/lib) -lavcodec -lavdevice -lavfilter -lavformat -lavutil -lopus -lswresample -lswscale -lvpxmdd
114 | }
115 |
116 | visage {
117 | DEFINES += USEVISAGE
118 | INCLUDEPATH += $$quote(C:/usr/visageSDK/include)
119 | DEPENDPATH += $$quote(C:/usr/visageSDK/include)
120 | LIBS += -L$$quote(C:/usr/visageSDK/lib) -llibVisageAnalyser64 -llibVisageGaze64 -llibVisageVision64
121 | }
122 | }
123 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | Welcome to the Qt-OpenCV-OpenGL wiki! This is a Qt application that displays live video from your webcam and applies various video processing routines, namely OpenCV filters. However, unlike OpenCV, which is CUDA-centric, this project makes use of OpenGL shaders to replace as many of the CPU-based OpenCV filters as possible. Users should see this project as a teaching tool for building their own filters. It should be noted that all of these filters run at 30 fps.
2 |
3 | Here is an example of displaying raw video:
4 |
5 | 
6 |
7 | Here is an example of facial feature tracking. For this to work, you will need to also download the pre-trained face detector model (https://github.com/opencv/opencv/blob/master/data/haarcascades/haarcascade_frontalface_alt2.xml) and the pre-trained facial feature point model (https://github.com/kurnianggoro/GSOC2017/blob/master/data/lbfmodel.yaml). You will be prompted by a file dialog to locate these files on your system.
8 |
9 | 
10 |
11 | Here is an example of Harris feature detection:
12 |
13 | 
14 |
15 | Here is an example of Sobel edge detection:
16 |
17 | 
18 |
19 | Here is an example of randomly swapping pixels around (please note this takes a long time to launch). The purpose of this filter is to create a way to visualize the video frame's color histogram without the psycho-visual effects of the scene. So on the left, you have the raw video. On the right, I'm using a pseudo-random permutation/swapping of pixels from the raw video.
20 |
21 | 
22 |
--------------------------------------------------------------------------------
/Shaders/calTagXYZW.frag:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | uniform sampler2D qt_texture; // THIS TEXTURE HOLDS THE INCOMING COLOR TEXTURE
4 | uniform vec2 qt_offset; // HOLDS THE ROW AND COLUMN OFFSET SO THAT CENTER OF FIELD OF VIEW IS (0,0)
5 | uniform float[30] qt_transform; // THIS MATRIX CONVERTS FROM ROW/COLUMN TO WORLD XY
6 |
7 | layout(location = 0, index = 0) out vec4 qt_fragColor;
8 |
// Evaluate a 15-term bivariate quartic polynomial in (c,r) whose
// coefficients are stored in qt_transform starting at index 'base'.
// Terms are accumulated in the exact order of the original shader so
// floating-point results are bit-identical.
float evalQuartic(float c, float r, int base)
{
    return (c*c*c*c) * qt_transform[base +  0] + (c*c*c*r) * qt_transform[base +  1] +
           (c*c*r*r) * qt_transform[base +  2] + (c*r*r*r) * qt_transform[base +  3] +
           (r*r*r*r) * qt_transform[base +  4] + (c*c*c)   * qt_transform[base +  5] +
           (c*c*r)   * qt_transform[base +  6] + (c*r*r)   * qt_transform[base +  7] +
           (r*r*r)   * qt_transform[base +  8] + (c*c)     * qt_transform[base +  9] +
           (r*c)     * qt_transform[base + 10] + (r*r)     * qt_transform[base + 11] +
           (c)       * qt_transform[base + 12] + (r)       * qt_transform[base + 13] +
           qt_transform[base + 14];
}

void main()
{
    // GET THE PIXEL COORDINATE OF THE CURRENT FRAGMENT
    int col = int(gl_FragCoord.x);
    int row = int(gl_FragCoord.y);

    // CENTER THE ROW/COLUMN COORDINATES WITH qt_offset AND RESCALE BY 50
    // (the polynomial coefficients were fit in these normalized units)
    float c = (float(col) - qt_offset.x) / 50.0;
    float r = (float(row) - qt_offset.y) / 50.0;

    // CALCULATE THE WORLD XY-COORDINATES USING THE SUPPLIED PROJECTION MATRIX:
    // coefficients [0..14] map to world X, coefficients [15..29] map to world Y
    qt_fragColor.r = evalQuartic(c, r, 0);
    qt_fragColor.g = evalQuartic(c, r, 15);

    // CARRY THE ORIGINAL GRAYSCALE VALUE THROUGH IN THE BLUE CHANNEL
    qt_fragColor.b = texelFetch(qt_texture, ivec2(col, row), 0).r;
    qt_fragColor.a = 1.0;
}
33 |
--------------------------------------------------------------------------------
/Shaders/calTagXYZW.vert:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | in vec4 qt_vertex; // POINTS TO VERTICES PROVIDED BY USER ON CPU
4 |
void main(void)
{
    // PASS THE USER-SUPPLIED CLIP-SPACE VERTEX THROUGH UNCHANGED;
    // ALL OF THE CALTAG XYZW WORK HAPPENS IN THE PAIRED FRAGMENT SHADER
    gl_Position = qt_vertex;
}
10 |
--------------------------------------------------------------------------------
/Shaders/displayCalTag.frag:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | uniform sampler2D qt_texture; // THIS TEXTURE HOLDS THE XYZ+TEXTURE COORDINATES
4 | in vec2 qt_coordinate; // HOLDS THE TEXTURE COORDINATE FROM THE VERTEX SHADER
5 |
6 | layout(location = 0, index = 0) out vec4 qt_fragColor;
7 |
void main()
{
    // MAP THE INTERPOLATED [0,1] TEXTURE COORDINATE TO AN INTEGER TEXEL ADDRESS
    ivec2 texDim  = textureSize(qt_texture, 0);
    ivec2 texelXY = ivec2(round(vec2(texDim) * qt_coordinate));

    // FETCH THE STORED (X, Y, GRAY, A) VALUES AT THAT TEXEL
    vec4 texel = texelFetch(qt_texture, texelXY, 0);

    // PIXELS WHOSE WORLD X OR Y LIES WITHIN 0.05 OF AN INTEGER GRID LINE ARE
    // PAINTED RED; ALL OTHERS DISPLAY THE STORED GRAYSCALE (BLUE) CHANNEL
    bool nearGridX = abs(texel.r - round(texel.r)) < 0.05;
    bool nearGridY = abs(texel.g - round(texel.g)) < 0.05;

    if (nearGridX || nearGridY) {
        qt_fragColor = vec4(1.0, 0.0, 0.0, 1.0);
    } else {
        qt_fragColor = texel.bbba;
    }
}
27 |
--------------------------------------------------------------------------------
/Shaders/displayCalTag.vert:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | in vec4 qt_vertex; // POINTS TO VERTICES PROVIDED BY USER ON CPU
4 | out vec2 qt_coordinate; // OUTPUT COORDINATE TO FRAGMENT SHADER
5 |
void main(void)
{
    // FORWARD THE CLIP-SPACE VERTEX POSITION UNCHANGED
    gl_Position = qt_vertex;

    // REMAP THE VERTEX FROM [-1,+1] NDC INTO [0,1] TEXTURE SPACE
    // FOR SAMPLING IN THE FRAGMENT SHADER
    qt_coordinate = 0.5 * (qt_vertex.xy + vec2(1.0, 1.0));
}
12 |
--------------------------------------------------------------------------------
/Shaders/displayRGBVideo.frag:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | uniform sampler2D qt_texture; // THIS TEXTURE HOLDS THE XYZ+TEXTURE COORDINATES
4 | in vec2 qt_coordinate; // HOLDS THE TEXTURE COORDINATE FROM THE VERTEX SHADER
5 |
6 | layout(location = 0, index = 0) out vec4 qt_fragColor;
7 |
void main()
{
    // SAMPLE THE VIDEO TEXTURE AT THE INTERPOLATED COORDINATE (ZERO LOD BIAS)
    vec4 texel = texture(qt_texture, qt_coordinate, 0.0);
    qt_fragColor = texel;
}
13 |
--------------------------------------------------------------------------------
/Shaders/displayRGBVideo.vert:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | uniform bool qt_flip = false; // TELL THE SHADER TO FLIP THE INCOMING TEXTURE FOR DISPLAY
4 |
5 | in vec4 qt_vertex; // POINTS TO VERTICES PROVIDED BY USER ON CPU
6 | out vec2 qt_coordinate; // OUTPUT COORDINATE TO FRAGMENT SHADER
7 |
void main(void)
{
    // FORWARD THE CLIP-SPACE VERTEX POSITION UNCHANGED
    gl_Position = qt_vertex;

    // DERIVE A [0,1] TEXTURE COORDINATE FROM THE [-1,+1] VERTEX POSITION
    vec2 uv = 0.5 * (qt_vertex.xy + 1.0);

    // OPTIONALLY MIRROR VERTICALLY SO THE INCOMING FRAME DISPLAYS RIGHT-SIDE UP
    qt_coordinate = qt_flip ? vec2(uv.x, 1.0 - uv.y) : uv;
}
18 |
--------------------------------------------------------------------------------
/Shaders/equalizeHistogramRGBAFilter.frag:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | uniform sampler2D qt_texture; // THIS TEXTURE HOLDS THE INCOMING GRAYSCALE IMAGE
4 | uniform sampler2D qt_histogram; // THIS TEXTURE HOLDS THE EQUALIZING HISTOGRAMS
5 |
6 | uniform int qt_blockSizeX; // THE SIZE OF THE SUBBLOCKS IN COLUMNS
7 | uniform int qt_blockSizeY; // THE SIZE OF THE SUBBLOCKS IN ROWS
8 | uniform int qt_blocksPerCol; // NUMBER OF SUBBLOCKS PER ROW OF SUBBLOCKS
9 | uniform int qt_blocksPerRow; // NUMBER OF SUBBLOCKS PER ROW OF SUBBLOCKS
10 |
11 | uniform float qt_scale = 1.0f; // AMPLIFIES THE GRAY LEVEL FOR LESS THAN 16 BITS PER PIXEL
12 |
13 | layout(location = 0, index = 0) out vec4 qt_fragColor;
14 |
// Fetch the equalized output level for each color channel from one
// sub-block's equalization curve. 'level' holds the per-channel bin
// (column) index into qt_histogram; 'block' selects the row holding
// that sub-block's curve. Alpha is forced opaque.
vec4 fetchEqualized(ivec4 level, int block)
{
    vec4 pix;
    pix.r = texelFetch(qt_histogram, ivec2(level.r, block), 0).r;
    pix.g = texelFetch(qt_histogram, ivec2(level.g, block), 0).g;
    pix.b = texelFetch(qt_histogram, ivec2(level.b, block), 0).b;
    pix.a = 1.0f;
    return pix;
}

void main()
{
    // CONVERT THE COORDINATE TO UNITS OF BLOCKS, CLAMPED TO THE VALID RANGE
    float xCrd = min(max(gl_FragCoord.x / float(qt_blockSizeX) - 0.5, 0.0), float(qt_blocksPerRow - 1));
    float yCrd = min(max(gl_FragCoord.y / float(qt_blockSizeY) - 0.5, 0.0), float(qt_blocksPerCol - 1));

    // FIND THE FOUR SUB-BLOCK CORNERS SURROUNDING THE CURRENT PIXEL
    vec2 topLft = vec2(floor(xCrd), floor(yCrd));
    vec2 topRgt = vec2(ceil(xCrd), floor(yCrd));
    vec2 botLft = vec2(floor(xCrd), ceil(yCrd));
    vec2 botRgt = vec2(ceil(xCrd), ceil(yCrd));

    // CALCULATE THE LAMBDA COORDINATE FOR BILINEAR INTERPOLATION
    vec2 lambda = vec2(xCrd, yCrd) - topLft;

    // QUANTIZE EACH CHANNEL OF THE INPUT PIXEL TO A HISTOGRAM BIN INDEX;
    // qt_scale AMPLIFIES INPUTS WITH FEWER THAN 16 BITS PER PIXEL
    ivec4 xCoord = ivec4(255.0 * qt_scale * texelFetch(qt_texture, ivec2(gl_FragCoord.xy), 0));

    // ROW INDEX OF EACH CORNER SUB-BLOCK'S EQUALIZATION CURVE
    int yCoordTopLft = qt_blocksPerCol * int(topLft.x) + int(topLft.y);
    int yCoordTopRgt = qt_blocksPerCol * int(topRgt.x) + int(topRgt.y);
    int yCoordBotLft = qt_blocksPerCol * int(botLft.x) + int(botLft.y);
    int yCoordBotRgt = qt_blocksPerCol * int(botRgt.x) + int(botRgt.y);

    // LOOK UP THE FOUR CORNER RESPONSES
    vec4 pixTopLft = fetchEqualized(xCoord, yCoordTopLft);
    vec4 pixTopRgt = fetchEqualized(xCoord, yCoordTopRgt);
    vec4 pixBotLft = fetchEqualized(xCoord, yCoordBotLft);
    vec4 pixBotRgt = fetchEqualized(xCoord, yCoordBotRgt);

    // BLEND HORIZONTALLY, THEN VERTICALLY (EXPLICIT LERP PRESERVES THE
    // ORIGINAL FLOATING-POINT EVALUATION ORDER)
    pixTopLft = (1.0 - lambda.x) * pixTopLft + lambda.x * pixTopRgt;
    pixBotLft = (1.0 - lambda.x) * pixBotLft + lambda.x * pixBotRgt;

    qt_fragColor = (1.0 - lambda.y) * pixTopLft + lambda.y * pixBotLft;
}
68 |
--------------------------------------------------------------------------------
/Shaders/equalizeHistogramRGBAFilter.vert:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | in vec4 qt_vertex; // POINTS TO VERTICES PROVIDED BY USER ON CPU
4 |
void main(void)
{
    // PASS THE USER-SUPPLIED CLIP-SPACE VERTEX THROUGH UNCHANGED;
    // THE EQUALIZATION WORK HAPPENS IN THE PAIRED FRAGMENT SHADER
    gl_Position = qt_vertex;
}
10 |
--------------------------------------------------------------------------------
/Shaders/filterAdaptiveThreshold.frag:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | uniform sampler2D qt_texture; // THIS SAMPLER HOLDS THE IMAGE TEXTURE
4 | uniform sampler2D qt_threshold; // THIS SAMPLER HOLDS THE PER-PIXEL THRESHOLD TEXTURE
5 | uniform bool qt_invert; // SAYS IF WE SHOULD LOOK FOR ABOVE OR BELOW THE THRESHOLD
6 | uniform float qt_offset; // OFFSET ADDED TO THE THRESHOLD BEFORE QUANTIZATION
7 |
8 | layout(location = 0, index = 0) out vec4 qt_fragColor;
9 |
void main()
{
    ivec2 xy = ivec2(gl_FragCoord.xy);

    // FETCH THE SOURCE PIXEL AND ITS LOCALLY-COMPUTED THRESHOLD, WITH THE
    // USER OFFSET ADDED TO THE THRESHOLD BEFORE COMPARISON
    vec3 src = texelFetch(qt_texture, xy, 0).rgb;
    vec3 ref = texelFetch(qt_threshold, xy, 0).rgb + qt_offset;

    // BINARIZE EACH CHANNEL AGAINST THE THRESHOLD, OPTIONALLY INVERTED
    bvec3 mask = qt_invert ? lessThan(src, ref) : greaterThan(src, ref);
    qt_fragColor = vec4(mask, 1.0);
}
21 |
--------------------------------------------------------------------------------
/Shaders/filterAdaptiveThreshold.vert:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | in vec4 qt_vertex; // POINTS TO VERTICES PROVIDED BY USER ON CPU
4 |
void main(void)
{
    // PASS THE USER-SUPPLIED CLIP-SPACE VERTEX THROUGH UNCHANGED;
    // THE THRESHOLDING WORK HAPPENS IN THE PAIRED FRAGMENT SHADER
    gl_Position = qt_vertex;
}
10 |
--------------------------------------------------------------------------------
/Shaders/filterBinaryLookUpTable.frag:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | uniform sampler2D qt_texture; // THIS TEXTURE HOLDS THE BINARY TEXTURE
4 | uniform sampler1D qt_lookUpTable; // THIS TEXTURE HOLDS THE LOOK-UP TABLE
5 |
6 | layout(location = 0, index = 0) out vec4 qt_fragColor;
7 |
void main()
{
    // GET THE FRAGMENT PIXEL COORDINATE
    ivec2 center = ivec2(gl_FragCoord.xy);

    // PACK THE 3X3 BINARY NEIGHBORHOOD INTO A 9-BIT INDEX, WITH THE MOST
    // SIGNIFICANT BIT AT THE (-1,-1) NEIGHBOR AND THE LEAST SIGNIFICANT
    // BIT AT (+1,+1), MATCHING ROW-MAJOR ORDER
    int index = 0;
    for (int dy = -1; dy <= 1; dy++) {
        for (int dx = -1; dx <= 1; dx++) {
            index = index * 2;
            if (texelFetch(qt_texture, center + ivec2(dx, dy), 0).r > 0.5) {
                index = index + 1;
            }
        }
    }

    // MAP THE NEIGHBORHOOD PATTERN THROUGH THE 512-ENTRY LOOK-UP TABLE
    qt_fragColor = texelFetch(qt_lookUpTable, index, 0).rrrr;
}
29 |
--------------------------------------------------------------------------------
/Shaders/filterBinaryLookUpTable.vert:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | in vec4 qt_vertex; // POINTS TO VERTICES PROVIDED BY USER ON CPU
4 |
void main(void)
{
    // PASS THE USER-SUPPLIED CLIP-SPACE VERTEX THROUGH UNCHANGED;
    // THE LOOK-UP TABLE WORK HAPPENS IN THE PAIRED FRAGMENT SHADER
    gl_Position = qt_vertex;
}
10 |
--------------------------------------------------------------------------------
/Shaders/filterBinaryMedian.frag:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | uniform sampler2D qt_texture; // THIS SAMPLER HOLDS THE IMAGE TEXTURE
4 | uniform int qt_radius; // KEEPS TRACK OF THE RADIUS OF THE MEDIAN FILTER
5 | uniform float qt_threshold; // HOLDS THE THRESHOLD FOR CHOOSING THE OUTPUT
6 |
7 | layout(location = 0, index = 0) out vec4 qt_fragColor;
8 |
void main()
{
    // SUM THE (2*qt_radius+1)-SQUARE BINARY NEIGHBORHOOD AROUND THIS FRAGMENT
    vec4 total = vec4(0.0);
    for (int dy = -qt_radius; dy <= qt_radius; dy++) {
        for (int dx = -qt_radius; dx <= qt_radius; dx++) {
            total += texelFetch(qt_texture, ivec2(gl_FragCoord.x + dx, gl_FragCoord.y + dy), 0);
        }
    }

    // OUTPUT WHITE WHEREVER THE NEIGHBORHOOD COUNT EXCEEDS THE THRESHOLD
    // (AN APPROXIMATE BINARY MEDIAN / MAJORITY VOTE)
    qt_fragColor = vec4(greaterThan(total.rgb, vec3(qt_threshold)), 1.0);
}
19 |
--------------------------------------------------------------------------------
/Shaders/filterBinaryMedian.vert:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | in vec4 qt_vertex; // POINTS TO VERTICES PROVIDED BY USER ON CPU
4 |
void main(void)
{
    // PASS THE USER-SUPPLIED CLIP-SPACE VERTEX THROUGH UNCHANGED;
    // THE MEDIAN FILTERING HAPPENS IN THE PAIRED FRAGMENT SHADER
    gl_Position = qt_vertex;
}
10 |
--------------------------------------------------------------------------------
/Shaders/filterDrawFace.frag:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | uniform sampler2D qt_texture; // THIS TEXTURE HOLDS THE XYZ+TEXTURE COORDINATES
4 | in vec2 qt_coordinate; // HOLDS THE TEXTURE COORDINATE FROM THE VERTEX SHADER
5 |
6 | layout(location = 0, index = 0) out vec4 qt_fragColor;
7 |
void main()
{
    // LOOK UP THE SOURCE TEXTURE AT THE INTERPOLATED COORDINATE (ZERO LOD BIAS)
    vec4 texel = texture(qt_texture, qt_coordinate, 0.0);
    qt_fragColor = texel;
}
13 |
--------------------------------------------------------------------------------
/Shaders/filterDrawFace.vert:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | uniform vec2 qt_size;
4 |
5 | in vec4 qt_vertex; // POINTS TO VERTICES PROVIDED BY USER ON CPU
6 | out vec2 qt_coordinate; // OUTPUT COORDINATE TO FRAGMENT SHADER
7 |
void main(void)
{
    // THE VERTEX XY IS A PIXEL POSITION; RESCALE IT FROM [0, qt_size]
    // INTO [-1,+1] CLIP SPACE
    vec2 ndc = 2.0 * (qt_vertex.xy / qt_size.xy) - 1.0;
    gl_Position = vec4(ndc, 0.0, 1.0);

    // THE VERTEX ZW HOLDS THE MATCHING PIXEL POSITION IN THE SOURCE TEXTURE;
    // NORMALIZE IT TO [0,1] FOR SAMPLING IN THE FRAGMENT SHADER
    qt_coordinate = qt_vertex.zw / qt_size;
}
17 |
--------------------------------------------------------------------------------
/Shaders/filterHarrisCornersA.frag:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | uniform sampler2D qt_texture; // THIS TEXTURE HOLDS THE RGB TEXTURE COORDINATES
4 |
5 | layout(location = 0, index = 0) out vec4 qt_fragColor;
6 |
7 | void main()
8 | {
9 | // GET THE FRAGMENT PIXEL COORDINATE
10 | ivec2 coord = ivec2(gl_FragCoord.xy);
11 |
12 | // PULL OUT A 3X3 WINDOW AROUND THE CURRENT PIXEL
13 | vec4 pixelB = texelFetch(qt_texture, coord + ivec2( 0,-1), 0);
14 | vec4 pixelH = texelFetch(qt_texture, coord + ivec2( 0, 1), 0);
15 |
16 | vec4 pixelD = texelFetch(qt_texture, coord + ivec2(-1, 0), 0);
17 | vec4 pixelF = texelFetch(qt_texture, coord + ivec2( 1, 0), 0);
18 |
19 | // CALCULATE THE PARTIAL DERIVATIVES IN THE ROW AND COLUMN DIRECTIONS
20 | vec4 dX = pixelF - pixelD;
21 | vec4 dY = pixelH - pixelB;
22 |
23 | float dGx = (0.2126 * dX.r + 0.7152 * dX.g + 0.0722 * dX.b);
24 | float dGy = (0.2126 * dY.r + 0.7152 * dY.g + 0.0722 * dY.b);
25 |
26 | // DERIVE THE HARRIS CORNER 2X2 MATRIX
27 | qt_fragColor.r = dGx * dGx;
28 | qt_fragColor.g = dGy * dGy;
29 | qt_fragColor.b = dGx * dGy;
30 | qt_fragColor.a = qt_fragColor.b/qt_fragColor.b;
31 |
32 | return;
33 | }
34 |
--------------------------------------------------------------------------------
/Shaders/filterHarrisCornersA.vert:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | in vec4 qt_vertex; // CLIP-SPACE VERTEX PROVIDED BY THE USER ON THE CPU
4 |
5 | // PASS-THROUGH VERTEX SHADER: FORWARD THE VERTEX POSITION UNCHANGED
6 | void main(void)
7 | {
8 |     gl_Position = vec4(qt_vertex);
9 | }
10 |
--------------------------------------------------------------------------------
/Shaders/filterHarrisCornersB.frag:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | uniform sampler2D qt_texture; // INPUT TEXTURE TO BE SMOOTHED
4 |
5 | layout(location = 0, index = 0) out vec4 qt_fragColor;
6 |
7 | // 5X5 GAUSSIAN SMOOTHING WEIGHTS STORED IN ROW-MAJOR ORDER (SUMS TO ~1.0)
8 | const float kernel[25] = float[25](
9 |     0.0029690, 0.0133062, 0.0219382, 0.0133062, 0.0029690,
10 |     0.0133062, 0.0596343, 0.0983203, 0.0596343, 0.0133062,
11 |     0.0219382, 0.0983203, 0.1621028, 0.0983203, 0.0219382,
12 |     0.0133062, 0.0596343, 0.0983203, 0.0596343, 0.0133062,
13 |     0.0029690, 0.0133062, 0.0219382, 0.0133062, 0.0029690);
14 |
15 | void main()
16 | {
17 |     // GET THE FRAGMENT PIXEL COORDINATE
18 |     ivec2 coord = ivec2(gl_FragCoord.xy);
19 |
20 |     // ACCUMULATE THE WEIGHTED SUM OVER THE 5X5 NEIGHBORHOOD,
21 |     // VISITING TAPS IN THE SAME ROW-BY-ROW ORDER AS BEFORE
22 |     qt_fragColor = vec4(0.0);
23 |     for (int r = -2; r <= 2; r++){
24 |         for (int c = -2; c <= 2; c++){
25 |             qt_fragColor += kernel[5 * (r + 2) + (c + 2)] * texelFetch(qt_texture, coord + ivec2(c, r), 0);
26 |         }
27 |     }
28 | }
44 |
--------------------------------------------------------------------------------
/Shaders/filterHarrisCornersB.vert:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | in vec4 qt_vertex; // CLIP-SPACE VERTEX PROVIDED BY THE USER ON THE CPU
4 |
5 | // PASS-THROUGH VERTEX SHADER: FORWARD THE VERTEX POSITION UNCHANGED
6 | void main(void)
7 | {
8 |     gl_Position = vec4(qt_vertex);
9 | }
10 |
--------------------------------------------------------------------------------
/Shaders/filterHarrisCornersC.frag:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | uniform sampler2D qt_texture; // HOLDS THE STRUCTURE TENSOR TERMS [Ix*Ix, Iy*Iy, Ix*Iy, FLAG]
4 | uniform float qt_scaleFactor; // THE HARRIS K CONSTANT IN R = det(M) - K*trace(M)^2
5 |
6 | layout(location = 0, index = 0) out vec4 qt_fragColor;
7 |
8 | void main()
9 | {
10 |     // GET THE CURRENT PIXEL
11 |     vec4 pixel = texelFetch(qt_texture, ivec2(gl_FragCoord.xy), 0);
12 |
13 |     // HARRIS RESPONSE R = det(M) - K*trace(M)^2 WITH M = [[x, z], [z, y]], DUPLICATED INTO R AND G
14 |     qt_fragColor.r = (pixel.x * pixel.y - pixel.z * pixel.z) - qt_scaleFactor * (pixel.x + pixel.y) * (pixel.x + pixel.y);
15 |     qt_fragColor.g = qt_fragColor.r;
16 |
17 |     // FLAG CHANNELS VIA X/X: 1.0 FOR VALID DATA, NaN OTHERWISE -- NOTE(review): also NaN (0/0) when the response is exactly zero; confirm intended
18 |     qt_fragColor.ba = qt_fragColor.rg /qt_fragColor.rg;
19 |
20 |     return;
21 | }
22 |
--------------------------------------------------------------------------------
/Shaders/filterHarrisCornersC.vert:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | in vec4 qt_vertex; // CLIP-SPACE VERTEX PROVIDED BY THE USER ON THE CPU
4 |
5 | // PASS-THROUGH VERTEX SHADER: FORWARD THE VERTEX POSITION UNCHANGED
6 | void main(void)
7 | {
8 |     gl_Position = vec4(qt_vertex);
9 | }
10 |
--------------------------------------------------------------------------------
/Shaders/filterHarrisCornersD.frag:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | uniform sampler2D qt_texture; // HOLDS THE HARRIS CORNER RESPONSE FROM THE PREVIOUS PASS
4 | uniform float qt_threshold; // SCALES THE MINIMUM RESPONSE A PIXEL MUST EXCEED TO COUNT AS A CORNER
5 | uniform int qt_radius; // HALF-WIDTH OF THE NON-MAXIMUM SUPPRESSION WINDOW
6 |
7 | layout(location = 0, index = 0) out vec4 qt_fragColor;
8 |
9 | void main()
10 | {
11 |     // GET THE CURRENT PIXEL COORDINATE
12 |     ivec2 coord = ivec2(gl_FragCoord.xy);
13 |
14 |     // GET THE CURRENT PIXEL
15 |     vec4 pixel = texelFetch(qt_texture, coord, 0);
16 |
17 |     // SEE IF THE CURRENT PIXEL IS A LOCAL MAX
18 |     qt_fragColor = qt_threshold * vec4(0.001f, 0.001f, 0.001f, 0.001f); // SEED THE RUNNING MAX WITH A THRESHOLD-SCALED FLOOR SO WEAK RESPONSES CANNOT PASS
19 |     for (int r=-qt_radius; r<=qt_radius; r++){
20 |         for (int c=-qt_radius; c<=qt_radius; c++){
21 |             qt_fragColor = max(qt_fragColor, texelFetch(qt_texture, coord+ivec2(c,r), 0));
22 |         }
23 |     }
24 |     qt_fragColor = vec4(equal(qt_fragColor, pixel)); // EXACT FLOAT EQUALITY: 1.0 ONLY WHERE THE CENTER PIXEL IS THE WINDOW MAXIMUM
25 |
26 |     return;
27 | }
28 |
--------------------------------------------------------------------------------
/Shaders/filterHarrisCornersD.vert:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | in vec4 qt_vertex; // CLIP-SPACE VERTEX PROVIDED BY THE USER ON THE CPU
4 |
5 | // PASS-THROUGH VERTEX SHADER: FORWARD THE VERTEX POSITION UNCHANGED
6 | void main(void)
7 | {
8 |     gl_Position = vec4(qt_vertex);
9 | }
10 |
--------------------------------------------------------------------------------
/Shaders/filterHarrisCornersE.frag:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | uniform sampler2D qt_textureA; // XYZG SCAN TEXTURE
4 | uniform sampler2D qt_textureB; // BINARY CORNER MASK TEXTURE
5 |
6 | layout(location = 0, index = 0) out vec4 qt_fragColor;
7 |
8 | void main()
9 | {
10 |     // GET THE CURRENT PIXEL COORDINATE
11 |     ivec2 coord = ivec2(gl_FragCoord.xy);
12 |
13 |     // START FROM THE INPUT SCAN PIXEL
14 |     qt_fragColor = texelFetch(qt_textureA, coord, 0);
15 |
16 |     // DILATE THE BINARY CORNER MASK OVER A 3X3 WINDOW SO THAT
17 |     // CORNER PIXELS END UP WITH A VALUE GREATER THAN 1.0
18 |     vec2 mask = texelFetch(qt_textureB, coord, 0).rg;
19 |     for (int r = -1; r <= 1; r++){
20 |         for (int c = -1; c <= 1; c++){
21 |             mask = max(mask, texelFetch(qt_textureB, coord + ivec2(c, r), 0).rg);
22 |         }
23 |     }
24 |
25 |     // ADD THE DILATED MASK ONTO THE SCAN TEXTURE
26 |     qt_fragColor.rg += mask;
27 | }
32 |
--------------------------------------------------------------------------------
/Shaders/filterHarrisCornersE.vert:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | in vec4 qt_vertex; // CLIP-SPACE VERTEX PROVIDED BY THE USER ON THE CPU
4 |
5 | // PASS-THROUGH VERTEX SHADER: FORWARD THE VERTEX POSITION UNCHANGED
6 | void main(void)
7 | {
8 |     gl_Position = vec4(qt_vertex);
9 | }
10 |
--------------------------------------------------------------------------------
/Shaders/filterMaxRGBA.frag:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | uniform sampler2D qt_texture; // INPUT TEXTURE TO BE FILTERED
4 | uniform int qt_radius = 1; // HALF-WIDTH OF THE FILTER WINDOW
5 |
6 | layout(location = 0, index = 0) out vec4 qt_fragColor;
7 |
8 | // GRAY-SCALE DILATION: OUTPUT THE PER-CHANNEL MAXIMUM OVER THE
9 | // (2*qt_radius+1)^2 WINDOW CENTERED ON THE CURRENT FRAGMENT
10 | void main()
11 | {
12 |     // GET THE FRAGMENT PIXEL COORDINATE
13 |     ivec2 coord = ivec2(gl_FragCoord.xy);
14 |
15 |     // SEED WITH THE CENTER PIXEL (NOT ZERO) SO THE RESULT IS CORRECT
16 |     // EVEN WHEN THE TEXTURE HOLDS NEGATIVE VALUES (E.G. GL_RGBA32F);
17 |     // IDENTICAL TO THE OLD BEHAVIOR FOR NON-NEGATIVE INPUTS
18 |     qt_fragColor = texelFetch(qt_texture, coord, 0);
19 |     for (int r = -qt_radius; r <= qt_radius; r++){
20 |         for (int c = -qt_radius; c <= qt_radius; c++){
21 |             qt_fragColor = max(qt_fragColor, texelFetch(qt_texture, ivec2(c,r) + coord, 0));
22 |         }
23 |     }
24 | }
20 |
--------------------------------------------------------------------------------
/Shaders/filterMaxRGBA.vert:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | in vec4 qt_vertex; // CLIP-SPACE VERTEX PROVIDED BY THE USER ON THE CPU
4 |
5 | // PASS-THROUGH VERTEX SHADER: FORWARD THE VERTEX POSITION UNCHANGED
6 | void main(void)
7 | {
8 |     gl_Position = vec4(qt_vertex);
9 | }
10 |
--------------------------------------------------------------------------------
/Shaders/filterMinRGBA.frag:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | uniform sampler2D qt_texture; // INPUT TEXTURE TO BE FILTERED
4 | uniform int qt_radius = 1; // HALF-WIDTH OF THE FILTER WINDOW
5 |
6 | layout(location = 0, index = 0) out vec4 qt_fragColor;
7 |
8 | // GRAY-SCALE EROSION: OUTPUT THE PER-CHANNEL MINIMUM OVER THE
9 | // (2*qt_radius+1)^2 WINDOW CENTERED ON THE CURRENT FRAGMENT
10 | void main()
11 | {
12 |     // GET THE FRAGMENT PIXEL COORDINATE
13 |     ivec2 coord = ivec2(gl_FragCoord.xy);
14 |
15 |     // BUG FIX: SEED WITH THE CENTER PIXEL RATHER THAN vec4(0.0);
16 |     // A ZERO SEED CLAMPED THE MINIMUM TO ZERO FOR ANY NON-NEGATIVE
17 |     // INPUT, SO THE FILTER ALWAYS RETURNED AN ALL-BLACK RESULT
18 |     qt_fragColor = texelFetch(qt_texture, coord, 0);
19 |     for (int r = -qt_radius; r <= qt_radius; r++){
20 |         for (int c = -qt_radius; c <= qt_radius; c++){
21 |             qt_fragColor = min(qt_fragColor, texelFetch(qt_texture, ivec2(c,r) + coord, 0));
22 |         }
23 |     }
24 | }
20 |
--------------------------------------------------------------------------------
/Shaders/filterMinRGBA.vert:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | in vec4 qt_vertex; // CLIP-SPACE VERTEX PROVIDED BY THE USER ON THE CPU
4 |
5 | // PASS-THROUGH VERTEX SHADER: FORWARD THE VERTEX POSITION UNCHANGED
6 | void main(void)
7 | {
8 |     gl_Position = vec4(qt_vertex);
9 | }
10 |
--------------------------------------------------------------------------------
/Shaders/filterRGBAtoGray.frag:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | uniform bool qt_flip = false; // TELL THE SHADER TO FLIP THE INCOMING TEXTURE VERTICALLY FOR DISPLAY
4 | uniform sampler2D qt_texture; // INPUT RGBA TEXTURE
5 |
6 | layout(location = 0, index = 0) out vec4 qt_fragColor;
7 |
8 | void main()
9 | {
10 |     if (qt_flip){
11 |         qt_fragColor = texelFetch(qt_texture, ivec2(int(gl_FragCoord.x), textureSize(qt_texture, 0).y - 1 - int(gl_FragCoord.y)), 0); // MIRROR THE ROW INDEX TO FLIP VERTICALLY
12 |     } else {
13 |         qt_fragColor = texelFetch(qt_texture, ivec2(gl_FragCoord.xy), 0);
14 |     }
15 |     qt_fragColor = qt_fragColor/qt_fragColor.a; // UNPREMULTIPLY BY ALPHA -- NOTE(review): 0/0 yields NaN when alpha == 0; confirm NaN propagation is intended
16 |
17 |     float luminance = dot(qt_fragColor.rgb, vec3(0.2126, 0.7152, 0.0722)); // REC.709 LUMA WEIGHTS
18 |     qt_fragColor = vec4(luminance, luminance, luminance, 1.0);
19 | }
20 |
--------------------------------------------------------------------------------
/Shaders/filterRGBAtoGray.vert:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | in vec4 qt_vertex; // CLIP-SPACE VERTEX PROVIDED BY THE USER ON THE CPU
4 |
5 | // PASS-THROUGH VERTEX SHADER: FORWARD THE VERTEX POSITION UNCHANGED
6 | void main(void)
7 | {
8 |     gl_Position = vec4(qt_vertex);
9 | }
10 |
--------------------------------------------------------------------------------
/Shaders/filterRandomMappingA.frag:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | uniform sampler2D qt_texture; // SOURCE IMAGE TO BE SCRAMBLED
4 | uniform sampler3D qt_map; // STACK OF PIXEL-MAPPING TABLES (RG = SOURCE COORDINATE / 65535)
5 | uniform int qt_index; // WHICH LAYER OF THE MAPPING STACK TO APPLY
6 |
7 | layout(location = 0, index = 0) out vec4 qt_fragColor;
8 |
9 | void main()
10 | {
11 |     // DECODE THE SOURCE PIXEL COORDINATE STORED IN LAYER qt_index OF THE MAP
12 |     vec2 mapped = texelFetch(qt_map, ivec3(ivec2(gl_FragCoord.xy), qt_index), 0).rg;
13 |     ivec2 coord = ivec2(65535.0 * mapped);
14 |
15 |     // COPY THAT PIXEL FROM THE INPUT TEXTURE TO THE CURRENT FRAGMENT
16 |     qt_fragColor = texelFetch(qt_texture, coord, 0);
17 | }
19 |
--------------------------------------------------------------------------------
/Shaders/filterRandomMappingA.vert:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | in vec4 qt_vertex; // CLIP-SPACE VERTEX PROVIDED BY THE USER ON THE CPU
4 |
5 | // PASS-THROUGH VERTEX SHADER: FORWARD THE VERTEX POSITION UNCHANGED
6 | void main(void)
7 | {
8 |     gl_Position = vec4(qt_vertex);
9 | }
10 |
--------------------------------------------------------------------------------
/Shaders/filterRandomMappingB.frag:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | uniform sampler2D qt_textureA; // TEXTURE SHOWN ON THE LEFT HALF OF THE OUTPUT
4 | uniform sampler2D qt_textureB; // TEXTURE SHOWN ON THE RIGHT HALF OF THE OUTPUT
5 | in vec2 qt_coordinate; // HOLDS THE TEXTURE COORDINATE FROM THE VERTEX SHADER
6 |
7 | layout(location = 0, index = 0) out vec4 qt_fragColor;
8 |
9 | void main()
10 | {
11 |     // SPLIT THE OUTPUT DOWN THE MIDDLE: EACH HALF STRETCHES ITS OWN
12 |     // SOURCE TEXTURE ACROSS THE FULL [0,1] HORIZONTAL RANGE
13 |     float u = 2.0 * qt_coordinate.x;
14 |     if (qt_coordinate.x < 0.5){
15 |         qt_fragColor = texture(qt_textureA, vec2(u, qt_coordinate.y));
16 |     } else {
17 |         qt_fragColor = texture(qt_textureB, vec2(u - 1.0, qt_coordinate.y));
18 |     }
19 | }
18 |
--------------------------------------------------------------------------------
/Shaders/filterRandomMappingB.vert:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | in vec4 qt_vertex; // CLIP-SPACE VERTEX PROVIDED BY THE USER ON THE CPU
4 | out vec2 qt_coordinate; // [0,1] TEXTURE COORDINATE FOR THE FRAGMENT SHADER
5 |
6 | void main(void)
7 | {
8 |     // FORWARD THE VERTEX POSITION UNCHANGED
9 |     gl_Position = qt_vertex;
10 |
11 |     // DERIVE A [0,1] TEXTURE COORDINATE FROM THE NDC POSITION, FLIPPING THE VERTICAL AXIS
12 |     qt_coordinate = vec2(qt_vertex.x + 1.0, 1.0 - qt_vertex.y) / 2.0;
13 | }
12 |
--------------------------------------------------------------------------------
/Shaders/filterSkewBlur.frag:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | uniform sampler2D qt_texture; // INPUT TEXTURE TO BE BLURRED
4 | uniform int qt_radius; // HALF-LENGTH OF THE BLUR LINE IN PIXELS
5 | uniform float qt_skew; // HORIZONTAL SHIFT PER VERTICAL STEP (SLOPE OF THE BLUR DIRECTION)
6 |
7 | layout(location = 0, index = 0) out vec4 qt_fragColor;
8 |
9 | void main()
10 | {
11 |     ivec2 sze = textureSize(qt_texture,0);
12 |
13 |     qt_fragColor = vec4(0.0f, 0.0f, 0.0f, 0.0f);
14 |     for (int dr = -qt_radius; dr <= qt_radius; dr++){ // WALK ALONG A SKEWED LINE THROUGH THE CURRENT PIXEL
15 |         float row = (float(gl_FragCoord.y) - float(dr)) / float(sze.y);
16 |         float col = (float(gl_FragCoord.x) - qt_skew * float(dr)) / float(sze.x);
17 |         qt_fragColor = qt_fragColor + texture(qt_texture, vec2(col, 1.0f - row)); // NORMALIZED LOOKUP, FLIPPED VERTICALLY
18 |     }
19 |     qt_fragColor = qt_fragColor / qt_fragColor.a; // NORMALIZE BY ACCUMULATED ALPHA (EQUALS THE SAMPLE COUNT WHEN INPUT ALPHA IS 1.0)
20 |     return;
21 | }
22 |
--------------------------------------------------------------------------------
/Shaders/filterSkewBlur.vert:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | in vec4 qt_vertex; // CLIP-SPACE VERTEX PROVIDED BY THE USER ON THE CPU
4 |
5 | // PASS-THROUGH VERTEX SHADER: FORWARD THE VERTEX POSITION UNCHANGED
6 | void main(void)
7 | {
8 |     gl_Position = vec4(qt_vertex);
9 | }
10 |
--------------------------------------------------------------------------------
/Shaders/filterSobelEdge.frag:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | uniform sampler2D qt_texture; // INPUT RGBA IMAGE TEXTURE
4 |
5 | layout(location = 0, index = 0) out vec4 qt_fragColor;
6 |
7 | void main()
8 | {
9 |     // GET THE FRAGMENT PIXEL COORDINATE
10 |     ivec2 coord = ivec2(gl_FragCoord.xy);
11 |
12 |     vec4 pixelA = texelFetch(qt_texture, coord + ivec2(-1,-1), 0);
13 |     vec4 pixelB = texelFetch(qt_texture, coord + ivec2( 0,-1), 0);
14 |     vec4 pixelC = texelFetch(qt_texture, coord + ivec2( 1,-1), 0);
15 |     vec4 pixelD = texelFetch(qt_texture, coord + ivec2(-1, 0), 0);
16 |
17 |     vec4 pixelF = texelFetch(qt_texture, coord + ivec2( 1, 0), 0);
18 |     vec4 pixelG = texelFetch(qt_texture, coord + ivec2(-1, 1), 0);
19 |     vec4 pixelH = texelFetch(qt_texture, coord + ivec2( 0, 1), 0);
20 |     vec4 pixelI = texelFetch(qt_texture, coord + ivec2( 1, 1), 0);
21 |
22 |     // HORIZONTAL AND VERTICAL GRADIENTS WITH A (1,3,1) SMOOTHING KERNEL -- NOTE(review): classic Sobel uses (1,2,1); confirm the weight of 3 is intentional
23 |     vec4 sobelX = (pixelC + 3 * pixelF + pixelI) - (pixelA + 3 * pixelD + pixelG);
24 |     vec4 sobelY = (pixelG + 3 * pixelH + pixelI) - (pixelA + 3 * pixelB + pixelC);
25 |
26 |     qt_fragColor = 2 * abs(sobelX) + 2 * abs(sobelY); // L1 GRADIENT MAGNITUDE, SCALED BY 2
27 |
28 |     return;
29 | }
30 |
--------------------------------------------------------------------------------
/Shaders/filterSobelEdge.vert:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | in vec4 qt_vertex; // CLIP-SPACE VERTEX PROVIDED BY THE USER ON THE CPU
4 |
5 | // PASS-THROUGH VERTEX SHADER: FORWARD THE VERTEX POSITION UNCHANGED
6 | void main(void)
7 | {
8 |     gl_Position = vec4(qt_vertex);
9 | }
10 |
--------------------------------------------------------------------------------
/Shaders/filterXGaussian.frag:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | uniform sampler2D qt_texture; // THIS SAMPLER HOLDS THE IMAGE TEXTURE
4 | uniform int qt_width; // HALF-WIDTH OF THE AVERAGING WINDOW IN PIXELS
5 |
6 | layout(location = 0, index = 0) out vec4 qt_fragColor;
7 |
8 | void main()
9 | {
10 |     ivec2 coord = ivec2(gl_FragCoord.xy);
11 |
12 |     vec4 cumSum = vec4(0.0, 0.0, 0.0, 0.0);
13 |     for (int c = -qt_width; c <= qt_width; c++){
14 |         //float weight = exp(-float(c*c)/(2.0*qt_width*qt_width/9.0));
15 |         //cumSum += weight * texelFetch(qt_texture, ivec2(coord.x+c, coord.y), 0);
16 |         cumSum += texelFetch(qt_texture, ivec2(coord.x+c, coord.y), 0); // UNIFORM WEIGHTS: CURRENTLY A HORIZONTAL BOX FILTER, NOT A TRUE GAUSSIAN
17 |     }
18 |     qt_fragColor = cumSum / cumSum.w; // NORMALIZE BY ACCUMULATED ALPHA (SAMPLE COUNT WHEN INPUT ALPHA IS 1.0)
19 | }
20 |
--------------------------------------------------------------------------------
/Shaders/filterXGaussian.vert:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | in vec4 qt_vertex; // CLIP-SPACE VERTEX PROVIDED BY THE USER ON THE CPU
4 |
5 | // PASS-THROUGH VERTEX SHADER: FORWARD THE VERTEX POSITION UNCHANGED
6 | void main(void)
7 | {
8 |     gl_Position = vec4(qt_vertex);
9 | }
10 |
--------------------------------------------------------------------------------
/Shaders/filterYGaussian.frag:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | uniform sampler2D qt_texture; // THIS SAMPLER HOLDS THE IMAGE TEXTURE
4 | uniform int qt_width; // HALF-HEIGHT OF THE AVERAGING WINDOW IN PIXELS
5 |
6 | layout(location = 0, index = 0) out vec4 qt_fragColor;
7 |
8 | void main()
9 | {
10 |     ivec2 coord = ivec2(gl_FragCoord.xy);
11 |
12 |     vec4 cumSum = vec4(0.0, 0.0, 0.0, 0.0);
13 |     for (int r = -qt_width; r <= qt_width; r++){
14 |         //float weight = exp(-float(r*r)/(2.0*qt_width*qt_width/9.0));
15 |         //cumSum += weight * texelFetch(qt_texture, ivec2(coord.x, coord.y+r), 0);
16 |         cumSum += texelFetch(qt_texture, ivec2(coord.x, coord.y+r), 0); // UNIFORM WEIGHTS: CURRENTLY A VERTICAL BOX FILTER, NOT A TRUE GAUSSIAN
17 |     }
18 |     qt_fragColor = cumSum / cumSum.w; // NORMALIZE BY ACCUMULATED ALPHA (SAMPLE COUNT WHEN INPUT ALPHA IS 1.0)
19 | }
20 |
--------------------------------------------------------------------------------
/Shaders/filterYGaussian.vert:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | in vec4 qt_vertex; // CLIP-SPACE VERTEX PROVIDED BY THE USER ON THE CPU
4 |
5 | // PASS-THROUGH VERTEX SHADER: FORWARD THE VERTEX POSITION UNCHANGED
6 | void main(void)
7 | {
8 |     gl_Position = vec4(qt_vertex);
9 | }
10 |
--------------------------------------------------------------------------------
/Shaders/rgbaToHistogramFilter.frag:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | in vec4 qt_fragment; // ONE-HOT CHANNEL COLOR CHOSEN BY THE GEOMETRY SHADER
4 |
5 | layout(location = 0, index = 0) out vec4 qt_fragColor;
6 |
7 | // PASS-THROUGH FRAGMENT SHADER: WRITE THE INCOMING COLOR UNCHANGED
8 | void main()
9 | {
10 |     qt_fragColor = vec4(qt_fragment);
11 | }
12 |
--------------------------------------------------------------------------------
/Shaders/rgbaToHistogramFilter.geom:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | uniform float qt_geometryMappingSlope; // MAPS THE SUBBLOCK COORDINATE TO A ROW COORDINATE IN THE HISTOGRAM BUFFER
4 | uniform float qt_geometryMappingOffst; // MAPS THE SUBBLOCK COORDINATE TO A ROW COORDINATE IN THE HISTOGRAM BUFFER
5 |
6 | layout(points) in;
7 | layout(points, max_vertices = 4) out;
8 |
9 | in vec4 qt_geometryA[]; // STORES THE TEXTURE INFORMATION
10 | in float qt_geometryB[]; // STORES THE SUBBLOCK COORDINATE
11 | out vec4 qt_fragment; // OUTPUTS COLOR VALUES
12 |
13 | void main()
14 | {
15 |     // MAP THE SUBBLOCK COORDINATE TO THE HISTOGRAM ROW SHARED BY ALL FOUR POINTS
16 |     float row = qt_geometryMappingSlope * qt_geometryB[0] + qt_geometryMappingOffst;
17 |
18 |     // EMIT ONE POINT PER COLOR CHANNEL: X IS THE CHANNEL VALUE AND THE
19 |     // FRAGMENT COLOR IS A ONE-HOT MASK SELECTING THAT CHANNEL'S ACCUMULATOR.
20 |     // BUG FIX: EmitVertex() LEAVES ALL OUTPUT VARIABLES UNDEFINED, SO THE
21 |     // COMPLETE gl_Position IS REWRITTEN FOR EVERY EMITTED VERTEX INSTEAD OF
22 |     // ONLY UPDATING THE X COMPONENT AFTER THE FIRST POINT
23 |     for (int i = 0; i < 4; i++){
24 |         gl_Position = vec4(qt_geometryA[0][i], row, 0.0, 1.0);
25 |         qt_fragment = vec4(0.0);
26 |         qt_fragment[i] = 1.0;
27 |         EmitVertex();
28 |         EndPrimitive();
29 |     }
30 | }
38 |
--------------------------------------------------------------------------------
/Shaders/rgbaToHistogramFilter.vert:
--------------------------------------------------------------------------------
1 | #version 330 core
2 |
3 | uniform sampler2D qt_texture; // THIS TEXTURE HOLDS THE INCOMING GRAYSCALE IMAGE
4 |
5 | uniform int qt_blockSizeX; // THE SIZE OF THE SUBBLOCKS IN COLUMNS
6 | uniform int qt_blockSizeY; // THE SIZE OF THE SUBBLOCKS IN ROWS
7 | uniform int qt_blocksPerCol; // NUMBER OF SUBBLOCKS ALONG Y (STRIDE APPLIED TO THE X BLOCK INDEX BELOW)
8 | uniform int qt_blocksPerRow; // NUMBER OF SUBBLOCKS ALONG X -- NOTE(review): unused in this shader
9 |
10 | uniform float qt_scale = 1.0f; // AMPLIFIES THE GRAY LEVEL FOR LESS THAN 16 BITS PER PIXEL
11 |
12 | in vec2 qt_vertex; // ONE VERTEX PER PIXEL, HOLDING THAT PIXEL'S (COL, ROW) COORDINATE
13 |
14 | out vec4 qt_geometryA; // PASSES THE PIXEL TO THE GEOMETRY SHADER
15 | out float qt_geometryB; // PASSES THE PIXEL'S SUBBLOCK COORDINATE
16 |
17 | void main(void)
18 | {
19 |     // CONVERT THE INCOMING PIXEL TO AN X-COORDINATE FOR THE HISTOGRAM BUFFER (MAPS [0,1] VALUES TO [-0.999, +0.999] WHEN qt_scale == 1)
20 |     qt_geometryA = 1.9980 * qt_scale * texelFetch(qt_texture, ivec2(qt_vertex.xy), 0) - 0.9990;
21 |
22 |     // CONVERT THE PIXEL COORDINATE INTO A SUBBLOCK COORDINATE -- NOTE(review): float division with no floor(), so the index is fractional inside each block; confirm the geometry-stage mapping tolerates this
23 |     qt_geometryB = float(qt_blocksPerCol * (qt_vertex.x / qt_blockSizeX) + (qt_vertex.y / qt_blockSizeY));
24 |
25 |     // EMIT A PLACEHOLDER POSITION; THE REAL POSITION IS COMPUTED IN THE GEOMETRY SHADER
26 |     gl_Position = vec4(0.0, 0.0, 0.0, 1.0);
27 | }
28 |
--------------------------------------------------------------------------------
/laufacialfeaturedetectorglwidget.cpp:
--------------------------------------------------------------------------------
1 | #include "laufacialfeaturedetectorglwidget.h"
2 | #include "locale.h"
3 |
4 | using namespace std;
5 | using namespace cv;
6 | using namespace cv::face;
7 |
8 | #ifdef USEVISAGE
9 | using namespace VisageSDK;
10 | #endif
11 |
12 | int groupList[68] = {15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 4, 14, 4, 14, 4, 4, 14, 4, 14, 4, 14, 14, 14, 14, 9, 9, 9, 9, 9, 3, 12, 12, 3, 12, 12, 3, 12, 12, 3, 12, 12, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 2, 2, 2, 2, 2, 2, 2, 2}; // FDP GROUP PASSED TO featurePoints2D->getFP() FOR EACH OF THE 68 LANDMARKS
13 | int indexListA[68] = {2, 4, 6, 8, 10, 12, 14, 16, 17, 15, 13, 11, 9, 7, 5, 3, 1, 6, 4, 4, 2, 2, 1, 1, 3, 3, 5, 25, 24, 23, 22, 2, 4, 15, 5, 1, 12, 10, 6, 8, 8, 12, 11, 9, 5, 7, 7, 11, 4, 6, 9, 1, 10, 5, 3, 3, 2, 2, 2, 4, 5, 7, 2, 6, 4, 8, 3, 9}; // FIRST FDP INDEX PER LANDMARK; EACH LANDMARK IS THE MIDPOINT OF THE TWO FETCHED FEATURE POINTS
14 | int indexListB[68] = {2, 4, 6, 8, 10, 12, 14, 16, 17, 15, 13, 11, 9, 7, 5, 3, 1, 6, 4, 4, 2, 2, 1, 1, 3, 3, 5, 25, 24, 23, 22, 2, 4, 15, 5, 1, 12, 10, 6, 8, 8, 12, 11, 9, 5, 7, 7, 11, 4, 6, 9, 1, 10, 5, 3, 7, 7, 2, 8, 8, 5, 7, 2, 6, 4, 8, 3, 9}; // SECOND FDP INDEX PER LANDMARK (DIFFERS FROM indexListA AT A FEW ENTRIES)
15 |
16 | /****************************************************************************/
17 | /****************************************************************************/
18 | /****************************************************************************/
19 | LAUFacialFeatureDetectorGLWidget::LAUFacialFeatureDetectorGLWidget(QWidget *parent) : LAUVideoGLWidget(parent), frameBufferObject(nullptr), visageTracker(nullptr)
20 | {   // PROMPT THE USER FOR THE MODEL/LICENSE FILES AND CONSTRUCT THE FACE TRACKING BACK-END (VISAGE OR OPENCV)
21 | #ifdef USEVISAGE
22 |     inputImage = nullptr;
23 |
24 |     QSettings settings;
25 |     QString directory = settings.value("LAUFacialFeatureDetectorGLWidget::licenseKey", QStandardPaths::writableLocation(QStandardPaths::DocumentsLocation)).toString();
26 |     QString string = QFileDialog::getOpenFileName(nullptr, QString("Load license key..."), directory, QString("*.vlc")); // ASK THE USER FOR THE VISAGE *.vlc LICENSE FILE
27 |     if (string.isEmpty() == false) {
28 |         settings.setValue("LAUFacialFeatureDetectorGLWidget::licenseKey", QFileInfo(string).absolutePath()); // REMEMBER THE FOLDER FOR THE NEXT SESSION
29 |
30 |         string = QFileInfo(string).path();
31 |         initializeLicenseManager(string.toLatin1()); // REGISTER THE LICENSE BEFORE CONSTRUCTING THE TRACKER
32 |
33 |         string.append("/data/Facial Features Tracker - Low.cfg"); // TRACKER CONFIG FILE EXPECTED UNDER <license folder>/data/
34 |         visageTracker = new VisageTracker(string.toUtf8());
35 |
36 |         VisageConfiguration configuration = visageTracker->getTrackerConfiguration();
37 |         //configuration.setMaxFaceScale(1.0f);
38 |         //configuration.setMinFaceScale(0.1f);
39 |         //configuration.setFaceDetectorSensitivity(0.1f);
40 |         //configuration.enableVNN();
41 |         visageTracker->setTrackerConfiguration(configuration); // CURRENTLY A ROUND-TRIP WITH ALL OVERRIDES COMMENTED OUT
42 |     }
43 | #else
44 |     faceDetector = nullptr;
45 |     subDivide = nullptr;
46 |
47 |     QSettings settings;
48 |     QString directory = settings.value("LAUFacialFeatureDetectorGLWidget::faceDetectorModel", QStandardPaths::writableLocation(QStandardPaths::DocumentsLocation)).toString();
49 |     QString string = QFileDialog::getOpenFileName(nullptr, QString("Load classifier..."), directory, QString("*.xml")); // ASK THE USER FOR THE OPENCV CASCADE CLASSIFIER (*.xml)
50 |     if (string.isEmpty() == false) {
51 |         settings.setValue("LAUFacialFeatureDetectorGLWidget::faceDetectorModel", QFileInfo(string).absolutePath());
52 |
53 |         faceDetector = new CascadeClassifier();
54 |         if (faceDetector->load(string.toStdString())) {
55 |             QString directory = settings.value("LAUFacialFeatureDetectorGLWidget::faceMarkModel", QStandardPaths::writableLocation(QStandardPaths::DocumentsLocation)).toString(); // NOTE: SHADOWS THE OUTER directory/string VARIABLES
56 |             QString string = QFileDialog::getOpenFileName(nullptr, QString("Load classifier..."), directory, QString("*.yaml")); // ASK THE USER FOR THE FACEMARK LBF MODEL (*.yaml)
57 |             if (string.isEmpty() == false) {
58 |                 settings.setValue("LAUFacialFeatureDetectorGLWidget::faceMarkModel", QFileInfo(string).absolutePath());
59 |
60 |                 facemark = FacemarkLBF::create();
61 |                 facemark->loadModel(string.toStdString());
62 |
63 |                 // CREATE SUBDIVIDE OBJECT FOR EXTRACTING VORONOI DIAGRAM
64 |                 subDivide = new Subdiv2D();
65 |             }
66 |         }
67 |     }
68 | #endif
69 | }
70 |
71 | /****************************************************************************/
72 | /****************************************************************************/
73 | /****************************************************************************/
74 | LAUFacialFeatureDetectorGLWidget::~LAUFacialFeatureDetectorGLWidget()
75 | {
76 |     if (wasInitialized()) {
77 |         makeCurrent(); // A CURRENT GL CONTEXT IS REQUIRED TO FREE THE FBO
78 |         if (frameBufferObject) {
79 |             delete frameBufferObject;
80 |         }
81 | #ifdef USEVISAGE
82 |         if (inputImage) {
83 |             vsReleaseImageHeader(&inputImage); // RELEASES THE HEADER ALLOCATED BY vsCreateImageHeader() IN process()
84 |         }
85 |         if (visageTracker) {
86 |             delete visageTracker;
87 |         }
88 | #else
89 |         if (faceDetector) {
90 |             faceDetector.release(); // NOTE(review): '.release()' requires a cv::Ptr member while 'delete' below requires a raw pointer -- both cannot compile for the same declaration; confirm the member types in the header, this branch may be dead
91 |             delete faceDetector;
92 |         }
93 |         if (facemark) {
94 |             facemark.release(); // NOTE(review): same concern as faceDetector above
95 |             delete facemark;
96 |         }
97 |         if (subDivide) {
98 |             subDivide.release(); // NOTE(review): same concern as faceDetector above
99 |             delete subDivide;
100 |         }
101 | #endif
102 |     }
103 |     qDebug() << "LAUFacialFeatureDetectorGLWidget::~LAUFacialFeatureDetectorGLWidget()";
104 | }
105 |
106 | /****************************************************************************/
107 | /****************************************************************************/
108 | /****************************************************************************/
109 | void LAUFacialFeatureDetectorGLWidget::process()
110 | {
111 | // SEE IF WE NEED NEW FBOS
112 | if (videoTexture) {
113 | static int frameCounter = 0;
114 |
115 | // INITIALIZE THE FRAME BUFFER OBJECT BASED ON THE INCOMING TEXTURE SIZE
116 | if (frameBufferObject == nullptr) {
117 | // CREATE A FORMAT OBJECT FOR CREATING THE FRAME BUFFER
118 | QOpenGLFramebufferObjectFormat frameBufferObjectFormat;
119 | frameBufferObjectFormat.setInternalTextureFormat(GL_RGBA32F);
120 |
121 | // CREATE A NEW FRAME BUFFER OBJECT
122 | frameBufferObject = new QOpenGLFramebufferObject(videoTexture->width(), videoTexture->height(), frameBufferObjectFormat);
123 | frameBufferObject->release();
124 |
125 | #ifdef USEVISAGE
126 | // CREATE A VISAGE IMAGE FOR HOLDING THE GRAYSCALE FRAME ON THE CPU
127 | inputImage = vsCreateImageHeader(vsSize(videoTexture->width(), videoTexture->height()), 8, 1);
128 | #endif
129 | // CREATE A OPENCV MATRIX FOR HOLDING THE GRAYSCALE FRAME ON THE CPU
130 | videoFrame = Mat(videoTexture->height(), videoTexture->width(), CV_8UC3);
131 | grayFrame = Mat(videoTexture->height(), videoTexture->width(), CV_8U);
132 | } else if (frameBufferObject->width() != videoTexture->width() || frameBufferObject->height() != videoTexture->height()) {
133 | // DELETE THE OLD FRAMEBUFFEROBJECT BECAUSE IT IS NO LONGER THE CORRECT SIZE
134 | delete frameBufferObject;
135 |
136 | // CREATE A FORMAT OBJECT FOR CREATING THE FRAME BUFFER
137 | QOpenGLFramebufferObjectFormat frameBufferObjectFormat;
138 | frameBufferObjectFormat.setInternalTextureFormat(GL_RGBA32F);
139 |
140 | // CREATE A NEW FRAME BUFFER OBJECT
141 | frameBufferObject = new QOpenGLFramebufferObject(videoTexture->width(), videoTexture->height(), frameBufferObjectFormat);
142 | frameBufferObject->release();
143 |
144 | #ifdef USEVISAGE
145 | // CREATE A VISAGE IMAGE FOR HOLDING THE GRAYSCALE FRAME ON THE CPU
146 | if (inputImage) {
147 | vsReleaseImageHeader(&inputImage);
148 | }
149 | inputImage = vsCreateImageHeader(vsSize(videoTexture->width(), videoTexture->height()), 8, 1);
150 | #endif
151 | // CREATE A OPENCV MATRIX FOR HOLDING THE GRAYSCALE FRAME ON THE CPU
152 | videoFrame = Mat(videoTexture->height(), videoTexture->width(), CV_8UC3);
153 | grayFrame = Mat(videoTexture->height(), videoTexture->width(), CV_8U);
154 | }
155 |
156 | // SET CLEAR COLOR AS NOT A NUMBERS
157 | glClearColor(NAN, NAN, NAN, NAN);
158 |
159 | // CONVERT THE RGB TEXTURE INTO GRAYSCALE
160 | if (frameBufferObject->bind()) {
161 | if (programA.bind()) {
162 | // CLEAR THE FRAME BUFFER OBJECT
163 | glViewport(0, 0, frameBufferObject->width(), frameBufferObject->height());
164 | glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
165 |
166 | // BIND VBOS FOR DRAWING TRIANGLES ON SCREEN
167 | if (quadVertexBuffer.bind()) {
168 | if (quadIndexBuffer.bind()) {
169 | // BIND THE TEXTURE FROM THE ORIGINAL SCAN
170 | glActiveTexture(GL_TEXTURE0);
171 | videoTexture->bind();
172 | programA.setUniformValue("qt_texture", 0);
173 | programA.setUniformValue("qt_flip", true);
174 |
175 | // TELL OPENGL PROGRAMMABLE PIPELINE HOW TO LOCATE VERTEX POSITION DATA
176 | glVertexAttribPointer(programA.attributeLocation("qt_vertex"), 4, GL_FLOAT, GL_FALSE, 4 * sizeof(float), nullptr);
177 | programA.enableAttributeArray("qt_vertex");
178 | glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, nullptr);
179 |
180 | // RELEASE THE FRAME BUFFER OBJECT AND ITS ASSOCIATED GLSL PROGRAMS
181 | quadIndexBuffer.release();
182 | }
183 | quadVertexBuffer.release();
184 | }
185 | programA.release();
186 | }
187 | frameBufferObject->release();
188 |
189 | #ifdef USEVISAGE
190 | // COPY FRAME BUFFER TEXTURE FROM GPU TO LOCAL CPU BUFFER
191 | glBindTexture(GL_TEXTURE_2D, frameBufferObject->texture());
192 | glGetTexImage(GL_TEXTURE_2D, 0, GL_RED, GL_UNSIGNED_BYTE, grayFrame.data);
193 |
194 | videoTexture->bind();
195 | glGetTexImage(GL_TEXTURE_2D, 0, GL_RGB, GL_UNSIGNED_BYTE, videoFrame.data);
196 |
197 | if (visageTracker) {
198 | int *numFaces = visageTracker->track(grayFrame.cols, grayFrame.rows, (const char *)grayFrame.data, faceData, VISAGE_FRAMEGRABBER_FMT_LUMINANCE, VISAGE_FRAMEGRABBER_ORIGIN_TL, 0, -1, 1);
199 | if (frameCounter > 2 && numFaces[0] == TRACK_STAT_OK) {
200 | // CREATE A VECTOR TO HOLD THE LANDMARKS FOR EACH DETECTED FACE
201 | vector landmarks(68);
202 |
203 | for (int n = 0; n < 68; n++) {
204 | FeaturePoint fpA = faceData[0].featurePoints2D->getFP(groupList[n], indexListA[n]);
205 | FeaturePoint fpB = faceData[0].featurePoints2D->getFP(groupList[n], indexListB[n]);
206 | float col = (fpA.pos[0] + fpB.pos[0]) / 2.0f * (float)videoFrame.cols;
207 | float row = (fpA.pos[1] + fpB.pos[1]) / 2.0f * (float)videoFrame.rows;
208 | landmarks.at(n) = Point2f(col, row);
209 | }
210 |
211 | for (int n = 0; n < landmarks.size(); n++) {
212 | int col = landmarks[n].x;
213 | int row = landmarks[n].y;
214 | circle(videoFrame, Point(col, row), 2, Scalar(0, 255, 0), -1);
215 | }
216 |
217 | //for (int group = 0; group < 14; group++){
218 | // // DRAW THE EYE FIDUCIALS ON THE VIDEO FRAME
219 | // int numFiducials = faceData[0].featurePoints2D->groupSize(group+1);
220 | // for (int n = 0; n < numFiducials; n++){
221 | // FeaturePoint fp = faceData[0].featurePoints2D->getFP(group+1,n+1);
222 | // int col = fp.pos[0] * videoFrame.cols;
223 | // int row = fp.pos[1] * videoFrame.rows;
224 | // circle(videoFrame, Point(col, row), 2, Scalar(0, 255, 0), -1);
225 | // }
226 | //}
227 | videoTexture->setData(QOpenGLTexture::RGB, QOpenGLTexture::UInt8, (const void *)videoFrame.data);
228 | }
229 | }
230 | #else
231 | // COPY FRAME BUFFER TEXTURE FROM GPU TO LOCAL CPU BUFFER
232 | glBindTexture(GL_TEXTURE_2D, frameBufferObject->texture());
233 | glGetTexImage(GL_TEXTURE_2D, 0, GL_RED, GL_UNSIGNED_BYTE, grayFrame.data);
234 |
235 | videoTexture->bind();
236 | glGetTexImage(GL_TEXTURE_2D, 0, GL_RGB, GL_UNSIGNED_BYTE, videoFrame.data);
237 |
238 | // CREATE A VECTOR OF RECTANGLES TO HOLD ONE RECTANGLE PER FACE
239 | vector faces;
240 | faceDetector->detectMultiScale(grayFrame, faces);
241 |
242 | // LETS KEEP TRACK OF HOW MANY FACE TRIANGLES WE NEED TO DRAW LATER
243 | int numFaceTriangles = 0;
244 |
245 | // NEW SEE IF FOUND AT LEAST ONE FACE
246 | if (faces.size() > 0) {
247 | // CREATE A VECTOR TO HOLD THE LANDMARKS FOR EACH DETECTED FACE
248 | vector< vector > landmarks;
249 | bool success = facemark->fit(grayFrame, faces, landmarks);
250 | if (success) {
251 | for (int n = 0; n < landmarks.size(); n++) {
252 | // Draws voronoi diagram
253 | vector< vector > facetList;
254 | vector facetCenters;
255 |
256 | // PREPARE THE SUBDIVIDE OBJECT FOR THE CURRENT FRAME SIZE
257 | subDivide->initDelaunay(Rect(-videoTexture->width() / 2, -videoTexture->height() / 2, 2 * videoTexture->width(), 2 * videoTexture->height()));
258 | for (int m = 0; m < landmarks.at(n).size(); m++) {
259 | subDivide->insert(landmarks.at(n).at(m));
260 | }
261 |
262 | // PUSH THE FOUR CORNERS OF THE VIDEO TEXTURE
263 | //subDivide->insert(Point2f(1, 1));
264 | //subDivide->insert(Point2f(1, videoTexture->height() - 1));
265 | //subDivide->insert(Point2f(videoTexture->width() - 1, 1));
266 | //subDivide->insert(Point2f(videoTexture->width() - 1, videoTexture->height() - 1));
267 |
268 | // DRAW THE FACE TRIANGLES ON THE VIDEO FRAME
269 | std::vector< Vec6f > triangleList;
270 | subDivide->getTriangleList(triangleList);
271 | for (n = 0; n < triangleList.size(); n++) {
272 | line(videoFrame, Point2f(triangleList.at(n)[0], triangleList.at(n)[1]), Point2f(triangleList.at(n)[2], triangleList.at(n)[3]), Scalar(0, 0, 255), 1, 8, 0);
273 | line(videoFrame, Point2f(triangleList.at(n)[2], triangleList.at(n)[3]), Point2f(triangleList.at(n)[4], triangleList.at(n)[5]), Scalar(0, 0, 255), 1, 8, 0);
274 | line(videoFrame, Point2f(triangleList.at(n)[4], triangleList.at(n)[5]), Point2f(triangleList.at(n)[0], triangleList.at(n)[1]), Scalar(0, 0, 255), 1, 8, 0);
275 | }
276 | videoTexture->setData(QOpenGLTexture::RGB, QOpenGLTexture::UInt8, (const void *)videoFrame.data);
277 | numFaceTriangles = triangleList.size();
278 |
279 | // COPY FACE TRIANGLE VERTICES TO THE GPU FOR DRAWING
280 | if (numFaceTriangles > 0 && faceVertexBuffer.bind()) {
281 | float *buffer = (float *)faceVertexBuffer.mapRange(0, 12 * numFaceTriangles * sizeof(float), QOpenGLBuffer::RangeWrite);
282 | if (buffer) {
283 | for (n = 0; n < triangleList.size(); n++) {
284 | // INSERT FIRST OF THREE VERTICES (INPUT AND OUTPUT POINTS)
285 | buffer[12 * n + 0] = triangleList.at(n)[0];
286 | buffer[12 * n + 1] = triangleList.at(n)[1];
287 | buffer[12 * n + 2] = triangleList.at(n)[0];
288 | buffer[12 * n + 3] = triangleList.at(n)[1];
289 |
290 | // INSERT SECOND OF THREE VERTICES (INPUT AND OUTPUT POINTS)
291 | buffer[12 * n + 4] = triangleList.at(n)[2];
292 | buffer[12 * n + 5] = triangleList.at(n)[3];
293 | buffer[12 * n + 6] = triangleList.at(n)[2];
294 | buffer[12 * n + 7] = triangleList.at(n)[3];
295 |
296 | // INSERT THIRD OF THREE VERTICES (INPUT AND OUTPUT POINTS)
297 | buffer[12 * n + 8] = triangleList.at(n)[4];
298 | buffer[12 * n + 9] = triangleList.at(n)[5];
299 | buffer[12 * n + 10] = triangleList.at(n)[4];
300 | buffer[12 * n + 11] = triangleList.at(n)[5];
301 | }
302 | faceVertexBuffer.unmap();
303 | } else {
304 | qDebug() << QString("faceVertexBuffer not mapped to CPU.") << glGetError();
305 | }
306 | faceVertexBuffer.release();
307 | }
308 | }
309 | }
310 | }
311 |
312 | // CHECK TO SEE IF WE HAVE ANY FACES TO DRAW
313 | if (numFaceTriangles > 0) {
314 | // CONVERT THE RGB TEXTURE INTO GRAYSCALE
315 | if (frameBufferObject->bind()) {
316 | if (programB.bind()) {
317 | // SET THE VIEWPOINT BUT DON'T CLEAR THE PREVIOUS CONTENTS OF THE BUFFER
318 | glViewport(0, 0, frameBufferObject->width(), frameBufferObject->height());
319 |
320 | // BIND VBOS FOR DRAWING TRIANGLES ON SCREEN
321 | if (faceVertexBuffer.bind()) {
322 | if (faceIndexBuffer.bind()) {
323 | qDebug() << "Drawing face triangles" << numFaceTriangles;
324 |
325 | // BIND THE INCOMING RGB VIDEO FRAME AS A TEXTURE
326 | glActiveTexture(GL_TEXTURE0);
327 | videoTexture->bind();
328 | programB.setUniformValue("qt_texture", 0);
329 |
330 | // TELL THE GLPROGRAM WHAT THE DIMENSIONS OF THE VIDEO FRAME
331 | programB.setUniformValue("qt_size", QPointF(videoTexture->width(), videoTexture->height()));
332 |
333 | // TELL OPENGL PROGRAMMABLE PIPELINE HOW TO LOCATE VERTEX POSITION DATA
334 | glVertexAttribPointer(programB.attributeLocation("qt_vertex"), 4, GL_FLOAT, GL_FALSE, 4 * sizeof(float), nullptr);
335 | programB.enableAttributeArray("qt_vertex");
336 | glDrawElements(GL_TRIANGLES, 3 * numFaceTriangles, GL_UNSIGNED_INT, nullptr);
337 |
338 | // RELEASE THE FRAME BUFFER OBJECT AND ITS ASSOCIATED GLSL PROGRAMS
339 | faceIndexBuffer.release();
340 | }
341 | faceVertexBuffer.release();
342 | }
343 | programB.release();
344 | }
345 | frameBufferObject->release();
346 | }
347 | }
348 | #endif
349 | }
350 | frameCounter++;
351 | }
352 | }
353 |
354 | /****************************************************************************/
355 | /****************************************************************************/
356 | /****************************************************************************/
357 | void LAUFacialFeatureDetectorGLWidget::initialize()
358 | {
359 | // INITIALIZE THE UNDERLYING CLASS
360 | LAUVideoGLWidget::initialize();
361 |
362 | // CREATE VERTEX BUFFER TO HOLD FACIAL FEATURE TRIANGLES
363 | faceVertexBuffer = QOpenGLBuffer(QOpenGLBuffer::VertexBuffer);
364 | faceVertexBuffer.create();
365 | faceVertexBuffer.setUsagePattern(QOpenGLBuffer::DynamicDraw);
366 | if (faceVertexBuffer.bind()) {
367 | // ALLOCATE THE VERTEX BUFFER FOR HOLDING ENOUGH VEC4 FOR TWO FACES (SOURCE AND DESTINATION POINTS)
368 | faceVertexBuffer.allocate(200 * 12 * sizeof(float));
369 | faceVertexBuffer.release();
370 | }
371 |
372 | // CREATE INDEX BUFFER TO CONNECT VERTICES FOR FACIAL FEATURE TRIANGLES
373 | faceIndexBuffer = QOpenGLBuffer(QOpenGLBuffer::IndexBuffer);
374 | faceIndexBuffer.create();
375 | faceIndexBuffer.setUsagePattern(QOpenGLBuffer::StaticDraw);
376 | if (faceIndexBuffer.bind()) {
377 | faceIndexBuffer.allocate(200 * 3 * sizeof(unsigned int));
378 | unsigned int *indices = (unsigned int *)faceIndexBuffer.map(QOpenGLBuffer::WriteOnly);
379 | if (indices) {
380 | for (int n = 0; n < 200; n++) {
381 | indices[3 * n + 0] = 3 * n + 0;
382 | indices[3 * n + 1] = 3 * n + 1;
383 | indices[3 * n + 2] = 3 * n + 2;
384 | }
385 | faceIndexBuffer.unmap();
386 | } else {
387 | qDebug() << QString("indiceBufferA buffer mapped from GPU.");
388 | }
389 | faceIndexBuffer.release();
390 | }
391 |
392 | // NOW ADD OUR LIST OF HARRIS CORNER SHADER PROGRAMS
393 | setlocale(LC_NUMERIC, "C");
394 | programA.addShaderFromSourceFile(QOpenGLShader::Vertex, ":/shaders/Shaders/filterRGBAtoGray.vert");
395 | programA.addShaderFromSourceFile(QOpenGLShader::Fragment, ":/shaders/Shaders/filterRGBAtoGray.frag");
396 | programA.link();
397 |
398 | programB.addShaderFromSourceFile(QOpenGLShader::Vertex, ":/shaders/Shaders/filterDrawFace.vert");
399 | programB.addShaderFromSourceFile(QOpenGLShader::Fragment, ":/shaders/Shaders/filterDrawFace.frag");
400 | programB.link();
401 | setlocale(LC_ALL, "");
402 | }
403 |
404 | /****************************************************************************/
405 | /****************************************************************************/
406 | /****************************************************************************/
// Draws the (possibly landmark-annotated) video texture to the widget.
// Falls back to the base-class paint until process() has created the FBO.
void LAUFacialFeatureDetectorGLWidget::paint()
{
    if (frameBufferObject == nullptr) {
        LAUVideoGLWidget::paint();
    } else {
        // SET THE VIEW PORT AND CLEAR THE SCREEN BUFFER
        glViewport(0, 0, localWidth, localHeight);
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

        // DISPLAY THE LAST FBO IN OUR LIST
        if (videoTexture) {
            if (program.bind()) {
                if (quadVertexBuffer.bind()) {
                    if (quadIndexBuffer.bind()) {
                        // SET THE ACTIVE TEXTURE ON THE GPU
                        // NOTE: the videoTexture is drawn (not the FBO) since process()
                        // writes its annotations back into videoTexture via setData()
                        glActiveTexture(GL_TEXTURE0);
                        //glBindTexture(GL_TEXTURE_2D, frameBufferObject->texture());
                        videoTexture->bind();
                        program.setUniformValue("qt_texture", 0);
#ifdef Q_OS_WIN
                        program.setUniformValue("qt_flip", false);
#else
                        program.setUniformValue("qt_flip", true);
#endif
                        // TELL OPENGL PROGRAMMABLE PIPELINE HOW TO LOCATE VERTEX POSITION DATA
                        program.setAttributeBuffer("qt_vertex", GL_FLOAT, 0, 4, 4 * sizeof(float));
                        program.enableAttributeArray("qt_vertex");

                        // DRAW THE TWO TRIANGLES (6 INDICES) THAT COVER THE VIEWPORT
                        glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, nullptr);

                        quadIndexBuffer.release();
                    }
                    quadVertexBuffer.release();
                }
                program.release();
            }
        }
    }
}
446 |
--------------------------------------------------------------------------------
/laufacialfeaturedetectorglwidget.h:
--------------------------------------------------------------------------------
1 | #ifndef LAUFACIALFEATUREDETECTORGLWIDGET_H
2 | #define LAUFACIALFEATUREDETECTORGLWIDGET_H
3 |
#include <QWidget>
#include <QOpenGLBuffer>
#include <QOpenGLShaderProgram>
#include <QOpenGLFramebufferObject>
8 |
9 | #ifdef USEVISAGE
10 | #include "visageVision.h"
11 | #include "VisageTracker.h"
12 | #endif
13 |
14 | #include "opencv2/face.hpp"
15 | #include "opencv2/core/core.hpp"
16 | #include "opencv2/imgproc/imgproc.hpp"
17 | #include "opencv2/highgui/highgui.hpp"
18 |
19 | #include "lauvideoglwidget.h"
20 |
21 | /****************************************************************************/
22 | /****************************************************************************/
23 | /****************************************************************************/
24 | class LAUFacialFeatureDetectorGLWidget : public LAUVideoGLWidget
25 | {
26 |
27 | public:
28 | explicit LAUFacialFeatureDetectorGLWidget(QWidget *parent = nullptr);
29 | ~LAUFacialFeatureDetectorGLWidget();
30 |
31 | QImage grabImage()
32 | {
33 | if (videoTexture) {
34 | makeCurrent();
35 | QImage image(videoTexture->width(), videoTexture->height(), QImage::Format_ARGB32);
36 | videoTexture->bind();
37 | glGetTexImage(GL_TEXTURE_2D, 0, GL_BGRA, GL_UNSIGNED_BYTE, (void *)image.constBits());
38 | return (image);
39 | }
40 | return (QImage());
41 | }
42 |
43 | void initialize();
44 | void process();
45 | void paint();
46 |
47 | private:
48 | QOpenGLBuffer faceVertexBuffer, faceIndexBuffer;
49 | QOpenGLFramebufferObject *frameBufferObject;
50 | QOpenGLShaderProgram programA, programB;
51 |
52 | #ifdef USEVISAGE
53 | VsImage *inputImage;
54 | VisageSDK::VisageTracker *visageTracker;
55 | VisageSDK::FaceData faceData[16];
56 | #else
57 | QObject *visageTracker;
58 | cv::Ptr subDivide;
59 | cv::Ptr faceDetector;
60 | cv::Ptr facemark;
61 | #endif
62 | cv::Mat videoFrame, grayFrame;
63 | };
64 |
65 | #endif // LAUFACIALFEATUREDETECTORGLWIDGET_H
66 |
--------------------------------------------------------------------------------
/lauffmpegobject.cpp:
--------------------------------------------------------------------------------
1 | #include "lauffmpegobject.h"
2 |
#include <QDebug>
4 |
5 | LAUFFMpegObject::LAUFFMpegObject(QObject *parent) : QObject(parent)
6 | {
7 | qDebug() << "AVCoded Version:" << avcodec_version();
8 | }
9 |
--------------------------------------------------------------------------------
/lauffmpegobject.h:
--------------------------------------------------------------------------------
1 | #ifndef LAUFFMPEGOBJECT_H
2 | #define LAUFFMPEGOBJECT_H
3 |
#include <QObject>
5 |
6 | extern "C"
7 | {
8 | #include "libavutil/opt.h"
9 | #include "libavutil/imgutils.h"
10 | #include "libavcodec/avcodec.h"
11 | #include "libavutil/mathematics.h"
12 | }
13 |
// Thin QObject wrapper around FFmpeg's libavcodec; at present it only
// reports the linked library version from its constructor (see .cpp).
class LAUFFMpegObject : public QObject
{
    Q_OBJECT

public:
    explicit LAUFFMpegObject(QObject *parent = nullptr);

private:


signals:

};
27 |
28 | #endif // LAUFFMPEGOBJECT_H
29 |
--------------------------------------------------------------------------------
/lauharriscornerdetectorglwidget.cpp:
--------------------------------------------------------------------------------
1 | #include "lauharriscornerdetectorglwidget.h"
2 | #include "locale.h"
3 |
4 | /****************************************************************************/
5 | /****************************************************************************/
6 | /****************************************************************************/
7 | LAUHarrisCornerDetectorGLWidget::~LAUHarrisCornerDetectorGLWidget()
8 | {
9 | if (wasInitialized()) {
10 | makeCurrent();
11 | for (int n = 0; n < 5; n++) {
12 | if (frameBufferObjects[n]) {
13 | delete frameBufferObjects[n];
14 | }
15 | }
16 | }
17 | qDebug() << "LAUHarrisCornerDetectorGLWidget::~LAUHarrisCornerDetectorGLWidget()";
18 | }
19 |
20 | /****************************************************************************/
21 | /****************************************************************************/
22 | /****************************************************************************/
// Runs the five-pass Harris corner pipeline on the current video frame:
//   FBO[0] image gradients -> FBO[1] Gaussian-smoothed gradients ->
//   FBO[2] corner response -> FBO[3] thresholded corners ->
//   FBO[4] corners composited over the original video.
void LAUHarrisCornerDetectorGLWidget::process()
{
    // SEE IF WE NEED NEW FBOS
    if (videoTexture) {
        for (int n = 0; n < 5; n++) {
            if (frameBufferObjects[n] == NULL) {
                // CREATE A FORMAT OBJECT FOR CREATING THE FRAME BUFFER
                QOpenGLFramebufferObjectFormat frameBufferObjectFormat;
                frameBufferObjectFormat.setInternalTextureFormat(GL_RGBA32F);

                frameBufferObjects[n] = new QOpenGLFramebufferObject(videoTexture->width(), videoTexture->height(), frameBufferObjectFormat);
                frameBufferObjects[n]->release();
            } else if (frameBufferObjects[n]->width() != videoTexture->width() || frameBufferObjects[n]->height() != videoTexture->height()) {
                // FBO SIZE NO LONGER MATCHES THE VIDEO; REBUILD AT THE NEW SIZE
                delete frameBufferObjects[n];

                // CREATE A FORMAT OBJECT FOR CREATING THE FRAME BUFFER
                QOpenGLFramebufferObjectFormat frameBufferObjectFormat;
                frameBufferObjectFormat.setInternalTextureFormat(GL_RGBA32F);

                frameBufferObjects[n] = new QOpenGLFramebufferObject(videoTexture->width(), videoTexture->height(), frameBufferObjectFormat);
                frameBufferObjects[n]->release();
            }
        }

        // SET CLEAR COLOR AS NOT A NUMBERS
        glClearColor(NAN, NAN, NAN, NAN);

        // CALCULATE THE GRADIENT BUFFER
        if (frameBufferObjects[0]->bind()) {
            if (programA.bind()) {
                // CLEAR THE FRAME BUFFER OBJECT
                // NOTE(review): the 2-pixel viewport inset presumably skips border
                // pixels the gradient kernel cannot fill — confirm against shader
                glViewport(2, 2, frameBufferObjects[0]->width() - 4, frameBufferObjects[0]->height() - 4);
                glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

                // BIND VBOS FOR DRAWING TRIANGLES ON SCREEN
                if (quadVertexBuffer.bind()) {
                    if (quadIndexBuffer.bind()) {
                        // BIND THE TEXTURE FROM THE ORIGINAL SCAN
                        glActiveTexture(GL_TEXTURE0);
                        videoTexture->bind();
                        programA.setUniformValue("qt_texture", 0);

                        // TELL OPENGL PROGRAMMABLE PIPELINE HOW TO LOCATE VERTEX POSITION DATA
                        glVertexAttribPointer(programA.attributeLocation("qt_vertex"), 4, GL_FLOAT, GL_FALSE, 4 * sizeof(float), 0);
                        programA.enableAttributeArray("qt_vertex");
                        glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);

                        // RELEASE THE FRAME BUFFER OBJECT AND ITS ASSOCIATED GLSL PROGRAMS
                        quadIndexBuffer.release();
                    }
                    quadVertexBuffer.release();
                }
                programA.release();
            }
            frameBufferObjects[0]->release();
        }

        // SMOOTH THE GRADIENT BUFFER WITH A GAUSSIAN FILTER
        if (frameBufferObjects[1]->bind()) {
            if (programB.bind()) {
                // CLEAR THE FRAME BUFFER OBJECT
                // (inset grows to 4 pixels as each pass shrinks the valid region)
                glViewport(4, 4, frameBufferObjects[1]->width() - 8, frameBufferObjects[1]->height() - 8);
                glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

                // BIND VBOS FOR DRAWING TRIANGLES ON SCREEN
                if (quadVertexBuffer.bind()) {
                    if (quadIndexBuffer.bind()) {
                        // BIND THE TEXTURE FROM THE FRAME BUFFER OBJECT A
                        glActiveTexture(GL_TEXTURE1);
                        glBindTexture(GL_TEXTURE_2D, frameBufferObjects[0]->texture());
                        programB.setUniformValue("qt_texture", 1);

                        // TELL OPENGL PROGRAMMABLE PIPELINE HOW TO LOCATE VERTEX POSITION DATA
                        glVertexAttribPointer(programB.attributeLocation("qt_vertex"), 4, GL_FLOAT, GL_FALSE, 4 * sizeof(float), 0);
                        programB.enableAttributeArray("qt_vertex");
                        glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);

                        // RELEASE THE FRAME BUFFER OBJECT AND ITS ASSOCIATED GLSL PROGRAMS
                        quadIndexBuffer.release();
                    }
                    quadVertexBuffer.release();
                }
                programB.release();
            }
            frameBufferObjects[1]->release();
        }

        // FIND LOCAL MAXIMUMS
        if (frameBufferObjects[2]->bind()) {
            if (programC.bind()) {
                // CLEAR THE FRAME BUFFER OBJECT
                glViewport(5, 5, frameBufferObjects[2]->width() - 10, frameBufferObjects[2]->height() - 10);
                glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

                // BIND VBOS FOR DRAWING TRIANGLES ON SCREEN
                if (quadVertexBuffer.bind()) {
                    if (quadIndexBuffer.bind()) {
                        // BIND THE TEXTURE FROM THE FRAME BUFFER OBJECT B
                        glActiveTexture(GL_TEXTURE2);
                        glBindTexture(GL_TEXTURE_2D, frameBufferObjects[1]->texture());
                        programC.setUniformValue("qt_texture", 2);

                        // SET THE HARRIS CORNER MATRIX SCALE FACTOR K
                        programC.setUniformValue("qt_scaleFactor", qtScaleFactor);

                        // TELL OPENGL PROGRAMMABLE PIPELINE HOW TO LOCATE VERTEX POSITION DATA
                        glVertexAttribPointer(programC.attributeLocation("qt_vertex"), 4, GL_FLOAT, GL_FALSE, 4 * sizeof(float), 0);
                        programC.enableAttributeArray("qt_vertex");
                        glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);

                        // RELEASE THE FRAME BUFFER OBJECT AND ITS ASSOCIATED GLSL PROGRAMS
                        quadIndexBuffer.release();
                    }
                    quadVertexBuffer.release();
                }
                programC.release();
            }
            frameBufferObjects[2]->release();
        }

        // DISPLAY CORNERS ON THE INPUT SCAN
        if (frameBufferObjects[3]->bind()) {
            if (programD.bind()) {
                // CLEAR THE FRAME BUFFER OBJECT
                glViewport(0, 0, frameBufferObjects[3]->width(), frameBufferObjects[3]->height());
                glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

                // BIND VBOS FOR DRAWING TRIANGLES ON SCREEN
                if (quadVertexBuffer.bind()) {
                    if (quadIndexBuffer.bind()) {
                        // BIND THE TEXTURE FROM THE FRAME BUFFER OBJECT C
                        glActiveTexture(GL_TEXTURE3);
                        glBindTexture(GL_TEXTURE_2D, frameBufferObjects[2]->texture());
                        programD.setUniformValue("qt_texture", 3);

                        // SET THE MINIMUM CORNER STRENGTH THRESHOLD
                        programD.setUniformValue("qt_threshold", qtCornerThreshold);
                        programD.setUniformValue("qt_radius", 10);

                        // TELL OPENGL PROGRAMMABLE PIPELINE HOW TO LOCATE VERTEX POSITION DATA
                        glVertexAttribPointer(programD.attributeLocation("qt_vertex"), 4, GL_FLOAT, GL_FALSE, 4 * sizeof(float), 0);
                        programD.enableAttributeArray("qt_vertex");
                        glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);

                        // RELEASE THE FRAME BUFFER OBJECT AND ITS ASSOCIATED GLSL PROGRAMS
                        quadIndexBuffer.release();
                    }
                    quadVertexBuffer.release();
                }
                programD.release();
            }
            frameBufferObjects[3]->release();
        }

        // DISPLAY CORNERS ON THE INPUT SCAN
        // (final pass: merge the detected corners over the original video frame)
        if (frameBufferObjects[4]->bind()) {
            if (programE.bind()) {
                // CLEAR THE FRAME BUFFER OBJECT
                glViewport(0, 0, frameBufferObjects[4]->width(), frameBufferObjects[4]->height());
                glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

                // BIND VBOS FOR DRAWING TRIANGLES ON SCREEN
                if (quadVertexBuffer.bind()) {
                    if (quadIndexBuffer.bind()) {
                        // BIND THE ORIGINAL SCAN AS WE MERGE THE TWO
                        glActiveTexture(GL_TEXTURE0);
                        videoTexture->bind();
                        programE.setUniformValue("qt_textureA", 0);

                        // BIND THE TEXTURE FROM THE FRAME BUFFER OBJECT C
                        glActiveTexture(GL_TEXTURE4);
                        glBindTexture(GL_TEXTURE_2D, frameBufferObjects[3]->texture());
                        programE.setUniformValue("qt_textureB", 4);

                        // TELL OPENGL PROGRAMMABLE PIPELINE HOW TO LOCATE VERTEX POSITION DATA
                        glVertexAttribPointer(programE.attributeLocation("qt_vertex"), 4, GL_FLOAT, GL_FALSE, 4 * sizeof(float), 0);
                        programE.enableAttributeArray("qt_vertex");
                        glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);

                        // RELEASE THE FRAME BUFFER OBJECT AND ITS ASSOCIATED GLSL PROGRAMS
                        quadIndexBuffer.release();
                    }
                    quadVertexBuffer.release();
                }
                programE.release();
            }
            frameBufferObjects[4]->release();
        }
    }
}
213 |
214 | /****************************************************************************/
215 | /****************************************************************************/
216 | /****************************************************************************/
217 | void LAUHarrisCornerDetectorGLWidget::initialize()
218 | {
219 | LAUVideoGLWidget::initialize();
220 |
221 | // NOW ADD OUR LIST OF HARRIS CORNER SHADER PROGRAMS
222 | setlocale(LC_NUMERIC, "C");
223 | programA.addShaderFromSourceFile(QOpenGLShader::Vertex, ":/shaders/Shaders/filterHarrisCornersA.vert");
224 | programA.addShaderFromSourceFile(QOpenGLShader::Fragment, ":/shaders/Shaders/filterHarrisCornersA.frag");
225 | programA.link();
226 |
227 | programB.addShaderFromSourceFile(QOpenGLShader::Vertex, ":/shaders/Shaders/filterHarrisCornersB.vert");
228 | programB.addShaderFromSourceFile(QOpenGLShader::Fragment, ":/shaders/Shaders/filterHarrisCornersB.frag");
229 | programB.link();
230 |
231 | programC.addShaderFromSourceFile(QOpenGLShader::Vertex, ":/shaders/Shaders/filterHarrisCornersC.vert");
232 | programC.addShaderFromSourceFile(QOpenGLShader::Fragment, ":/shaders/Shaders/filterHarrisCornersC.frag");
233 | programC.link();
234 |
235 | programD.addShaderFromSourceFile(QOpenGLShader::Vertex, ":/shaders/Shaders/filterHarrisCornersD.vert");
236 | programD.addShaderFromSourceFile(QOpenGLShader::Fragment, ":/shaders/Shaders/filterHarrisCornersD.frag");
237 | programD.link();
238 |
239 | programE.addShaderFromSourceFile(QOpenGLShader::Vertex, ":/shaders/Shaders/filterHarrisCornersE.vert");
240 | programE.addShaderFromSourceFile(QOpenGLShader::Fragment, ":/shaders/Shaders/filterHarrisCornersE.frag");
241 | programE.link();
242 | setlocale(LC_ALL, "");
243 | }
244 |
245 | /****************************************************************************/
246 | /****************************************************************************/
247 | /****************************************************************************/
// Draws the final pipeline output (FBO[4]: corners over video) to the
// widget; falls back to the base class until process() has created it.
void LAUHarrisCornerDetectorGLWidget::paint()
{
    if (frameBufferObjects[4] == NULL) {
        LAUVideoGLWidget::paint();
    } else {
        // SET THE VIEW PORT AND CLEAR THE SCREEN BUFFER
        glViewport(0, 0, localWidth, localHeight);
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

        // DISPLAY THE LAST FBO IN OUR LIST
        if (videoTexture) {
            if (program.bind()) {
                if (quadVertexBuffer.bind()) {
                    if (quadIndexBuffer.bind()) {
                        // SET THE ACTIVE TEXTURE ON THE GPU
                        glActiveTexture(GL_TEXTURE0);
                        glBindTexture(GL_TEXTURE_2D, frameBufferObjects[4]->texture());
                        program.setUniformValue("qt_texture", 0);
#ifdef Q_OS_WIN
                        program.setUniformValue("qt_flip", false);
#else
                        program.setUniformValue("qt_flip", true);
#endif
                        // TELL OPENGL PROGRAMMABLE PIPELINE HOW TO LOCATE VERTEX POSITION DATA
                        program.setAttributeBuffer("qt_vertex", GL_FLOAT, 0, 4, 4 * sizeof(float));
                        program.enableAttributeArray("qt_vertex");

                        // DRAW THE TWO TRIANGLES (6 INDICES) THAT COVER THE VIEWPORT
                        glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);

                        quadIndexBuffer.release();
                    }
                    quadVertexBuffer.release();
                }
                program.release();
            }
        }
    }
}
286 |
--------------------------------------------------------------------------------
/lauharriscornerdetectorglwidget.h:
--------------------------------------------------------------------------------
1 | #ifndef LAUHARRISCORNERDETECTORGLWIDGET_H
2 | #define LAUHARRISCORNERDETECTORGLWIDGET_H
3 |
#include <QOpenGLShaderProgram>
#include <QOpenGLFramebufferObject>
6 |
7 | #include "lauvideoglwidget.h"
8 |
9 | /****************************************************************************/
10 | /****************************************************************************/
11 | /****************************************************************************/
12 | class LAUHarrisCornerDetectorGLWidget : public LAUVideoGLWidget
13 | {
14 | Q_OBJECT
15 |
16 | public:
17 | explicit LAUHarrisCornerDetectorGLWidget(QWidget *parent = NULL) : LAUVideoGLWidget(parent)
18 | {
19 | qtScaleFactor = 0.05f;
20 | qtCornerThreshold = 0.01f;
21 | for (int n = 0; n < 5; n++) {
22 | frameBufferObjects[n] = NULL;
23 | }
24 | }
25 | ~LAUHarrisCornerDetectorGLWidget();
26 |
27 | QImage grabImage()
28 | {
29 | if (frameBufferObjects[4]) {
30 | return (frameBufferObjects[4]->toImage());
31 | }
32 | return (QImage());
33 | }
34 |
35 | void setScaleFactor(float val)
36 | {
37 | qtScaleFactor = val;
38 | }
39 |
40 | void setCornerThreshold(float val)
41 | {
42 | qtCornerThreshold = val;
43 | }
44 |
45 | void initialize();
46 | void process();
47 | void paint();
48 |
49 | private:
50 | float qtScaleFactor, qtCornerThreshold;
51 | QOpenGLFramebufferObject *frameBufferObjects[5];
52 | QOpenGLShaderProgram programA, programB, programC, programD, programE;
53 | };
54 |
55 | #endif // LAUHARRISCORNERDETECTORGLWIDGET_H
56 |
--------------------------------------------------------------------------------
/lauhistogramequalizationglwidget.cpp:
--------------------------------------------------------------------------------
1 | #include "lauhistogramequalizationglwidget.h"
2 | #include "laumemoryobject.h"
3 | #include "locale.h"
4 |
5 | /****************************************************************************/
6 | /****************************************************************************/
7 | /****************************************************************************/
8 | LAUHistogramEqualizationGLWidget::~LAUHistogramEqualizationGLWidget()
9 | {
10 | if (wasInitialized()) {
11 | makeCurrent();
12 | if (frameBufferObjectA) {
13 | delete frameBufferObjectA;
14 | }
15 | if (frameBufferObjectB) {
16 | delete frameBufferObjectB;
17 | }
18 | if (histogramTexture) {
19 | delete histogramTexture;
20 | }
21 | }
22 | qDebug() << "LAUHistogramEqualizationGLWidget::~LAUHistogramEqualizationGLWidget()";
23 | }
24 |
25 | /****************************************************************************/
26 | /****************************************************************************/
27 | /****************************************************************************/
28 | void LAUHistogramEqualizationGLWidget::initialize()
29 | {
30 | LAUVideoGLWidget::initialize();
31 |
32 | // CREATE GLSL PROGRAM FOR PROCESSING THE INCOMING VIDEO
33 | setlocale(LC_NUMERIC, "C");
34 |
35 | // CREATE A SHADER TO MAP INCOMING VIDEO TO AN RGBA HISTOGRAM
36 | programA.addShaderFromSourceFile(QOpenGLShader::Vertex, ":/shaders/Shaders/rgbaToHistogramFilter.vert");
37 | programA.addShaderFromSourceFile(QOpenGLShader::Geometry, ":/shaders/Shaders/rgbaToHistogramFilter.geom");
38 | programA.addShaderFromSourceFile(QOpenGLShader::Fragment, ":/shaders/Shaders/rgbaToHistogramFilter.frag");
39 | programA.link();
40 |
41 | // CREATE A SHADER FOR RESCALING PIXELS BASED ON HISTOGRAM
42 | programB.addShaderFromSourceFile(QOpenGLShader::Vertex, ":/shaders/Shaders/equalizeHistogramRGBAFilter.vert");
43 | programB.addShaderFromSourceFile(QOpenGLShader::Fragment, ":/shaders/Shaders/equalizeHistogramRGBAFilter.frag");
44 | programB.link();
45 |
46 | setlocale(LC_ALL, "");
47 | }
48 |
49 | /****************************************************************************/
50 | /****************************************************************************/
51 | /****************************************************************************/
52 | void LAUHistogramEqualizationGLWidget::process()
53 | {
54 | // SET THE BLENDING FLAGS
55 | glEnable(GL_BLEND);
56 | glBlendFunc(GL_ONE, GL_ONE);
57 | glBlendEquation(GL_FUNC_ADD);
58 | glPointSize(1.0f);
59 |
60 | // SEE IF WE NEED NEW FBOS
61 | if (videoTexture) {
62 | if (pixlVertexBuffer.isCreated() == false) {
63 | // CREATE A BUFFER TO HOLD THE ROW AND COLUMN COORDINATES OF IMAGE PIXELS FOR THE TEXEL FETCHES
64 | pixlVertexBuffer = QOpenGLBuffer(QOpenGLBuffer::VertexBuffer);
65 | pixlVertexBuffer.create();
66 | pixlVertexBuffer.setUsagePattern(QOpenGLBuffer::StaticDraw);
67 | if (pixlVertexBuffer.bind()) {
68 | pixlVertexBuffer.allocate(videoTexture->height()*videoTexture->width() * 2 * sizeof(float));
69 | float *vertices = (float *)pixlVertexBuffer.map(QOpenGLBuffer::WriteOnly);
70 | if (vertices) {
71 | for (int row = 0; row < videoTexture->height(); row++) {
72 | for (int col = 0; col < videoTexture->width(); col++) {
73 | vertices[2 * (col + row * width()) + 0] = (float)col;
74 | vertices[2 * (col + row * width()) + 1] = (float)row;
75 | }
76 | }
77 | pixlVertexBuffer.unmap();
78 | } else {
79 | qDebug() << QString("Unable to map pixlVertexBuffer from GPU.");
80 | }
81 | }
82 | }
83 |
84 | if (pixlIndexBuffer.isCreated() == false) {
85 | // CREATE AN INDEX BUFFER FOR THE RESULTING POINT CLOUD DRAWN AS TRIANGLES
86 | pixlIndexBuffer = QOpenGLBuffer(QOpenGLBuffer::IndexBuffer);
87 | pixlIndexBuffer.create();
88 | pixlIndexBuffer.setUsagePattern(QOpenGLBuffer::StaticDraw);
89 | if (pixlIndexBuffer.bind()) {
90 | pixlIndexBuffer.allocate((videoTexture->height()*videoTexture->width())*sizeof(unsigned int));
91 | unsigned int *indices = (unsigned int *)pixlIndexBuffer.map(QOpenGLBuffer::WriteOnly);
92 | if (indices) {
93 | unsigned int ind = 0;
94 | for (int row = 0; row < videoTexture->height(); row++) {
95 | for (int col = 0; col < videoTexture->width(); col++) {
96 | indices[ind] = ind;
97 | ind++;
98 | }
99 | }
100 | pixlIndexBuffer.unmap();
101 | } else {
102 | qDebug() << QString("Unable to map indiceBuffer from GPU.");
103 | }
104 | }
105 | }
106 |
107 | // CALCULATE THE SIZE OF THE FRAME BUFFER OBJECT TO HOLD THE HISTOGRAMS
108 | int blocksPerRow = 10;
109 | int blocksPerCol = 10;
110 | int blockWidth = qCeil((float)videoTexture->width() / (float)blocksPerRow);
111 | int blockHeght = qCeil((float)videoTexture->height() / (float)blocksPerCol);
112 |
113 | int numberOfBlocksX = qCeil((float)videoTexture->width() / (float)blockWidth);
114 | int numberOfBlocksY = qCeil((float)videoTexture->height() / (float)blockHeght);
115 | int numberOfBlocks = numberOfBlocksX * numberOfBlocksY;
116 |
117 | if (frameBufferObjectA == nullptr) {
118 | // CREATE THE FRAME BUFFER OBJECT TO HOLD THE HISTOGRAMS OF THE INCOMING TEXTURE
119 | QOpenGLFramebufferObjectFormat frameBufferObjectFormat;
120 | frameBufferObjectFormat.setInternalTextureFormat(GL_RGBA32F);
121 |
122 | // CREATE THE FRAME BUFFER OBJECT TO HOLD THE HISTOGRAMS OF THE INCOMING TEXTURE
123 | frameBufferObjectA = new QOpenGLFramebufferObject(CALIBRATIONHISTOGRAMLENGTH, numberOfBlocks, frameBufferObjectFormat);
124 | frameBufferObjectA->release();
125 |
126 | // CREATE A LOCAL MEMORY OBJECT TO HOLD A COPY OF THE FRAME BUFFER OBJECT
127 | histogramObject = LAUMemoryObject(frameBufferObjectA->width(), frameBufferObjectA->height(), 4, sizeof(float));
128 |
129 | // CREATE TEXTURE FOR HOLDING THE MODIFIED HISTOGRAMS FOR EQUALIZING THE INPUT IMAGE
130 | if (histogramTexture == nullptr) {
131 | histogramTexture = new QOpenGLTexture(QOpenGLTexture::Target2D);
132 | histogramTexture->setSize(frameBufferObjectA->width(), frameBufferObjectA->height());
133 | histogramTexture->setFormat(QOpenGLTexture::RGBA32F);
134 | histogramTexture->setWrapMode(QOpenGLTexture::ClampToBorder);
135 | histogramTexture->setMinificationFilter(QOpenGLTexture::Nearest);
136 | histogramTexture->setMagnificationFilter(QOpenGLTexture::Nearest);
137 | histogramTexture->allocateStorage();
138 | }
139 | }
140 |
141 | if (frameBufferObjectB == nullptr) {
142 | // CREATE THE FRAME BUFFER OBJECT TO HOLD THE HISTOGRAM EQUALIZED RESULTS AS A TEXTURE
143 | QOpenGLFramebufferObjectFormat frameBufferObjectFormat;
144 | frameBufferObjectFormat.setInternalTextureFormat(GL_RGBA32F);
145 |
146 | // CREATE THE FRAME BUFFER OBJECT TO HOLD THE HISTOGRAM EQUALIZED RESULTS AS A TEXTURE
147 | frameBufferObjectB = new QOpenGLFramebufferObject(videoTexture->width(), videoTexture->height(), frameBufferObjectFormat);
148 | frameBufferObjectB->release();
149 | }
150 |
151 | // BIND THE FRAME BUFFER OBJECT FOR PROCESSING THE HISTOGRAM
152 | // ALONG WITH THE GLSL PROGRAMS THAT WILL DO THE PROCESSING
153 | if (frameBufferObjectA->bind()) {
154 | if (programA.bind()) {
155 | // CLEAR THE FRAME BUFFER OBJECT
156 | glViewport(0, 0, frameBufferObjectA->width(), frameBufferObjectA->height());
157 | glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
158 | glClear(GL_COLOR_BUFFER_BIT);
159 |
160 | // BIND VBOS FOR DRAWING PIXELS ON SCREEN
161 | if (pixlVertexBuffer.bind()) {
162 | if (pixlIndexBuffer.bind()) {
163 | // BIND THE TEXTURE FROM THE FRAME BUFFER OBJECT
164 | glActiveTexture(GL_TEXTURE0);
165 | videoTexture->bind();
166 | programA.setUniformValue("qt_texture", 0);
167 |
168 | programA.setUniformValue("qt_blockSizeX", blockWidth);
169 | programA.setUniformValue("qt_blockSizeY", blockHeght);
170 | programA.setUniformValue("qt_blocksPerRow", blocksPerRow);
171 | programA.setUniformValue("qt_blocksPerCol", blocksPerCol);
172 |
173 | float geometrySlope = 2.0f / (float)frameBufferObjectA->height();
174 | float geometryOffst = 1.0f / (float)frameBufferObjectA->height() - 1.0f;
175 | programA.setUniformValue("qt_geometryMappingSlope", geometrySlope);
176 | programA.setUniformValue("qt_geometryMappingOffst", geometryOffst);
177 |
178 | // TELL OPENGL PROGRAMMABLE PIPELINE HOW TO LOCATE VERTEX POSITION DATA
179 | glVertexAttribPointer(programA.attributeLocation("qt_vertex"), 2, GL_FLOAT, GL_FALSE, 2 * sizeof(float), 0);
180 | programA.enableAttributeArray("qt_vertex");
181 | glDrawElements(GL_POINTS, videoTexture->width()*videoTexture->height(), GL_UNSIGNED_INT, 0);
182 |
183 | // RELEASE THE FRAME BUFFER OBJECT AND ITS ASSOCIATED GLSL PROGRAMS
184 | pixlIndexBuffer.release();
185 | }
186 | pixlVertexBuffer.release();
187 | }
188 | programA.release();
189 | }
190 | frameBufferObjectA->release();
191 | }
192 |
193 | // DOWNLOAD THE HISTOGRAM BUFFER OBJECT FROM THE GPU TO THE CPU
194 | glBindTexture(GL_TEXTURE_2D, frameBufferObjectA->texture());
195 | glPixelStorei(GL_PACK_ALIGNMENT, 1);
196 | glGetTexImage(GL_TEXTURE_2D, 0, GL_RGBA, GL_FLOAT, histogramObject.constPointer());
197 |
198 | // EQUALIZE THE HISTOGRAMS
199 | for (unsigned int row = 0; row < histogramObject.height(); row++) {
200 | // GET A POINTER TO THE CURRENT HISTOGRAM
201 | float *buffer = (float *)histogramObject.constScanLine(row);
202 |
203 | // GET THE NUMBER OF PIXELS THE CURRENT HISTOGRAM (ALPHA VALUE OF ELEMENT IN THE HISTOGRAM)
204 | __m128 numPixelsInSubblockVec = _mm_set1_ps(buffer[255 * 4 + 3]);
205 |
206 | // SET THE UPPER BOUND ON THE HISTOGRAM DISTRIBUTION
207 | __m128 vecMx = _mm_set1_ps(0.025f);
208 |
209 | // GENERATE A CLIPPED HISTOGRAM AND COLLECT THE TRIMMED PART
210 | __m128 cumSumA = _mm_set1_ps(0.0f);
211 | for (unsigned int col = 0; col < 256; col++) {
212 | __m128 pixA = _mm_div_ps(_mm_load_ps(&buffer[4 * col]), numPixelsInSubblockVec);
213 | __m128 pixB = _mm_min_ps(pixA, vecMx);
214 | _mm_store_ps(&buffer[4 * col], pixB);
215 | cumSumA = _mm_add_ps(cumSumA, _mm_sub_ps(pixA, pixB));
216 | }
217 |
218 | // DETERMINE HOW MUCH TO TRIMMED AREA TO DISTRIBUTE OVER ALL BINS
219 | cumSumA = _mm_div_ps(cumSumA, _mm_set1_ps(256.0));
220 |
221 | // ITERATION THROUGH HISTOGRAM CALCULATING CUMMULATIVE DISTRIBUTION FUNCTION
222 | __m128 cumSumB = _mm_set1_ps(0.0f);
223 | for (unsigned int col = 0; col < 256; col++) {
224 | __m128 pix = _mm_add_ps(_mm_load_ps(&buffer[4 * col]), cumSumA);
225 | cumSumB = _mm_add_ps(cumSumB, pix);
226 | _mm_store_ps(&buffer[4 * col], cumSumB);
227 | }
228 | }
229 | //histogramObject.save(QString("/Users/dllau/Documents/histogramObject.tif"));
230 |
231 | // UPLOAD THE MODIFIED HISTOGRAMS BACK TO THE GPU
232 | histogramTexture->setData(QOpenGLTexture::RGBA, QOpenGLTexture::Float32, (const void *)histogramObject.constPointer());
233 |
234 | // BIND THE FRAME BUFFER OBJECT FOR PROCESSING THE HISTOGRAM
235 | // ALONG WITH THE GLSL PROGRAMS THAT WILL DO THE PROCESSING
236 | if (frameBufferObjectB->bind()) {
237 | if (programB.bind()) {
238 | // CLEAR THE FRAME BUFFER OBJECT
239 | glViewport(0, 0, frameBufferObjectB->width(), frameBufferObjectB->height());
240 | glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
241 | glClear(GL_COLOR_BUFFER_BIT);
242 |
243 | // BIND VBOS FOR DRAWING PIXELS ON SCREEN
244 | if (quadVertexBuffer.bind()) {
245 | if (quadIndexBuffer.bind()) {
246 | // BIND THE TEXTURE OF THE INPUT IMAGE
247 | glActiveTexture(GL_TEXTURE0);
248 | videoTexture->bind();
249 | programB.setUniformValue("qt_texture", 0);
250 |
251 | // BIND THE TEXTURE OF THE HISTOGRAM TEXTURE
252 | glActiveTexture(GL_TEXTURE1);
253 | histogramTexture->bind();
254 | programB.setUniformValue("qt_histogram", 1);
255 |
256 | // SET THE SUBBLOCK COORDINATE MAPPING PARAMETERS
257 | programB.setUniformValue("qt_blockSizeX", blockWidth);
258 | programB.setUniformValue("qt_blockSizeY", blockHeght);
259 | programB.setUniformValue("qt_blocksPerRow", blocksPerRow);
260 | programB.setUniformValue("qt_blocksPerCol", blocksPerCol);
261 |
262 | // TELL OPENGL PROGRAMMABLE PIPELINE HOW TO LOCATE VERTEX POSITION DATA
263 | glVertexAttribPointer(programB.attributeLocation("qt_vertex"), 4, GL_FLOAT, GL_FALSE, 4 * sizeof(float), 0);
264 | programB.enableAttributeArray("qt_vertex");
265 | glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
266 |
267 | // RELEASE THE FRAME BUFFER OBJECT AND ITS ASSOCIATED GLSL PROGRAMS
268 | quadIndexBuffer.release();
269 | }
270 | quadVertexBuffer.release();
271 | }
272 | programB.release();
273 | }
274 | //static LAUMemoryObject object(frameBufferObjectB->width(), frameBufferObjectB->height(), 4, sizeof(float));
275 | //glBindTexture(GL_TEXTURE_2D, frameBufferObjectB->texture());
276 | //glPixelStorei(GL_PACK_ALIGNMENT, 1);
277 | //glGetTexImage(GL_TEXTURE_2D, 0, GL_RGBA, GL_FLOAT, object.constPointer());
278 | //object.save(QString("/Users/dllau/Documents/histogram.tif"));
279 | frameBufferObjectB->release();
280 | }
281 | }
282 | }
283 |
284 | /****************************************************************************/
285 | /****************************************************************************/
286 | /****************************************************************************/
// DRAW THE LATEST HISTOGRAM-EQUALIZED FRAME TO THE WIDGET; FALLS BACK TO THE
// BASE CLASS (RAW VIDEO) IF process() HAS NOT YET CREATED ITS OUTPUT FBO.
void LAUHistogramEqualizationGLWidget::paint()
{
    if (frameBufferObjectB == nullptr) {
        // NO PROCESSED FRAME EXISTS YET, SO LET THE BASE CLASS DRAW THE VIDEO
        LAUVideoGLWidget::paint();
    } else {
        // SET THE VIEW PORT AND CLEAR THE SCREEN BUFFER
        glViewport(0, 0, localWidth, localHeight);
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

        // DISPLAY THE LAST FBO IN OUR LIST
        if (videoTexture) {
            if (program.bind()) {
                if (quadVertexBuffer.bind()) {
                    if (quadIndexBuffer.bind()) {
                        // SET THE ACTIVE TEXTURE ON THE GPU TO THE PROCESSED FBO'S TEXTURE
                        glActiveTexture(GL_TEXTURE0);
                        glBindTexture(GL_TEXTURE_2D, frameBufferObjectB->texture());
                        program.setUniformValue("qt_texture", 0);
#ifdef Q_OS_WIN
                        // NOTE(review): the vertical flip differs by platform —
                        // presumably to compensate for texture row order; confirm
                        program.setUniformValue("qt_flip", false);
#else
                        program.setUniformValue("qt_flip", true);
#endif
                        // TELL OPENGL PROGRAMMABLE PIPELINE HOW TO LOCATE VERTEX POSITION DATA
                        program.setAttributeBuffer("qt_vertex", GL_FLOAT, 0, 4, 4 * sizeof(float));
                        program.enableAttributeArray("qt_vertex");

                        // DRAW THE FULL-SCREEN QUAD AS TWO TRIANGLES
                        glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);

                        quadIndexBuffer.release();
                    }
                    quadVertexBuffer.release();
                }
                program.release();
            }
        }
    }
}
325 |
--------------------------------------------------------------------------------
/lauhistogramequalizationglwidget.h:
--------------------------------------------------------------------------------
1 | #ifndef LAUHISTOGRAMEQUALIZATIONGLWIDGET_H
2 | #define LAUHISTOGRAMEQUALIZATIONGLWIDGET_H
3 | #include
4 | #include
5 |
6 | #include "lauvideoglwidget.h"
7 | #include "laumemoryobject.h"
8 |
9 | #define CALIBRATIONHISTOGRAMLENGTH 256
10 |
11 | /****************************************************************************/
12 | /****************************************************************************/
13 | /****************************************************************************/
14 | class LAUHistogramEqualizationGLWidget : public LAUVideoGLWidget
15 | {
16 | Q_OBJECT
17 |
18 | public:
19 | explicit LAUHistogramEqualizationGLWidget(QWidget *parent = NULL) : LAUVideoGLWidget(parent), frameBufferObjectA(nullptr), frameBufferObjectB(nullptr), histogramTexture(nullptr) { ; }
20 | ~LAUHistogramEqualizationGLWidget();
21 |
22 | void initialize();
23 | void process();
24 | void paint();
25 |
26 | QImage grabImage()
27 | {
28 | if (frameBufferObjectB) {
29 | return (frameBufferObjectB->toImage());
30 | }
31 | return (QImage());
32 | }
33 |
34 | private:
35 | LAUMemoryObject histogramObject;
36 | QOpenGLFramebufferObject *frameBufferObjectA, *frameBufferObjectB;
37 | QOpenGLTexture *histogramTexture;
38 | QOpenGLShaderProgram programA, programB;
39 | QOpenGLBuffer pixlVertexBuffer, pixlIndexBuffer;
40 | };
41 |
42 | #endif // LAUHISTOGRAMEQUALIZATIONGLWIDGET_H
43 |
--------------------------------------------------------------------------------
/laumemoryobject.h:
--------------------------------------------------------------------------------
1 | /*********************************************************************************
2 | * *
3 | * Copyright (c) 2017, Dr. Daniel L. Lau *
4 | * All rights reserved. *
5 | * *
6 | * Redistribution and use in source and binary forms, with or without *
7 | * modification, are permitted provided that the following conditions are met: *
8 | * 1. Redistributions of source code must retain the above copyright *
9 | * notice, this list of conditions and the following disclaimer. *
10 | * 2. Redistributions in binary form must reproduce the above copyright *
11 | * notice, this list of conditions and the following disclaimer in the *
12 | * documentation and/or other materials provided with the distribution. *
13 | * 3. All advertising materials mentioning features or use of this software *
14 | * must display the following acknowledgement: *
15 | * This product includes software developed by the . *
16 | * 4. Neither the name of the nor the *
17 | * names of its contributors may be used to endorse or promote products *
18 | * derived from this software without specific prior written permission. *
19 | * *
20 | * THIS SOFTWARE IS PROVIDED BY Dr. Daniel L. Lau ''AS IS'' AND ANY *
21 | * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED *
22 | * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE *
23 | * DISCLAIMED. IN NO EVENT SHALL Dr. Daniel L. Lau BE LIABLE FOR ANY *
24 | * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES *
25 | * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; *
26 | * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND *
27 | * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT *
28 | * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS *
29 | * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. *
30 | * *
31 | *********************************************************************************/
32 |
33 | #ifndef LAUMEMORYOBJECT_H
34 | #define LAUMEMORYOBJECT_H
35 |
36 | #ifndef Q_PROCESSOR_ARM
37 | #include "emmintrin.h"
38 | #include "xmmintrin.h"
39 | #include "tmmintrin.h"
40 | #include "smmintrin.h"
41 | #endif
42 |
43 | #include
44 | #include
45 | #include
46 | #include
47 | #include
48 | #include
49 | #include
50 | #include
51 | #include
52 | #include
53 |
54 | #ifndef HEADLESS
55 | #include
56 | #endif
57 |
58 | namespace libtiff
59 | {
60 | #include "tiffio.h"
61 | }
62 |
// SHARED ENUMERATIONS AND QUERY HELPERS DESCRIBING VIDEO SOURCES, COLOR
// LAYOUTS, AND STRUCTURED-LIGHT PATTERN SEQUENCES USED THROUGHOUT THE PROJECT.
namespace LAU3DVideoParameters
{
    // LIVE CAPTURE VERSUS PLAYBACK OF PREVIOUSLY RECORDED VIDEO
    enum LAUVideoPlaybackState { StateLiveVideo, StateVideoPlayback };
    // SUPPORTED CAPTURE DEVICES
    enum LAUVideoPlaybackDevice { DeviceUndefined, DeviceKinect, DevicePrimeSense, DeviceProsilicaLCG, DeviceProsilicaDPR, DeviceProsilicaIOS, Device2DCamera, DeviceProsilicaARG, DeviceProsilicaPST, DeviceProsilicaAST, DeviceProsilicaGRY, DeviceProsilicaRGB, DeviceXimea, DeviceIDS, DeviceRealSense, DeviceLucid, DeviceDemo };
    // PER-PIXEL CHANNEL LAYOUTS (COLOR, 3-D COORDINATES, OR COMBINATIONS)
    enum LAUVideoPlaybackColor { ColorUndefined, ColorGray, ColorRGB, ColorRGBA, ColorXYZ, ColorXYZW, ColorXYZG, ColorXYZRGB, ColorXYZWRGBA };
    // STRUCTURED-LIGHT PATTERN SEQUENCES
    enum LAUVideoPatternSequence { SequenceNone, SequenceCustom, SequenceUnitFrequency, SequenceTwoFrequency, SequenceThreeFrequency, SequenceDualFrequency, SequenceCalibration, SequenceMultipath, SequenceTiming };
    // SUPPORTED PROJECTOR MODELS (NOTE: ProjectoML750ST SPELLING IS PART OF THE API)
    enum LAUVideoProjector { ProjectorLC4500, ProjectorLC3000, ProjectorTI2010, ProjectorML500, ProjectoML750ST, ProjectorUnknown };
    // HOW THE PROJECTOR AND CAMERA ARE SYNCHRONIZED
    enum LAUVideoProjectorSynchronizationMode { ModeSlave, ModeMaster, ModeMono, ModeMasterHandshake, ModeHDMIFPGA, ModeFPGA };
    // HOW PATTERN BOUNDARIES ARE DETECTED IN THE VIDEO STREAM
    enum LAUVideoPatternSynchronizationScheme { SchemeFlashingSequence, SchemePatternBit, SchemeNone };

    // NUMBER OF CHANNELS IMPLIED BY A COLOR LAYOUT (DEFINED ELSEWHERE)
    int colors(LAUVideoPlaybackColor clr);
    // WHETHER THE DEVICE IS A MACHINE-VISION CAMERA (DEFINED ELSEWHERE)
    bool isMachineVision(LAUVideoPlaybackDevice dvc);
}
76 |
using namespace LAU3DVideoParameters;

// BOUNDS ON THE POOL OF PRE-ALLOCATED VIDEO FRAMES KEPT AVAILABLE FOR RECYCLING
#define MINNUMBEROFFRAMESAVAILABLE 40
#define MAXNUMBEROFFRAMESAVAILABLE 100

// CUSTOM LIBTIFF HANDLERS; presumably they capture warning/error text into the
// static lastTiff*String members — confirm against the .cpp implementation
void myTIFFWarningHandler(const char *stringA, const char *stringB, va_list args);
void myTIFFErrorHandler(const char *stringA, const char *stringB, va_list args);
84 |
85 | /****************************************************************************/
86 | /****************************************************************************/
87 | /****************************************************************************/
// REFERENCE-COUNTED PAYLOAD BEHIND LAUMemoryObject (Qt IMPLICIT SHARING VIA
// QSharedData); OWNS THE RAW PIXEL BUFFER AND ITS PER-FRAME METADATA.
class LAUMemoryObjectData : public QSharedData
{
public:
    LAUMemoryObjectData();
    LAUMemoryObjectData(const LAUMemoryObjectData &other);
    LAUMemoryObjectData(unsigned int cols, unsigned int rows, unsigned int chns = 1, unsigned int byts = 1, unsigned int frms = 1);
    LAUMemoryObjectData(unsigned long long bytes);

    ~LAUMemoryObjectData();

    // COUNTS LIVE INSTANCES; presumably for leak diagnostics — confirm in .cpp
    static int instanceCounter;

    // IMAGE GEOMETRY: ROWS, COLUMNS, CHANNELS, FRAMES, AND BYTES PER SAMPLE
    unsigned int numRows, numCols, numChns, numFrms, numByts;
    // BYTES PER SCAN LINE AND BYTES PER FRAME (USED FOR POINTER ARITHMETIC)
    unsigned int stepBytes, frameBytes;
    unsigned long long numBytesTotal;
    void *buffer;   // RAW PIXEL STORAGE (nullptr WHEN THE OBJECT IS NULL)

    // OPTIONAL PER-OBJECT METADATA (HEAP-ALLOCATED; MAY BE nullptr)
    QString *rfidString;
    QMatrix4x4 *transformMatrix;
    QPoint *anchorPt;
    unsigned int *elapsedTime;

    // ALLOCATES buffer FROM THE GEOMETRY FIELDS ABOVE
    void allocateBuffer();
};
112 |
113 | /****************************************************************************/
114 | /****************************************************************************/
115 | /****************************************************************************/
116 | class LAUMemoryObject
117 | {
118 | public:
119 | LAUMemoryObject()
120 | {
121 | data = new LAUMemoryObjectData();
122 | }
123 |
124 | LAUMemoryObject(unsigned int cols, unsigned int rows, unsigned int chns = 1, unsigned int byts = 1, unsigned int frms = 1)
125 | {
126 | data = new LAUMemoryObjectData(cols, rows, chns, byts, frms);
127 | }
128 |
129 | LAUMemoryObject(unsigned long long bytes)
130 | {
131 | data = new LAUMemoryObjectData(bytes);
132 | }
133 |
134 | LAUMemoryObject(const LAUMemoryObject &other) : data(other.data) { ; }
135 |
136 | LAUMemoryObject &operator = (const LAUMemoryObject &other)
137 | {
138 | if (this != &other) {
139 | data = other.data;
140 | }
141 | return (*this);
142 | }
143 |
144 | LAUMemoryObject(QImage image);
145 | LAUMemoryObject(QString filename, int index = -1);
146 | LAUMemoryObject(libtiff::TIFF *inTiff, int index = -1);
147 |
148 | bool save(QString filename = QString()) const;
149 | bool save(libtiff::TIFF *otTiff, int index = 0) const;
150 | bool load(libtiff::TIFF *inTiff, int index = -1);
151 |
152 | // LOAD INTO READS A FILE INTO THE EXISTING BUFFER BUT ALL
153 | // SIZE PARAMETERS MUST BE SAME OTHERWISE RETURN FALSE
154 | bool loadInto(libtiff::TIFF *inTiff, int index = -1);
155 | bool loadInto(QString filename, int index = -1);
156 |
157 | unsigned int nonZeroPixelsCount() const;
158 | LAUMemoryObject toFloat();
159 | LAUMemoryObject minAreaFilter(int rad) const;
160 |
161 | // SEE IF THE POINTERS ARE LOOKING AT SAME MEMORY
162 | bool operator == (const LAUMemoryObject &other) const
163 | {
164 | if (this == &other) {
165 | return (true);
166 | }
167 | return (data->buffer == other.data->buffer);
168 | }
169 |
170 | bool operator < (const LAUMemoryObject &other) const
171 | {
172 | if (this == &other) {
173 | return (false);
174 | }
175 | return (elapsed() < other.elapsed());
176 | }
177 |
178 | bool operator > (const LAUMemoryObject &other) const
179 | {
180 | if (this == &other) {
181 | return (false);
182 | }
183 | return (elapsed() > other.elapsed());
184 | }
185 |
186 | bool operator <= (const LAUMemoryObject &other) const
187 | {
188 | if (this == &other) {
189 | return (true);
190 | }
191 | return (elapsed() <= other.elapsed());
192 | }
193 |
194 | bool operator >= (const LAUMemoryObject &other) const
195 | {
196 | if (this == &other) {
197 | return (true);
198 | }
199 | return (elapsed() >= other.elapsed());
200 | }
201 |
202 | ~LAUMemoryObject() { ; }
203 |
204 | inline bool isNull() const
205 | {
206 | return (data->buffer == nullptr);
207 | }
208 |
209 | inline bool isValid() const
210 | {
211 | return (data->buffer != nullptr);
212 | }
213 |
214 | inline unsigned long long length() const
215 | {
216 | return (data->numBytesTotal);
217 | }
218 |
219 | inline QSize size() const
220 | {
221 | return (QSize(width(), height()));
222 | }
223 |
224 | inline unsigned int nugget() const
225 | {
226 | return (data->numChns * data->numByts);
227 | }
228 |
229 | inline unsigned int width() const
230 | {
231 | return (data->numCols);
232 | }
233 |
234 | inline unsigned int height() const
235 | {
236 | return (data->numRows);
237 | }
238 |
239 | inline unsigned int depth() const
240 | {
241 | return (data->numByts);
242 | }
243 |
244 | inline unsigned int colors() const
245 | {
246 | return (data->numChns);
247 | }
248 |
249 | inline unsigned int frames() const
250 | {
251 | return (data->numFrms);
252 | }
253 |
254 | inline unsigned int step() const
255 | {
256 | return (data->stepBytes);
257 | }
258 |
259 | inline unsigned long long block() const
260 | {
261 | return (data->frameBytes);
262 | }
263 |
264 | inline unsigned char *pointer()
265 | {
266 | return (scanLine(0));
267 | }
268 |
269 | inline unsigned char *constPointer() const
270 | {
271 | return (constScanLine(0));
272 | }
273 |
274 | inline unsigned char *scanLine(unsigned int row, unsigned int frame = 0)
275 | {
276 | return (&(((unsigned char *)(data->buffer))[frame * block() + row * step()]));
277 | }
278 |
279 | inline unsigned char *constScanLine(unsigned int row, unsigned int frame = 0) const
280 | {
281 | return (&(((unsigned char *)(data->buffer))[frame * block() + row * step()]));
282 | }
283 |
284 | inline unsigned char *frame(unsigned int frm = 0)
285 | {
286 | return (scanLine(0, frm));
287 | }
288 |
289 | inline unsigned char *constFrame(unsigned int frm = 0) const
290 | {
291 | return (constScanLine(0, frm));
292 | }
293 |
294 | inline QString rfid() const
295 | {
296 | if (data->rfidString) {
297 | return (*(data->rfidString));
298 | } else {
299 | return (QString("RFID String wasn't allocated!"));
300 | }
301 | }
302 |
303 | inline void setRFID(QString string)
304 | {
305 | if (data->rfidString) {
306 | data->rfidString->clear();
307 | data->rfidString->append(string);
308 | }
309 | }
310 |
311 | inline void setConstRFID(QString string) const
312 | {
313 | if (data->rfidString) {
314 | data->rfidString->clear();
315 | data->rfidString->append(string);
316 | }
317 | }
318 |
319 | inline QMatrix4x4 transform() const
320 | {
321 | if (data->transformMatrix) {
322 | return (*(data->transformMatrix));
323 | } else {
324 | return (QMatrix4x4());
325 | }
326 | }
327 |
328 | inline void setTransform(QMatrix4x4 mat)
329 | {
330 | if (data->transformMatrix) {
331 | memcpy((void *)(data->transformMatrix->data()), (void *)mat.constData(), sizeof(QMatrix4x4));
332 | }
333 | }
334 |
335 | inline void setConstTransform(QMatrix4x4 mat) const
336 | {
337 | if (data->transformMatrix) {
338 | memcpy((void *)(data->transformMatrix->data()), (void *)mat.constData(), sizeof(QMatrix4x4));
339 | }
340 | }
341 |
342 | inline unsigned int elapsed() const
343 | {
344 | if (data->elapsedTime) {
345 | return (*data->elapsedTime);
346 | } else {
347 | return (0);
348 | }
349 | }
350 |
351 | inline void setElapsed(unsigned int elps)
352 | {
353 | if (data->elapsedTime) {
354 | *data->elapsedTime = elps;
355 | }
356 | }
357 |
358 | inline void setConstElapsed(unsigned int elps) const
359 | {
360 | if (data->elapsedTime) {
361 | *data->elapsedTime = elps;
362 | }
363 | }
364 |
365 | inline QPoint anchor() const
366 | {
367 | if (data->anchorPt) {
368 | return (*data->anchorPt);
369 | } else {
370 | return (QPoint());
371 | }
372 | }
373 |
374 | inline void setAnchor(QPoint pt)
375 | {
376 | if (data->anchorPt) {
377 | data->anchorPt->setX(pt.x());
378 | data->anchorPt->setY(pt.y());
379 | }
380 | }
381 |
382 | inline void setConstAnchor(QPoint pt) const
383 | {
384 | if (data->anchorPt) {
385 | data->anchorPt->setX(pt.x());
386 | data->anchorPt->setY(pt.y());
387 | }
388 | }
389 |
390 | static int numberOfColors(LAUVideoPlaybackColor color)
391 | {
392 | switch (color) {
393 | case ColorGray:
394 | return (1);
395 | case ColorRGB:
396 | case ColorXYZ:
397 | return (3);
398 | case ColorRGBA:
399 | case ColorXYZG:
400 | return (4);
401 | case ColorXYZRGB:
402 | return (6);
403 | case ColorXYZWRGBA:
404 | return (8);
405 | default:
406 | return (-1);
407 | }
408 | }
409 |
410 | static QString lastTiffErrorString;
411 | static QString lastTiffWarningString;
412 | static int howManyDirectoriesDoesThisTiffFileHave(QString filename);
413 | static int howManyChannelsDoesThisTiffFileHave(QString filename, int frame);
414 |
415 | protected:
416 | QSharedDataPointer data;
417 | };
418 |
419 | /****************************************************************************/
420 | /****************************************************************************/
421 | /****************************************************************************/
422 | class LAUMemoryObjectManager : public QObject
423 | {
424 | Q_OBJECT
425 |
426 | public:
427 | explicit LAUMemoryObjectManager(unsigned int cols = 0, unsigned int rows = 0, unsigned int chns = 0, unsigned int byts = 0, unsigned int frms = 0, QObject *parent = 0) : QObject(parent), numRows(rows), numCols(cols), numChns(chns), numByts(byts), numFrms(frms) { ; }
428 | ~LAUMemoryObjectManager();
429 |
430 | public slots:
431 | void onGetFrame();
432 | void onReleaseFrame(LAUMemoryObject frame);
433 |
434 | private:
435 | unsigned int numRows, numCols, numChns, numByts, numFrms;
436 | QList framesAvailable;
437 |
438 | signals:
439 | void emitFrame(LAUMemoryObject frame);
440 | };
441 |
442 | /****************************************************************************/
443 | /****************************************************************************/
444 | /****************************************************************************/
445 | class LAUModalityObject
446 | {
447 | public:
448 | LAUModalityObject(LAUMemoryObject dpt = LAUMemoryObject(), LAUMemoryObject clr = LAUMemoryObject(), LAUMemoryObject map = LAUMemoryObject()) : depth(dpt), color(clr), mappi(map) { ; }
449 | LAUModalityObject(const LAUModalityObject &other) : depth(other.depth), color(other.color), mappi(other.mappi) { ; }
450 | LAUModalityObject &operator = (const LAUModalityObject &other)
451 | {
452 | if (this != &other) {
453 | depth = other.depth;
454 | color = other.color;
455 | mappi = other.mappi;
456 | }
457 | return (*this);
458 | }
459 |
460 | bool isAnyValid()
461 | {
462 | return (depth.isValid() || color.isValid() || mappi.isValid());
463 | }
464 |
465 | LAUMemoryObject depth;
466 | LAUMemoryObject color;
467 | LAUMemoryObject mappi;
468 | };
469 |
470 | /****************************************************************************/
471 | /****************************************************************************/
472 | /****************************************************************************/
// BACKGROUND THREAD THAT WRITES ONE LAUMemoryObject TO A TIFF FILE; THE FILE
// IS OPENED IN THE CONSTRUCTOR (isValid() REPORTS SUCCESS) AND THE ACTUAL
// WRITE HAPPENS IN run(). EMITS emitSaveComplete() WHEN DONE — presumably
// after run() finishes; confirm against the .cpp implementation.
class LAUMemoryObjectWriter : public QThread
{
    Q_OBJECT

public:
    explicit LAUMemoryObjectWriter(QString flnm, LAUMemoryObject obj, QObject *parent = nullptr);
    ~LAUMemoryObjectWriter();

    // TRUE IF THE OUTPUT FILE COULD NOT BE OPENED
    bool isNull() const
    {
        return (!isValid());
    }

    // TRUE IF THE OUTPUT TIFF HANDLE WAS SUCCESSFULLY OPENED
    bool isValid() const
    {
        return (tiff != nullptr);
    }

protected:
    void run();

private:
    libtiff::TIFF *tiff;        // OUTPUT FILE HANDLE (nullptr ON FAILURE)
    LAUMemoryObject object;     // FRAME TO BE WRITTEN (SHARED, CHEAP COPY)

signals:
    void emitSaveComplete();
};
501 |
502 | Q_DECLARE_METATYPE(LAUMemoryObject);
503 |
504 | #endif // LAUMEMORYOBJECT_H
505 |
--------------------------------------------------------------------------------
/laurandomizepixelsglwidget.cpp:
--------------------------------------------------------------------------------
1 | #include "laurandomizepixelsglwidget.h"
2 | #include "locale.h"
3 |
4 | /****************************************************************************/
5 | /****************************************************************************/
6 | /****************************************************************************/
7 | LAURandomizePixelsGLWidget::~LAURandomizePixelsGLWidget()
8 | {
9 | if (wasInitialized()) {
10 | makeCurrent();
11 | if (mapTexture) {
12 | delete mapTexture;
13 | }
14 | if (frameBufferObject) {
15 | delete frameBufferObject;
16 | }
17 | }
18 | qDebug() << "LAURandomizePixelsGLWidget::~LAURandomizePixelsGLWidget()";
19 | }
20 |
21 | /****************************************************************************/
22 | /****************************************************************************/
23 | /****************************************************************************/
24 | void LAURandomizePixelsGLWidget::buildMappingTexture(int cols, int rows)
25 | {
26 | // CHECK TO SEE IF THERE IS AN EXISTING TEXTURE THAT WE NEED TO DELETE
27 | if (mapTexture) {
28 | delete mapTexture;
29 | }
30 |
31 | // CREATE A NEW 3-D TEXTURE
32 | mapTexture = new QOpenGLTexture(QOpenGLTexture::Target3D);
33 | mapTexture->setSize(cols, rows, 16);
34 | mapTexture->setFormat(QOpenGLTexture::RG32F);
35 | mapTexture->setWrapMode(QOpenGLTexture::ClampToEdge);
36 | mapTexture->setMinificationFilter(QOpenGLTexture::Linear);
37 | mapTexture->setMagnificationFilter(QOpenGLTexture::Linear);
38 | mapTexture->allocateStorage();
39 |
40 | // CREATE A LIST OF AVAILABLE PIXELS
41 | QList pointsA, pointsB;
42 | for (int c = 0; c < cols; c++) {
43 | for (int r = 0; r < rows; r++) {
44 | pointsA << QPoint(c, r);
45 | }
46 | }
47 |
48 | // NOW SORT THE PIXELS IN RANDOM ORDER FRAME BY FRAME
49 | int index = 0;
50 | unsigned short *buffer = (unsigned short *)malloc(mapTexture->width() * mapTexture->height() * mapTexture->depth() * sizeof(unsigned short) * 2);
51 | for (int s = 0; s < mapTexture->depth(); s++) {
52 | framesA << s;
53 | if (pointsA.count() > pointsB.count()) {
54 | while (pointsA.isEmpty() == false) {
55 | int index = qFloor((double)rand() / (double)RAND_MAX * (double)pointsA.count());
56 | pointsB << pointsA.takeAt(index);
57 | }
58 | for (int n = 0; n < pointsB.count(); n++) {
59 | buffer[index++] = (unsigned short)pointsB.at(n).x();
60 | buffer[index++] = (unsigned short)pointsB.at(n).y();
61 | }
62 | } else {
63 | while (pointsB.isEmpty() == false) {
64 | int index = qFloor((double)rand() / (double)RAND_MAX * (double)pointsB.count());
65 | pointsA << pointsB.takeAt(index);
66 | }
67 | for (int n = 0; n < pointsA.count(); n++) {
68 | buffer[index++] = (unsigned short)pointsA.at(n).x();
69 | buffer[index++] = (unsigned short)pointsA.at(n).y();
70 | }
71 | }
72 | }
73 |
74 | // UPLOAD THE RANDOM INDICES TO THE GPU TEXTURE
75 | QOpenGLPixelTransferOptions options;
76 | options.setAlignment(1);
77 | mapTexture->setData(QOpenGLTexture::RG, QOpenGLTexture::UInt16, (const void *)buffer, &options);
78 |
79 | // DELETE THE TEMPORARY BUFFER
80 | free(buffer);
81 | }
82 |
83 | /****************************************************************************/
84 | /****************************************************************************/
85 | /****************************************************************************/
86 | QImage LAURandomizePixelsGLWidget::grabImage()
87 | {
88 | if (frameBufferObject) {
89 | return (frameBufferObject->toImage());
90 | }
91 | return (QImage());
92 | }
93 |
94 | /****************************************************************************/
95 | /****************************************************************************/
96 | /****************************************************************************/
97 | void LAURandomizePixelsGLWidget::initialize()
98 | {
99 | LAUVideoGLWidget::initialize();
100 |
101 | // NOW ADD OUR LIST OF HARRIS CORNER SHADER PROGRAMS
102 | setlocale(LC_NUMERIC, "C");
103 | programA.addShaderFromSourceFile(QOpenGLShader::Vertex, ":/shaders/Shaders/filterRandomMappingA.vert");
104 | programA.addShaderFromSourceFile(QOpenGLShader::Fragment, ":/shaders/Shaders/filterRandomMappingA.frag");
105 | programA.link();
106 |
107 | programB.addShaderFromSourceFile(QOpenGLShader::Vertex, ":/shaders/Shaders/filterRandomMappingB.vert");
108 | programB.addShaderFromSourceFile(QOpenGLShader::Fragment, ":/shaders/Shaders/filterRandomMappingB.frag");
109 | programB.link();
110 | setlocale(LC_ALL, "");
111 | }
112 |
113 | /****************************************************************************/
114 | /****************************************************************************/
115 | /****************************************************************************/
116 | void LAURandomizePixelsGLWidget::process()
117 | {
118 | if (framesA.isEmpty()) {
119 | framesA = framesB;
120 | framesB.clear();
121 | }
122 |
123 | // SEE IF WE NEED NEW FBOS
124 | if (videoTexture) {
125 | if (frameBufferObject == NULL) {
126 | // CREATE A FORMAT OBJECT FOR CREATING THE FRAME BUFFER
127 | QOpenGLFramebufferObjectFormat frameBufferObjectFormat;
128 | frameBufferObjectFormat.setInternalTextureFormat(GL_RGBA32F);
129 |
130 | frameBufferObject = new QOpenGLFramebufferObject(videoTexture->width(), videoTexture->height(), frameBufferObjectFormat);
131 | frameBufferObject->release();
132 | } else if (frameBufferObject->width() != videoTexture->width() || frameBufferObject->height() != videoTexture->height()) {
133 | delete frameBufferObject;
134 |
135 | // CREATE A FORMAT OBJECT FOR CREATING THE FRAME BUFFER
136 | QOpenGLFramebufferObjectFormat frameBufferObjectFormat;
137 | frameBufferObjectFormat.setInternalTextureFormat(GL_RGBA32F);
138 |
139 | frameBufferObject = new QOpenGLFramebufferObject(videoTexture->width(), videoTexture->height(), frameBufferObjectFormat);
140 | frameBufferObject->release();
141 | }
142 |
143 | // CHECK TO SEE IF WE NEED TO INITIALIZE THE MAPPING TEXTURE
144 | if (mapTexture == NULL || mapTexture->width() != videoTexture->width() || mapTexture->height() != videoTexture->height()) {
145 | buildMappingTexture(videoTexture->width(), videoTexture->height());
146 | }
147 |
148 | // SET CLEAR COLOR AS NOT A NUMBERS
149 | glClearColor(NAN, NAN, NAN, NAN);
150 |
151 | // CALCULATE THE GRADIENT BUFFER
152 | if (frameBufferObject->bind()) {
153 | if (programA.bind()) {
154 | // CLEAR THE FRAME BUFFER OBJECT
155 | glViewport(0, 0, frameBufferObject->width(), frameBufferObject->height());
156 | glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
157 |
158 | // BIND VBOS FOR DRAWING TRIANGLES ON SCREEN
159 | if (quadVertexBuffer.bind()) {
160 | if (quadIndexBuffer.bind()) {
161 | // BIND THE TEXTURE FROM THE ORIGINAL SCAN
162 | glActiveTexture(GL_TEXTURE0);
163 | videoTexture->bind();
164 | programA.setUniformValue("qt_texture", 0);
165 |
166 | glActiveTexture(GL_TEXTURE1);
167 | mapTexture->bind();
168 | programA.setUniformValue("qt_map", 1);
169 |
170 | // SET THE LAYER IN THE RANDOM MAPPING TEXTURE
171 | int index = qFloor((double)rand() / (double)RAND_MAX * framesA.count());
172 | programA.setUniformValue("qt_index", framesA.at(index));
173 | framesB << framesA.takeAt(index);
174 |
175 | // TELL OPENGL PROGRAMMABLE PIPELINE HOW TO LOCATE VERTEX POSITION DATA
176 | glVertexAttribPointer(programA.attributeLocation("qt_vertex"), 4, GL_FLOAT, GL_FALSE, 4 * sizeof(float), 0);
177 | programA.enableAttributeArray("qt_vertex");
178 | glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
179 |
180 | // RELEASE THE FRAME BUFFER OBJECT AND ITS ASSOCIATED GLSL PROGRAMS
181 | quadIndexBuffer.release();
182 | }
183 | quadVertexBuffer.release();
184 | }
185 | programA.release();
186 | }
187 | frameBufferObject->release();
188 | }
189 | update();
190 | }
191 | }
192 |
193 | /****************************************************************************/
194 | /****************************************************************************/
195 | /****************************************************************************/
// Draws the widget: if no frame has been processed yet, falls back to the
// base-class display; otherwise combines the raw video texture with the
// randomized-pixel FBO using programB (see filterRandomMappingB shaders).
void LAURandomizePixelsGLWidget::paint()
{
    if (frameBufferObject == NULL) {
        // NO PROCESSED FRAME YET, SO LET THE BASE CLASS DRAW THE RAW VIDEO
        LAUVideoGLWidget::paint();
    } else {
        // SET THE VIEW PORT AND CLEAR THE SCREEN BUFFER
        glViewport(0, 0, localWidth, localHeight);
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

        // DISPLAY THE LAST FBO IN OUR LIST
        if (videoTexture) {
            if (programB.bind()) {
                if (quadVertexBuffer.bind()) {
                    if (quadIndexBuffer.bind()) {
                        // SET THE ACTIVE TEXTURE ON THE GPU: UNIT 0 HOLDS THE
                        // RAW VIDEO, UNIT 1 HOLDS THE RANDOMIZED-PIXEL FBO
                        glActiveTexture(GL_TEXTURE0);
                        videoTexture->bind();
                        programB.setUniformValue("qt_textureA", 0);

                        glActiveTexture(GL_TEXTURE1);
                        glBindTexture(GL_TEXTURE_2D, frameBufferObject->texture());
                        programB.setUniformValue("qt_textureB", 1);

                        // TELL OPENGL PROGRAMMABLE PIPELINE HOW TO LOCATE VERTEX POSITION DATA
                        programB.setAttributeBuffer("qt_vertex", GL_FLOAT, 0, 4, 4 * sizeof(float));
                        programB.enableAttributeArray("qt_vertex");

                        glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);

                        quadIndexBuffer.release();
                    }
                    quadVertexBuffer.release();
                }
                programB.release();
            }
        }
    }
}
234 |
--------------------------------------------------------------------------------
/laurandomizepixelsglwidget.h:
--------------------------------------------------------------------------------
1 | #ifndef LAURANDOMIZEPIXELSGLWIDGET_H
2 | #define LAURANDOMIZEPIXELSGLWIDGET_H
3 |
4 | #include
5 | #include
6 |
7 | #include "lauvideoglwidget.h"
8 |
9 | /****************************************************************************/
10 | /****************************************************************************/
11 | /****************************************************************************/
12 | class LAURandomizePixelsGLWidget : public LAUVideoGLWidget
13 | {
14 | Q_OBJECT
15 |
16 | public:
17 | explicit LAURandomizePixelsGLWidget(QWidget *parent = NULL) : LAUVideoGLWidget(parent), mapTexture(NULL), frameBufferObject(NULL)
18 | {
19 | ;
20 | }
21 | ~LAURandomizePixelsGLWidget();
22 |
23 | void initialize();
24 | void process();
25 | void paint();
26 |
27 | QImage grabImage();
28 |
29 | private:
30 | QList framesA, framesB;
31 | QOpenGLTexture *mapTexture;
32 | QOpenGLFramebufferObject *frameBufferObject;
33 | QOpenGLShaderProgram programA, programB;
34 |
35 | void buildMappingTexture(int cols, int rows);
36 | };
37 |
38 | #endif // LAURANDOMIZEPIXELSGLWIDGET_H
39 |
--------------------------------------------------------------------------------
/lausobeledgedetectorglwidget.cpp:
--------------------------------------------------------------------------------
1 | #include "lausobeledgedetectorglwidget.h"
2 | #include "locale.h"
3 |
4 | /****************************************************************************/
5 | /****************************************************************************/
6 | /****************************************************************************/
// Destructor: the GL context must be made current before deleting the FBO.
// The base-class destructor releases the video texture and vertex array.
LAUSobelEdgeDetectorGLWidget::~LAUSobelEdgeDetectorGLWidget()
{
    if (wasInitialized()) {
        makeCurrent();
        delete frameBufferObject;
    }
    qDebug() << "LAUSobelEdgeDetectorGLWidget::~LAUSobelEdgeDetectorGLWidget()";
}
15 |
16 | /****************************************************************************/
17 | /****************************************************************************/
18 | /****************************************************************************/
19 | void LAUSobelEdgeDetectorGLWidget::process()
20 | {
21 | // SEE IF WE NEED NEW FBOS
22 | if (videoTexture) {
23 | if (frameBufferObject == NULL) {
24 | // CREATE A FORMAT OBJECT FOR CREATING THE FRAME BUFFER
25 | QOpenGLFramebufferObjectFormat frameBufferObjectFormat;
26 | frameBufferObjectFormat.setInternalTextureFormat(GL_RGBA32F);
27 |
28 | frameBufferObject = new QOpenGLFramebufferObject(videoTexture->width(), videoTexture->height(), frameBufferObjectFormat);
29 | frameBufferObject->release();
30 | } else if (frameBufferObject->width() != videoTexture->width() || frameBufferObject->height() != videoTexture->height()) {
31 | delete frameBufferObject;
32 |
33 | // CREATE A FORMAT OBJECT FOR CREATING THE FRAME BUFFER
34 | QOpenGLFramebufferObjectFormat frameBufferObjectFormat;
35 | frameBufferObjectFormat.setInternalTextureFormat(GL_RGBA32F);
36 |
37 | frameBufferObject = new QOpenGLFramebufferObject(videoTexture->width(), videoTexture->height(), frameBufferObjectFormat);
38 | frameBufferObject->release();
39 | }
40 |
41 | // SET CLEAR COLOR AS NOT A NUMBERS
42 | glClearColor(NAN, NAN, NAN, NAN);
43 |
44 | // CALCULATE THE GRADIENT BUFFER
45 | if (frameBufferObject->bind()) {
46 | if (programA.bind()) {
47 | // CLEAR THE FRAME BUFFER OBJECT
48 | glViewport(1, 1, frameBufferObject->width() - 2, frameBufferObject->height() - 2);
49 | glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
50 |
51 | // BIND VBOS FOR DRAWING TRIANGLES ON SCREEN
52 | if (quadVertexBuffer.bind()) {
53 | if (quadIndexBuffer.bind()) {
54 | // BIND THE TEXTURE FROM THE ORIGINAL SCAN
55 | glActiveTexture(GL_TEXTURE0);
56 | videoTexture->bind();
57 | programA.setUniformValue("qt_texture", 0);
58 |
59 | // TELL OPENGL PROGRAMMABLE PIPELINE HOW TO LOCATE VERTEX POSITION DATA
60 | glVertexAttribPointer(programA.attributeLocation("qt_vertex"), 4, GL_FLOAT, GL_FALSE, 4 * sizeof(float), 0);
61 | programA.enableAttributeArray("qt_vertex");
62 | glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
63 |
64 | // RELEASE THE FRAME BUFFER OBJECT AND ITS ASSOCIATED GLSL PROGRAMS
65 | quadIndexBuffer.release();
66 | }
67 | quadVertexBuffer.release();
68 | }
69 | programA.release();
70 | }
71 | frameBufferObject->release();
72 | }
73 | }
74 | }
75 |
76 | /****************************************************************************/
77 | /****************************************************************************/
78 | /****************************************************************************/
79 | void LAUSobelEdgeDetectorGLWidget::initialize()
80 | {
81 | LAUVideoGLWidget::initialize();
82 |
83 | // NOW ADD OUR LIST OF HARRIS CORNER SHADER PROGRAMS
84 | setlocale(LC_NUMERIC, "C");
85 | programA.addShaderFromSourceFile(QOpenGLShader::Vertex, ":/shaders/Shaders/filterSobelEdge.vert");
86 | programA.addShaderFromSourceFile(QOpenGLShader::Fragment, ":/shaders/Shaders/filterSobelEdge.frag");
87 | programA.link();
88 | setlocale(LC_ALL, "");
89 | }
90 |
91 | /****************************************************************************/
92 | /****************************************************************************/
93 | /****************************************************************************/
// Draws the widget: shows the raw video until the first frame has been
// filtered, then displays the Sobel FBO through the base display shader.
void LAUSobelEdgeDetectorGLWidget::paint()
{
    if (frameBufferObject == NULL) {
        LAUVideoGLWidget::paint();
    } else {
        // SET THE VIEW PORT AND CLEAR THE SCREEN BUFFER
        glViewport(0, 0, localWidth, localHeight);
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

        // DISPLAY THE LAST FBO IN OUR LIST
        if (videoTexture) {
            if (program.bind()) {
                if (quadVertexBuffer.bind()) {
                    if (quadIndexBuffer.bind()) {
                        // SET THE ACTIVE TEXTURE ON THE GPU
                        glActiveTexture(GL_TEXTURE0);
                        glBindTexture(GL_TEXTURE_2D, frameBufferObject->texture());
                        program.setUniformValue("qt_texture", 0);
#ifdef Q_OS_WIN
                        // WINDOWS CAPTURE DELIVERS FRAMES ALREADY UPRIGHT;
                        // OTHER PLATFORMS NEED A VERTICAL FLIP IN THE SHADER
                        program.setUniformValue("qt_flip", false);
#else
                        program.setUniformValue("qt_flip", true);
#endif
                        // TELL OPENGL PROGRAMMABLE PIPELINE HOW TO LOCATE VERTEX POSITION DATA
                        program.setAttributeBuffer("qt_vertex", GL_FLOAT, 0, 4, 4 * sizeof(float));
                        program.enableAttributeArray("qt_vertex");

                        glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);

                        quadIndexBuffer.release();
                    }
                    quadVertexBuffer.release();
                }
                program.release();
            }
        }
    }
}
132 |
--------------------------------------------------------------------------------
/lausobeledgedetectorglwidget.h:
--------------------------------------------------------------------------------
1 | #ifndef LAUSOBELEDGEDETECTORGLWIDGET_H
2 | #define LAUSOBELEDGEDETECTORGLWIDGET_H
3 |
4 | #include
5 | #include
6 |
7 | #include "lauvideoglwidget.h"
8 |
9 | /****************************************************************************/
10 | /****************************************************************************/
11 | /****************************************************************************/
// Video filter widget that renders each incoming frame through a Sobel edge
// detection shader into an off-screen RGBA32F frame buffer.
class LAUSobelEdgeDetectorGLWidget : public LAUVideoGLWidget
{
    Q_OBJECT

public:
    explicit LAUSobelEdgeDetectorGLWidget(QWidget *parent = NULL) : LAUVideoGLWidget(parent), frameBufferObject(NULL) { ; }
    ~LAUSobelEdgeDetectorGLWidget();

    // Returns the most recent edge-filtered frame, or a null image if no
    // frame has been processed yet.
    QImage grabImage()
    {
        if (frameBufferObject) {
            return (frameBufferObject->toImage());
        }
        return (QImage());
    }

    void initialize();
    void process();
    void paint();

private:
    QOpenGLFramebufferObject *frameBufferObject; // holds the filtered frame (RGBA32F)
    QOpenGLShaderProgram programA;               // Sobel edge filter shaders
};
36 |
37 | #endif // LAUSOBELEDGEDETECTORGLWIDGET_H
38 |
--------------------------------------------------------------------------------
/lautiredetectorglfilter.cpp:
--------------------------------------------------------------------------------
1 | #include "lautiredetectorglfilter.h"
2 | #include "locale.h"
3 |
4 | /****************************************************************************/
5 | /****************************************************************************/
6 | /****************************************************************************/
// Destructor: the GL context must be made current before deleting the FBO.
// The base-class destructor releases the video texture and vertex array.
LAUTireDetectorGLWidget::~LAUTireDetectorGLWidget()
{
    if (wasInitialized()) {
        makeCurrent();
        delete frameBufferObject;
    }
    qDebug() << "LAUTireDetectorGLWidget::~LAUTireDetectorGLWidget()";
}
15 |
16 | /****************************************************************************/
17 | /****************************************************************************/
18 | /****************************************************************************/
19 | void LAUTireDetectorGLWidget::process()
20 | {
21 | // SEE IF WE NEED NEW FBOS
22 | if (videoTexture) {
23 | if (frameBufferObject == NULL) {
24 | // CREATE A FORMAT OBJECT FOR CREATING THE FRAME BUFFER
25 | QOpenGLFramebufferObjectFormat frameBufferObjectFormat;
26 | frameBufferObjectFormat.setInternalTextureFormat(GL_RGBA32F);
27 |
28 | frameBufferObject = new QOpenGLFramebufferObject(videoTexture->width(), videoTexture->height(), frameBufferObjectFormat);
29 | frameBufferObject->release();
30 | } else if (frameBufferObject->width() != videoTexture->width() || frameBufferObject->height() != videoTexture->height()) {
31 | delete frameBufferObject;
32 |
33 | // CREATE A FORMAT OBJECT FOR CREATING THE FRAME BUFFER
34 | QOpenGLFramebufferObjectFormat frameBufferObjectFormat;
35 | frameBufferObjectFormat.setInternalTextureFormat(GL_RGBA32F);
36 |
37 | frameBufferObject = new QOpenGLFramebufferObject(videoTexture->width(), videoTexture->height(), frameBufferObjectFormat);
38 | frameBufferObject->release();
39 | }
40 |
41 | // SET CLEAR COLOR AS NOT A NUMBERS
42 | glClearColor(NAN, NAN, NAN, NAN);
43 |
44 | // CALCULATE THE GRADIENT BUFFER
45 | if (frameBufferObject->bind()) {
46 | if (programA.bind()) {
47 | // CLEAR THE FRAME BUFFER OBJECT
48 | glViewport(1, 1, frameBufferObject->width() - 2, frameBufferObject->height() - 2);
49 | glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
50 |
51 | // BIND VBOS FOR DRAWING TRIANGLES ON SCREEN
52 | if (quadVertexBuffer.bind()) {
53 | if (quadIndexBuffer.bind()) {
54 | // BIND THE TEXTURE FROM THE ORIGINAL SCAN
55 | glActiveTexture(GL_TEXTURE0);
56 | videoTexture->bind();
57 | programA.setUniformValue("qt_texture", 0);
58 |
59 | // SET THE RADIUS AND SKEW PARAMETER
60 | programA.setUniformValue("qt_radius", 50);
61 | programA.setUniformValue("qt_skew", 1.25f);
62 |
63 | // TELL OPENGL PROGRAMMABLE PIPELINE HOW TO LOCATE VERTEX POSITION DATA
64 | glVertexAttribPointer(programA.attributeLocation("qt_vertex"), 4, GL_FLOAT, GL_FALSE, 4 * sizeof(float), 0);
65 | programA.enableAttributeArray("qt_vertex");
66 | glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
67 |
68 | // RELEASE THE FRAME BUFFER OBJECT AND ITS ASSOCIATED GLSL PROGRAMS
69 | quadIndexBuffer.release();
70 | }
71 | quadVertexBuffer.release();
72 | }
73 | programA.release();
74 | }
75 | frameBufferObject->release();
76 | }
77 | }
78 | }
79 |
80 | /****************************************************************************/
81 | /****************************************************************************/
82 | /****************************************************************************/
83 | void LAUTireDetectorGLWidget::initialize()
84 | {
85 | LAUVideoGLWidget::initialize();
86 |
87 | // NOW ADD OUR LIST OF HARRIS CORNER SHADER PROGRAMS
88 | setlocale(LC_NUMERIC, "C");
89 | programA.addShaderFromSourceFile(QOpenGLShader::Vertex, ":/shaders/Shaders/filterSkewBlur.vert");
90 | programA.addShaderFromSourceFile(QOpenGLShader::Fragment, ":/shaders/Shaders/filterSkewBlur.frag");
91 | programA.link();
92 | setlocale(LC_ALL, "");
93 | }
94 |
95 | /****************************************************************************/
96 | /****************************************************************************/
97 | /****************************************************************************/
// Draws the widget: shows the raw video until the first frame has been
// filtered, then displays the skew-blur FBO through the base display shader.
void LAUTireDetectorGLWidget::paint()
{
    if (frameBufferObject == NULL) {
        LAUVideoGLWidget::paint();
    } else {
        // SET THE VIEW PORT AND CLEAR THE SCREEN BUFFER
        glViewport(0, 0, localWidth, localHeight);
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

        // DISPLAY THE LAST FBO IN OUR LIST
        if (videoTexture) {
            if (program.bind()) {
                if (quadVertexBuffer.bind()) {
                    if (quadIndexBuffer.bind()) {
                        // SET THE ACTIVE TEXTURE ON THE GPU
                        glActiveTexture(GL_TEXTURE0);
                        glBindTexture(GL_TEXTURE_2D, frameBufferObject->texture());
                        program.setUniformValue("qt_texture", 0);

                        // TELL OPENGL PROGRAMMABLE PIPELINE HOW TO LOCATE VERTEX POSITION DATA
                        program.setAttributeBuffer("qt_vertex", GL_FLOAT, 0, 4, 4 * sizeof(float));
                        program.enableAttributeArray("qt_vertex");

                        glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);

                        quadIndexBuffer.release();
                    }
                    quadVertexBuffer.release();
                }
                program.release();
            }
        }
    }
}
132 |
--------------------------------------------------------------------------------
/lautiredetectorglfilter.h:
--------------------------------------------------------------------------------
1 | #ifndef LAUTIREDETECTORGLFILTER_H
2 | #define LAUTIREDETECTORGLFILTER_H
3 |
4 | #include
5 | #include
6 |
7 | #include "lauvideoglwidget.h"
8 |
9 | /****************************************************************************/
10 | /****************************************************************************/
11 | /****************************************************************************/
// Video filter widget that renders each incoming frame through a skew-blur
// shader (radius/skew set in process()) into an off-screen RGBA32F buffer.
class LAUTireDetectorGLWidget : public LAUVideoGLWidget
{
    Q_OBJECT

public:
    explicit LAUTireDetectorGLWidget(QWidget *parent = NULL) : LAUVideoGLWidget(parent), frameBufferObject(NULL) { ; }
    ~LAUTireDetectorGLWidget();

    // Returns the most recent filtered frame, or a null image if no frame
    // has been processed yet.
    QImage grabImage()
    {
        if (frameBufferObject) {
            return (frameBufferObject->toImage());
        }
        return (QImage());
    }

    void initialize();
    void process();
    void paint();

private:
    QOpenGLFramebufferObject *frameBufferObject; // holds the filtered frame (RGBA32F)
    QOpenGLShaderProgram programA;               // skew-blur filter shaders
};
36 |
37 | #endif // LAUTIREDETECTORGLFILTER_H
38 |
--------------------------------------------------------------------------------
/lauvideoglwidget.cpp:
--------------------------------------------------------------------------------
1 | #include "lauvideoglwidget.h"
2 | #include "locale.h"
3 |
4 | /****************************************************************************/
5 | /****************************************************************************/
6 | /****************************************************************************/
// Destructor: makes the GL context current before releasing GPU resources
// (the video texture and the vertex array object).
LAUVideoGLWidget::~LAUVideoGLWidget()
{
    if (wasInitialized()) {
        makeCurrent();
        if (videoTexture) {
            delete videoTexture;
        }
        vertexArrayObject.release();
    }
    qDebug() << QString("LAUVideoGLWidget::~LAUVideoGLWidget()");
}
18 |
19 | /****************************************************************************/
20 | /****************************************************************************/
21 | /****************************************************************************/
// Receives one camera frame: optionally forwards it to the OpenCV recorder
// (Windows only), uploads the pixels into the GPU video texture, runs the
// subclass filter via process(), and schedules a repaint.
void LAUVideoGLWidget::setFrame(const QVideoFrame &frame)
{
    // MAP A SHALLOW COPY SO WE CAN READ THE FRAME'S PIXELS ON THE CPU
    QVideoFrame localFrame = frame;
    if (localFrame.map(QAbstractVideoBuffer::ReadOnly)) {
#ifdef Q_OS_WIN
        // PASS THE VIDEO FRAME TO THE VIDEO RECORDER
        // NOTE(review): `recorder` itself is dereferenced without a NULL check;
        // assumes setVideoRecorder() was called before the first frame -- confirm.
        if (*recorder && (*recorder)->isOpened()){
            cv::Mat image;
            // WRAP THE FRAME'S PIXELS IN A cv::Mat HEADER (NO COPY IS MADE)
            switch (frame.pixelFormat()){
            case QVideoFrame::Format_ABGR32:
                image = cv::Mat(frame.height(), frame.width(), CV_8UC4, (void*)frame.bits(), frame.bytesPerLine());
                break;
            case QVideoFrame::Format_RGB24:
                image = cv::Mat(frame.height(), frame.width(), CV_8UC3, (void*)frame.bits(), frame.bytesPerLine());
                break;
            default:
                // NOTE(review): other formats are assumed to be 3-channel here -- verify
                image = cv::Mat(frame.height(), frame.width(), CV_8UC3, (void*)frame.bits(), frame.bytesPerLine());
            }
            (*recorder)->write(image);
        }
#endif
        // REPORT FRAME RATE TO THE CONSOLE (AVERAGED OVER 30 FRAMES)
        counter++;
        if (counter >= 30) {
            qDebug() << QString("%1 fps").arg(1000.0 * (float)counter / (float)time.elapsed());
            time.restart();
            counter = 0;
        }

        makeCurrent();

        // SEE IF WE NEED A NEW TEXTURE TO HOLD THE INCOMING VIDEO FRAME
        if (!videoTexture || videoTexture->width() != localFrame.width() || videoTexture->height() != localFrame.height()) {
            if (videoTexture) {
                delete videoTexture;
            }

            // CREATE THE GPU SIDE TEXTURE BUFFER TO HOLD THE INCOMING VIDEO
            videoTexture = new QOpenGLTexture(QOpenGLTexture::Target2D);
            videoTexture->setSize(localFrame.width(), localFrame.height());
            videoTexture->setFormat(QOpenGLTexture::RGBA32F);
            videoTexture->setWrapMode(QOpenGLTexture::ClampToBorder);
            videoTexture->setMinificationFilter(QOpenGLTexture::Nearest);
            videoTexture->setMagnificationFilter(QOpenGLTexture::Nearest);
            videoTexture->allocateStorage();
        }

        // UPLOAD THE CPU BUFFER TO THE GPU TEXTURE
        // COPY FRAME BUFFER TEXTURE FROM GPU TO LOCAL CPU BUFFER
        // (ONLY 8-BIT ARGB32/RGB32 FRAMES ARE UPLOADED; OTHERS ARE SKIPPED)
        QVideoFrame::PixelFormat format = localFrame.pixelFormat();
        if (format == QVideoFrame::Format_ARGB32 || format == QVideoFrame::Format_RGB32) {
            unsigned int bytesPerSample = localFrame.bytesPerLine() / localFrame.width() / 4;
            if (bytesPerSample == sizeof(unsigned char)) {
                videoTexture->setData(QOpenGLTexture::BGRA, QOpenGLTexture::UInt8, (const void *)localFrame.bits());
            }
        }
        localFrame.unmap();

        // PROCESS THE TEXTURE
        process();

        // UPDATE THE USER DISPLAY
        update();
    }
}
87 |
88 | /****************************************************************************/
89 | /****************************************************************************/
90 | /****************************************************************************/
// Receives one frame as a QImage: uploads it into the GPU video texture,
// runs the subclass filter via process(), and schedules a repaint.
void LAUVideoGLWidget::setFrame(QImage frame)
{
    if (frame.isNull() == false) {
        // REPORT FRAME RATE TO THE CONSOLE (AVERAGED OVER 30 FRAMES)
        counter++;
        if (counter >= 30) {
            qDebug() << QString("%1 fps").arg(1000.0 * (float)counter / (float)time.elapsed());
            time.restart();
            counter = 0;
        }

        makeCurrent();

        // SEE IF WE NEED A NEW TEXTURE TO HOLD THE INCOMING VIDEO FRAME
        if (!videoTexture || videoTexture->width() != frame.width() || videoTexture->height() != frame.height()) {
            if (videoTexture) {
                delete videoTexture;
            }

            // CREATE THE GPU SIDE TEXTURE BUFFER TO HOLD THE INCOMING VIDEO
            videoTexture = new QOpenGLTexture(QOpenGLTexture::Target2D);
            videoTexture->setSize(frame.width(), frame.height());
            videoTexture->setFormat(QOpenGLTexture::RGBA32F);
            videoTexture->setWrapMode(QOpenGLTexture::ClampToBorder);
            videoTexture->setMinificationFilter(QOpenGLTexture::Nearest);
            videoTexture->setMagnificationFilter(QOpenGLTexture::Nearest);
            videoTexture->allocateStorage();

            qDebug() << videoTexture->width() << videoTexture->height();
        }

        // UPLOAD THE CPU BUFFER TO THE GPU TEXTURE
        // COPY FRAME BUFFER TEXTURE FROM GPU TO LOCAL CPU BUFFER
        // (ONLY 8-BIT ARGB32 IMAGES ARE UPLOADED; OTHER FORMATS ARE SKIPPED)
        QImage::Format format = frame.format();
        if (format == QImage::Format_ARGB32) {
            unsigned int bytesPerSample = frame.bytesPerLine() / frame.width() / 4;
            if (bytesPerSample == sizeof(unsigned char)) {
                videoTexture->setData(QOpenGLTexture::BGRA, QOpenGLTexture::UInt8, (const void *)frame.bits());
            }
        }

        // PROCESS THE TEXTURE
        process();

        // UPDATE THE USER DISPLAY
        update();
    }
}
139 |
140 | /****************************************************************************/
141 | /****************************************************************************/
142 | /****************************************************************************/
143 | void LAUVideoGLWidget::initialize()
144 | {
145 | // INITIALIZE OUR GL CALLS AND SET THE CLEAR COLOR
146 | initializeOpenGLFunctions();
147 | glClearColor(0.5f, 0.0f, 0.0f, 1.0f);
148 |
149 | // CREATE THE VERTEX ARRAY OBJECT FOR FEEDING VERTICES TO OUR SHADER PROGRAMS
150 | vertexArrayObject.create();
151 | vertexArrayObject.bind();
152 |
153 | // CREATE VERTEX BUFFER TO HOLD CORNERS OF QUADRALATERAL
154 | quadVertexBuffer = QOpenGLBuffer(QOpenGLBuffer::VertexBuffer);
155 | quadVertexBuffer.create();
156 | quadVertexBuffer.setUsagePattern(QOpenGLBuffer::StaticDraw);
157 | if (quadVertexBuffer.bind()) {
158 | // ALLOCATE THE VERTEX BUFFER FOR HOLDING THE FOUR CORNERS OF A RECTANGLE
159 | quadVertexBuffer.allocate(16 * sizeof(float));
160 | float *buffer = (float *)quadVertexBuffer.map(QOpenGLBuffer::WriteOnly);
161 | if (buffer) {
162 | buffer[0] = -1.0;
163 | buffer[1] = -1.0;
164 | buffer[2] = 0.0;
165 | buffer[3] = 1.0;
166 | buffer[4] = +1.0;
167 | buffer[5] = -1.0;
168 | buffer[6] = 0.0;
169 | buffer[7] = 1.0;
170 | buffer[8] = +1.0;
171 | buffer[9] = +1.0;
172 | buffer[10] = 0.0;
173 | buffer[11] = 1.0;
174 | buffer[12] = -1.0;
175 | buffer[13] = +1.0;
176 | buffer[14] = 0.0;
177 | buffer[15] = 1.0;
178 | quadVertexBuffer.unmap();
179 | } else {
180 | qDebug() << QString("quadVertexBuffer not allocated.") << glGetError();
181 | }
182 | quadVertexBuffer.release();
183 | }
184 |
185 | // CREATE INDEX BUFFER TO ORDERINGS OF VERTICES FORMING POLYGON
186 | quadIndexBuffer = QOpenGLBuffer(QOpenGLBuffer::IndexBuffer);
187 | quadIndexBuffer.create();
188 | quadIndexBuffer.setUsagePattern(QOpenGLBuffer::StaticDraw);
189 | if (quadIndexBuffer.bind()) {
190 | quadIndexBuffer.allocate(6 * sizeof(unsigned int));
191 | unsigned int *indices = (unsigned int *)quadIndexBuffer.map(QOpenGLBuffer::WriteOnly);
192 | if (indices) {
193 | indices[0] = 0;
194 | indices[1] = 1;
195 | indices[2] = 2;
196 | indices[3] = 0;
197 | indices[4] = 2;
198 | indices[5] = 3;
199 | quadIndexBuffer.unmap();
200 | } else {
201 | qDebug() << QString("indiceBufferA buffer mapped from GPU.");
202 | }
203 | quadIndexBuffer.release();
204 | }
205 |
206 | // CREATE SHADER FOR SHOWING THE VIDEO NOT AVAILABLE IMAGE
207 | setlocale(LC_NUMERIC, "C");
208 | program.addShaderFromSourceFile(QOpenGLShader::Vertex, ":/shaders/Shaders/displayRGBVideo.vert");
209 | program.addShaderFromSourceFile(QOpenGLShader::Fragment, ":/shaders/Shaders/displayRGBVideo.frag");
210 | program.link();
211 | setlocale(LC_ALL, "");
212 | }
213 |
214 | /****************************************************************************/
215 | /****************************************************************************/
216 | /****************************************************************************/
217 | void LAUVideoGLWidget::resize(int w, int h)
218 | {
219 | // Get the Desktop Widget so that we can get information about multiple monitors connected to the system.
220 | QDesktopWidget *dkWidget = QApplication::desktop();
221 | QList screenList = QGuiApplication::screens();
222 | qreal devicePixelRatio = screenList[dkWidget->screenNumber(this)]->devicePixelRatio();
223 | localHeight = h * devicePixelRatio;
224 | localWidth = w * devicePixelRatio;
225 | }
226 |
227 | /****************************************************************************/
228 | /****************************************************************************/
229 | /****************************************************************************/
// Default display path: draws the raw video texture over a full-screen quad
// using the display shader. Subclasses override this to show filtered output.
void LAUVideoGLWidget::paint()
{
    // SET THE VIEW PORT AND CLEAR THE SCREEN BUFFER
    glViewport(0, 0, localWidth, localHeight);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // MAKE SURE WE HAVE A TEXTURE TO SHOW
    if (videoTexture) {
        if (program.bind()) {
            if (quadVertexBuffer.bind()) {
                if (quadIndexBuffer.bind()) {
                    // SET THE ACTIVE TEXTURE ON THE GPU
                    glActiveTexture(GL_TEXTURE0);
                    videoTexture->bind();
                    program.setUniformValue("qt_texture", 0);
#ifdef Q_OS_WIN
                    // WINDOWS CAPTURE DELIVERS FRAMES ALREADY UPRIGHT; OTHER
                    // PLATFORMS NEED A VERTICAL FLIP IN THE SHADER
                    program.setUniformValue("qt_flip", false);
#else
                    program.setUniformValue("qt_flip", true);
#endif
                    // TELL OPENGL PROGRAMMABLE PIPELINE HOW TO LOCATE VERTEX POSITION DATA
                    program.setAttributeBuffer("qt_vertex", GL_FLOAT, 0, 4, 4 * sizeof(float));
                    program.enableAttributeArray("qt_vertex");

                    glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);

                    quadIndexBuffer.release();
                }
                quadVertexBuffer.release();
            }
            program.release();
        }
    }
}
264 |
--------------------------------------------------------------------------------
/lauvideoglwidget.h:
--------------------------------------------------------------------------------
1 | #ifndef LAUVIDEOGLWIDGET_H
2 | #define LAUVIDEOGLWIDGET_H
3 |
4 | #include
5 | #include
6 | #include
7 | #include
8 | #include
9 | #include
10 | #include
11 | #include
12 | #include
13 | #include
14 | #include
15 | #include
16 | #include
17 | #include
18 | #include
19 | #include
20 |
21 | #ifdef Q_OS_WIN
22 | #include
23 | #include
24 | #include
25 | #include
26 | #include
27 |
28 | using namespace cv;
29 | using namespace std;
30 | #endif
31 |
32 | /****************************************************************************/
33 | /****************************************************************************/
34 | /****************************************************************************/
35 | class LAUVideoGLWidget : public QOpenGLWidget, protected QOpenGLFunctions
36 | {
37 | Q_OBJECT
38 |
39 | public:
40 | explicit LAUVideoGLWidget(QWidget *parent = NULL) : QOpenGLWidget(parent), videoTexture(NULL), counter(0) { ; }
41 | ~LAUVideoGLWidget();
42 |
43 | virtual bool isValid() const
44 | {
45 | return (wasInitialized());
46 | }
47 |
48 | bool wasInitialized() const
49 | {
50 | return (vertexArrayObject.isCreated());
51 | }
52 |
53 | #ifdef Q_OS_WIN
54 | void setVideoRecorder(cv::VideoWriter **handle)
55 | {
56 | recorder = handle;
57 | }
58 | #endif
59 |
60 | void setFrame(const QVideoFrame &frame);
61 | void setFrame(QImage frame);
62 |
63 | virtual QImage grabImage()
64 | {
65 | if (videoTexture) {
66 | makeCurrent();
67 | QImage image(videoTexture->width(), videoTexture->height(), QImage::Format_RGBA8888);
68 | videoTexture->bind();
69 | glGetTexImage(GL_TEXTURE_2D, 0, GL_RGBA, GL_UNSIGNED_BYTE, image.bits());
70 | videoTexture->release();
71 | return (image);
72 | }
73 | return (QImage());
74 | }
75 |
76 | virtual void process() { ; }
77 | virtual void initialize();
78 | virtual void resize(int w, int h);
79 | virtual void paint();
80 |
81 | protected:
82 | void initializeGL()
83 | {
84 | initialize();
85 | }
86 |
87 | void resizeGL(int w, int h)
88 | {
89 | resize(w, h);
90 | }
91 |
92 | void paintGL()
93 | {
94 | paint();
95 | }
96 |
97 | #ifdef Q_OS_WIN
98 | cv::VideoWriter **recorder;
99 | #endif
100 |
101 | QOpenGLVertexArrayObject vertexArrayObject;
102 | QOpenGLBuffer quadVertexBuffer, quadIndexBuffer;
103 | QOpenGLShaderProgram program;
104 | QOpenGLTexture *videoTexture;
105 |
106 | int localWidth, localHeight;
107 | qreal devicePixelRatio;
108 |
109 | private:
110 | int counter;
111 | QTime time;
112 | };
113 |
114 | #endif // LAUVIDEOGLWIDGET_H
115 |
--------------------------------------------------------------------------------
/lauvideosurface.cpp:
--------------------------------------------------------------------------------
1 | #include "lauvideosurface.h"
2 | #include
3 |
4 | /****************************************************************************/
5 | /****************************************************************************/
6 | /****************************************************************************/
7 | QList LAUVideoSurface::supportedPixelFormats(QAbstractVideoBuffer::HandleType type) const
8 | {
9 | QList list;
10 | if (type == QAbstractVideoBuffer::NoHandle) {
11 | list << QVideoFrame::Format_RGB32;
12 | list << QVideoFrame::Format_ARGB32;
13 | list << QVideoFrame::Format_ARGB32_Premultiplied;
14 | list << QVideoFrame::Format_RGB555;
15 | }
16 | return (list);
17 | }
18 |
19 | /****************************************************************************/
20 | /****************************************************************************/
21 | /****************************************************************************/
QVideoSurfaceFormat LAUVideoSurface::nearestFormat(const QVideoSurfaceFormat &format) const
{
    // Accept whatever format the producer proposes, unchanged.
    return (format);
}
26 |
27 | /****************************************************************************/
28 | /****************************************************************************/
29 | /****************************************************************************/
30 | bool LAUVideoSurface::isFormatSupported(const QVideoSurfaceFormat &format) const
31 | {
32 | if (QVideoFrame::imageFormatFromPixelFormat(format.pixelFormat()) == QImage::Format_Invalid) {
33 | return (false);
34 | } else if (format.frameSize().isEmpty()) {
35 | return (false);
36 | } else if (format.handleType() != QAbstractVideoBuffer::NoHandle) {
37 | return (false);
38 | }
39 | return (true);
40 | }
41 |
42 | /****************************************************************************/
43 | /****************************************************************************/
44 | /****************************************************************************/
45 | bool LAUVideoSurface::present(const QVideoFrame &frame)
46 | {
47 | // SEND THE IN-COMING VIDEO TO THE LABEL WIDGET, IF IT EXISTS
48 | if (labelWidget) {
49 | labelWidget->setFrame(frame);
50 | }
51 | return (true);
52 | }
53 |
54 | /****************************************************************************/
55 | /****************************************************************************/
56 | /****************************************************************************/
bool LAUVideoSurface::start(const QVideoSurfaceFormat &format)
{
    // No local setup needed; defer entirely to the base class.
    return (QAbstractVideoSurface::start(format));
}
61 |
62 | /****************************************************************************/
63 | /****************************************************************************/
64 | /****************************************************************************/
void LAUVideoSurface::stop()
{
    // No local teardown needed; defer entirely to the base class.
    QAbstractVideoSurface::stop();
}
69 |
--------------------------------------------------------------------------------
/lauvideosurface.h:
--------------------------------------------------------------------------------
1 | #ifndef LAUVIDEOSURFACE_H
2 | #define LAUVIDEOSURFACE_H
3 |
4 | #include
5 | #include
6 | #include
7 | #include
8 | #include
9 | #include
10 | #include
11 |
12 | #include "lauvideoglwidget.h"
13 |
14 | /****************************************************************************/
15 | /****************************************************************************/
16 | /****************************************************************************/
17 | class LAUVideoSurface : public QAbstractVideoSurface
18 | {
19 | Q_OBJECT
20 |
21 | public:
22 | explicit LAUVideoSurface(QObject *parent = NULL) : QAbstractVideoSurface(parent), labelWidget(NULL) { ; }
23 |
24 | LAUVideoGLWidget *label() const
25 | {
26 | return (labelWidget);
27 | }
28 |
29 | void setLabel(LAUVideoGLWidget *lbl)
30 | {
31 | labelWidget = lbl;
32 | }
33 |
34 | QVideoSurfaceFormat nearestFormat(const QVideoSurfaceFormat &format) const;
35 | bool isFormatSupported(const QVideoSurfaceFormat &format) const;
36 | bool present(const QVideoFrame &frame);
37 | bool start(const QVideoSurfaceFormat &format);
38 | void stop();
39 |
40 | QList supportedPixelFormats(QAbstractVideoBuffer::HandleType type = QAbstractVideoBuffer::NoHandle) const;
41 |
42 | private:
43 | LAUVideoGLWidget *labelWidget;
44 | };
45 |
46 | #endif // LAUVIDEOSURFACE_H
47 |
--------------------------------------------------------------------------------
/lauwebcameracapture.qrc:
--------------------------------------------------------------------------------
<RCC>
    <qresource prefix="/">
        <file>Shaders/filterHarrisCornersE.vert</file>
        <file>Shaders/filterHarrisCornersE.frag</file>
        <file>Shaders/filterHarrisCornersD.vert</file>
        <file>Shaders/filterHarrisCornersD.frag</file>
        <file>Shaders/filterHarrisCornersC.vert</file>
        <file>Shaders/filterHarrisCornersC.frag</file>
        <file>Shaders/filterHarrisCornersB.vert</file>
        <file>Shaders/filterHarrisCornersB.frag</file>
        <file>Shaders/filterHarrisCornersA.vert</file>
        <file>Shaders/filterHarrisCornersA.frag</file>
        <file>Shaders/displayRGBVideo.vert</file>
        <file>Shaders/displayRGBVideo.frag</file>
        <file>Shaders/filterRandomMappingB.vert</file>
        <file>Shaders/filterRandomMappingB.frag</file>
        <file>Shaders/filterRandomMappingA.frag</file>
        <file>Shaders/filterRandomMappingA.vert</file>
        <file>Shaders/filterBinaryMedian.vert</file>
        <file>Shaders/filterBinaryMedian.frag</file>
        <file>Shaders/filterAdaptiveThreshold.vert</file>
        <file>Shaders/filterAdaptiveThreshold.frag</file>
        <file>Shaders/filterYGaussian.vert</file>
        <file>Shaders/filterYGaussian.frag</file>
        <file>Shaders/filterXGaussian.vert</file>
        <file>Shaders/filterXGaussian.frag</file>
        <file>Shaders/filterRGBAtoGray.frag</file>
        <file>Shaders/filterRGBAtoGray.vert</file>
        <file>Shaders/filterSobelEdge.frag</file>
        <file>Shaders/filterSobelEdge.vert</file>
        <file>Shaders/filterBinaryLookUpTable.vert</file>
        <file>Shaders/filterBinaryLookUpTable.frag</file>
        <file>Shaders/filterMaxRGBA.vert</file>
        <file>Shaders/filterMinRGBA.frag</file>
        <file>Shaders/filterMinRGBA.vert</file>
        <file>Shaders/filterMaxRGBA.frag</file>
        <file>Shaders/displayCalTag.vert</file>
        <file>Shaders/displayCalTag.frag</file>
        <file>Shaders/calTagXYZW.frag</file>
        <file>Shaders/calTagXYZW.vert</file>
        <file>Shaders/filterDrawFace.vert</file>
        <file>Shaders/filterDrawFace.frag</file>
        <file>Shaders/filterSkewBlur.vert</file>
        <file>Shaders/filterSkewBlur.frag</file>
        <file>Shaders/equalizeHistogramRGBAFilter.frag</file>
        <file>Shaders/equalizeHistogramRGBAFilter.vert</file>
        <file>Shaders/rgbaToHistogramFilter.frag</file>
        <file>Shaders/rgbaToHistogramFilter.geom</file>
        <file>Shaders/rgbaToHistogramFilter.vert</file>
    </qresource>
</RCC>
--------------------------------------------------------------------------------
/lauwebcamerawidget.cpp:
--------------------------------------------------------------------------------
1 | #include "lauwebcamerawidget.h"
2 | #include "lautiredetectorglfilter.h"
3 | #include "laurandomizepixelsglwidget.h"
4 | #include "lausobeledgedetectorglwidget.h"
5 | #include "lauharriscornerdetectorglwidget.h"
6 | #include "lauhistogramequalizationglwidget.h"
7 | #include "laufacialfeaturedetectorglwidget.h"
8 |
9 | #include
10 | #include
11 | #include
12 | #include
13 |
14 | QUrl LAUWebCameraWidget::localURL = QUrl::fromLocalFile(QString("%1/videofile.mp4").arg(QStandardPaths::writableLocation(QStandardPaths::TempLocation)));
15 |
16 | /****************************************************************************/
17 | /****************************************************************************/
18 | /****************************************************************************/
19 | LAUWebCameraWidget::LAUWebCameraWidget(QCamera::CaptureMode capture, QWidget *parent) : QWidget(parent), mode(capture), thread(NULL), camera(NULL), recorder(NULL), imageCapture(NULL), surface(NULL)
20 | {
21 | // SEE IF THERE IS A LEFTOVER VIDEO FILE FROM A PREVIOUS RUN OF THE SOFTWARE
22 | //saveVideoFile();
23 |
24 | this->setLayout(new QVBoxLayout());
25 | this->layout()->setContentsMargins(6, 6, 6, 6);
26 |
27 | // ASK THE USER WHAT FILTER THEY WANT TO IMPLEMENT
28 | QStringList items;
29 | items << QString("Facial Features");
30 | items << QString("Harris Corners");
31 | items << QString("Randomized Pixels");
32 | items << QString("Histogram Equalize");
33 | items << QString("Tire Detector");
34 | items << QString("Raw Video");
35 | items << QString("Sobel Edges");
36 |
37 | bool ok = false;
38 | QString string = QInputDialog::getItem(nullptr, QString("Web Camera Widget"), QString("Select video filter"), items, 3, false, &ok);
39 |
40 | if (ok) {
41 | if (string == QString("Raw Video")) {
42 | label = new LAUVideoGLWidget();
43 | } else if (string == QString("Facial Features")) {
44 | label = new LAUFacialFeatureDetectorGLWidget();
45 | } else if (string == QString("Tire Detector")) {
46 | label = new LAUTireDetectorGLWidget();
47 | } else if (string == QString("Harris Corners")) {
48 | label = new LAUHarrisCornerDetectorGLWidget();
49 | } else if (string == QString("Histogram Equalize")){
50 | label = new LAUHistogramEqualizationGLWidget();
51 | } else if (string == QString("Randomized Pixels")) {
52 | label = new LAURandomizePixelsGLWidget();
53 | } else if (string == QString("Sobel Edges")) {
54 | label = new LAUSobelEdgeDetectorGLWidget();
55 | }
56 | } else {
57 | label = new LAUVideoGLWidget();
58 | }
59 | #ifdef Q_OS_WIN
60 | label->setVideoRecorder(&recorder);
61 | #endif
62 | label->setSizePolicy(QSizePolicy::Expanding, QSizePolicy::Expanding);
63 | this->layout()->addWidget(label);
64 |
65 | QStringList strings;
66 | QList cameras = QCameraInfo::availableCameras();
67 | for (int n = 0; n < cameras.count(); n++) {
68 | strings << cameras.at(n).description();
69 | }
70 |
71 | if (strings.count() > 1) {
72 | bool okay = false;
73 | QString string = QInputDialog::getItem(this, QString("Select Camera"), QString("Select input device"), strings, 0, false, &okay);
74 | if (okay) {
75 | int n = strings.indexOf(string);
76 | camera = new QCamera(cameras.at(n));
77 | }
78 | } else if (strings.count() == 1) {
79 | camera = new QCamera(cameras.first());
80 | } else {
81 | QMessageBox::warning(this, QString("LAUWebCam"), QString("No camera available."));
82 | }
83 |
84 | if (camera) {
85 | surface = new LAUVideoSurface();
86 | surface->setLabel(label);
87 |
88 | QCameraViewfinderSettings set = camera->viewfinderSettings();
89 |
90 | //QList resolutions = camera->supportedViewfinderResolutions(set);
91 | //QList formats = camera->supportedViewfinderPixelFormats(set);
92 | //QList ranges = camera->supportedViewfinderFrameRateRanges(set);
93 |
94 | set.setResolution(LAUWEBCAMERAWIDGETWIDTH, LAUWEBCAMERAWIDGETHEIGHT);
95 | set.setMaximumFrameRate(LAUWEBCAMERAWIDGETFPS);
96 | set.setMinimumFrameRate(LAUWEBCAMERAWIDGETFPS);
97 | //set.setPixelFormat(QVideoFrame::Format_ARGB32);
98 |
99 | camera->setViewfinderSettings(set);
100 | camera->setViewfinder(surface);
101 | camera->setCaptureMode(mode);
102 |
103 | if (mode == QCamera::CaptureStillImage) {
104 | imageCapture = new QCameraImageCapture(camera);
105 | imageCapture->setCaptureDestination(QCameraImageCapture::CaptureToBuffer);
106 | connect(imageCapture, SIGNAL(imageCaptured(int, QImage)), this, SLOT(onImageAvailable(int, QImage)));
107 | }
108 |
109 | // CREATE A NEW THREAD TO HOST THE CAMERA
110 | thread = new QThread();
111 | camera->moveToThread(thread);
112 | thread->start();
113 | }
114 | label->setMinimumSize(qMin(LAUWEBCAMERAWIDGETWIDTH, 640), qMin(LAUWEBCAMERAWIDGETHEIGHT, 480));
115 | }
116 |
117 | /****************************************************************************/
118 | /****************************************************************************/
119 | /****************************************************************************/
LAUWebCameraWidget::~LAUWebCameraWidget()
{
    // Stop the worker thread first so nothing is still delivering frames
    // while the surface and camera are destroyed below.  The event loop is
    // pumped so queued cross-thread deliveries can drain while we wait.
    if (thread) {
        thread->quit();
        while (thread->isRunning()) {
            qApp->processEvents();
        }
        delete thread;
    }

    if (surface) {
        surface->stop();
        delete surface;
    }

    // NOTE(review): the camera was moved to the worker thread in the
    // constructor; it is stopped and deleted here from the GUI thread after
    // that thread has finished — confirm this ordering is safe.
    if (camera) {
        camera->stop();
        delete camera;
    }

    // DELETE TEMPORARY VIDEO RECORDING FILE IF IT EXISTS
    if (QFile::exists(localURL.toLocalFile())) {
        QFile::remove(localURL.toLocalFile());
    }
}
145 |
146 | /****************************************************************************/
147 | /****************************************************************************/
148 | /****************************************************************************/
149 | void LAUWebCameraWidget::onCapture()
150 | {
151 | if (imageCapture) {
152 | // WAIT HERE UNTIL CAMERA IS READY TO CAPTURE
153 | while (imageCapture->isReadyForCapture() == false) {
154 | qApp->processEvents();
155 | }
156 | imageCapture->capture();
157 | if (imageCapture->error() != QCameraImageCapture::NoError) {
158 | qDebug() << imageCapture->errorString();
159 | }
160 | }
161 | }
162 |
163 | /****************************************************************************/
164 | /****************************************************************************/
165 | /****************************************************************************/
void LAUWebCameraWidget::onTriggerVideo(bool state)
{
    // Toggles recording: when a recorder already exists this call stops it
    // and offers to save the file; otherwise it creates one and starts
    // recording.  On Windows an OpenCV VideoWriter is used (the GL widget
    // writes frames through the handle passed via setVideoRecorder()); on
    // other platforms Qt's QMediaRecorder records directly from the camera.
    qDebug() << "Trigger video recording:" << state;

    if (recorder) {
#ifndef Q_OS_WIN
        // GET OUTPUT LOCATION
        localURL = recorder->outputLocation();

        // STOP RECORDING AND DELETE THE RECORDER
        recorder->stop();
#endif
        // DELETE THE RECORDER
#ifdef Q_OS_WIN
        // cv::VideoWriter finalizes and closes its file in the destructor.
        delete recorder;
#else
        // QObject owned by the camera; schedule deletion on the event loop.
        recorder->deleteLater();
#endif
        recorder = nullptr;

        // LET THE USER SAVE THE VIDEO FILE TO DISK
        saveVideoFile();
    } else {
#ifdef Q_OS_WIN
        recorder = new cv::VideoWriter();
        if (recorder->open(localURL.toString().toStdString(), cv::VideoWriter::fourcc('M', 'J', 'P', 'G'), 10.0, cv::Size(LAUWEBCAMERAWIDGETWIDTH, LAUWEBCAMERAWIDGETHEIGHT), true)) {
            qDebug() << "Recording to file:" << localURL.toString();
        }
#else
        // CREATE NEW RECORDER
        recorder = new QMediaRecorder(camera);

        // SET AUDIO PARAMETERS
        QAudioEncoderSettings audioSettings;
        audioSettings.setCodec("audio/amr");
        audioSettings.setQuality(QMultimedia::HighQuality);
        recorder->setAudioSettings(audioSettings);

        // SET THE SINK
        recorder->setOutputLocation(localURL);
        recorder->record();
#endif
    }
}
210 |
211 | /****************************************************************************/
212 | /****************************************************************************/
213 | /****************************************************************************/
214 | void LAUWebCameraWidget::onImageAvailable(int id, QImage image)
215 | {
216 | Q_UNUSED(id);
217 |
218 | QLabel *label = new QLabel();
219 | label->setPixmap(QPixmap::fromImage(image));
220 | label->show();
221 | }
222 |
223 | /****************************************************************************/
224 | /****************************************************************************/
225 | /****************************************************************************/
226 | void LAUWebCameraWidget::grabImage()
227 | {
228 | if (label) {
229 | QImage image = label->grabImage();
230 | if (image.isNull() == false) {
231 | QSettings settings;
232 | QString directory = settings.value("LAUWebCameraWidget::lastUsedDirectory", QStandardPaths::writableLocation(QStandardPaths::DocumentsLocation)).toString();
233 | QString filename = QFileDialog::getSaveFileName(0, QString("Save image to disk (*.tif)"), directory, QString("*.tif *.tiff"));
234 | if (filename.isEmpty() == false) {
235 | settings.setValue("LAUWebCameraWidget::lastUsedDirectory", QFileInfo(filename).absolutePath());
236 | if (filename.toLower().endsWith(".tif") == false && filename.toLower().endsWith(".tiff")) {
237 | filename.append(".tif");
238 | }
239 | } else {
240 | return;
241 | }
242 | image.save(filename, "TIFF");
243 | }
244 | }
245 | }
246 |
247 | /****************************************************************************/
248 | /****************************************************************************/
249 | /****************************************************************************/
250 | bool LAUWebCameraWidget::saveVideoFile()
251 | {
252 | // MAKE SURE TEMPORARY VIDEO FILE EXISTS
253 | if (QFile::exists(localURL.toLocalFile()) == false) {
254 | return (false);
255 | }
256 |
257 | // GET THE LAST USED DIRECTORY FROM SETTINGS
258 | QSettings settings;
259 | QString directory = settings.value("LAUWebCameraWidget::lastUsedDirectory", QStandardPaths::writableLocation(QStandardPaths::DocumentsLocation)).toString();
260 | if (QDir().exists(directory) == false) {
261 | directory = QStandardPaths::writableLocation(QStandardPaths::DocumentsLocation);
262 | }
263 |
264 | QString filename;
265 | for (int counter = 0; counter < 1000; counter++) {
266 | if (counter < 10) {
267 | filename = QString("%1/intubation00%2.mp4").arg(directory).arg(counter);
268 | } else if (counter < 100) {
269 | filename = QString("%1/intubation0%2.mp4").arg(directory).arg(counter);
270 | } else {
271 | filename = QString("%1/intubation%2.mp4").arg(directory).arg(counter);
272 | }
273 |
274 | if (QFile::exists(filename) == false) {
275 | break;
276 | }
277 | }
278 |
279 | while (1) {
280 | // COPY TO A USER SPECIFIED FILE
281 | filename = QFileDialog::getSaveFileName(nullptr, QString("Save video to disk (*.mp4)"), filename, QString("*.mp4"));
282 | if (filename.isEmpty() == false) {
283 | if (filename.toLower().endsWith(".mp4") == false) {
284 | filename.append(".mp4");
285 | }
286 | settings.setValue("LAUWebCameraWidget::lastUsedDirectory", QFileInfo(filename).absolutePath());
287 |
288 | // RENAME THE TEMPORARY RECORDING TO A PERMANENT FILE
289 | return (QFile::rename(localURL.toLocalFile(), filename));
290 | }
291 |
292 | // GIVE THE USER ANOTHER CHANCE
293 | if (QMessageBox::warning(this, QString("Webcam Recorder"), QString("You are about to discard the recording and lose the data forever.\n\nDo you want to do this?"), QMessageBox::Yes | QMessageBox::No, QMessageBox::No) == QMessageBox::Yes) {
294 | if (QMessageBox::warning(this, QString("Webcam Recorder"), QString("Are you sure?"), QMessageBox::Yes | QMessageBox::No, QMessageBox::No) == QMessageBox::Yes) {
295 | return (false);
296 | }
297 | }
298 | }
299 | }
300 |
--------------------------------------------------------------------------------
/lauwebcamerawidget.h:
--------------------------------------------------------------------------------
1 | #ifndef LAUWEBCAMERAWIDGET_H
2 | #define LAUWEBCAMERAWIDGET_H
3 |
4 | #include
5 | #include
6 | #include
7 | #include
8 | #include
9 | #include
10 | #include
11 | #include
12 | #include
13 | #include
14 | #include
15 | #include
16 | #include
17 | #include
18 | #include
19 | #include
20 | #include
21 | #include
22 | #include
23 |
24 | #include "lauvideosurface.h"
25 |
26 | #define LAUWEBCAMERAWIDGETWIDTH 640
27 | #define LAUWEBCAMERAWIDGETHEIGHT 480
28 | #define LAUWEBCAMERAWIDGETFPS 30.0f
29 |
30 | /****************************************************************************/
31 | /****************************************************************************/
32 | /****************************************************************************/
33 | class LAUWebCameraWidget : public QWidget
34 | {
35 | Q_OBJECT
36 |
37 | public:
38 | LAUWebCameraWidget(QCamera::CaptureMode capture, QWidget *parent = 0);
39 | ~LAUWebCameraWidget();
40 |
41 | bool isValid() const
42 | {
43 | return (camera);
44 | }
45 |
46 | bool isNull() const
47 | {
48 | return (!isValid());
49 | }
50 |
51 | void grabImage();
52 | bool saveVideoFile();
53 |
54 | public slots:
55 | void onCapture();
56 | void onImageAvailable(int id, QImage image);
57 | void onTriggerVideo(bool state);
58 |
59 | protected:
60 | void showEvent(QShowEvent *)
61 | {
62 | if (camera) {
63 | camera->start();
64 | QTimer::singleShot(1000, this, SLOT(onCapture()));
65 | }
66 | }
67 |
68 | private:
69 | QCamera::CaptureMode mode;
70 | LAUVideoGLWidget *label;
71 | QThread *thread;
72 | QCamera *camera;
73 | #ifdef Q_OS_WIN
74 | cv::VideoWriter *recorder;
75 | #else
76 | QMediaRecorder *recorder;
77 | #endif
78 | QCameraImageCapture *imageCapture;
79 | LAUVideoSurface *surface;
80 |
81 | static QUrl localURL;
82 | };
83 |
84 | /****************************************************************************/
85 | /****************************************************************************/
86 | /****************************************************************************/
class LAUWebCameraDialog : public QDialog
{
    Q_OBJECT

public:
    // Dialog wrapper around LAUWebCameraWidget with Snapshot (Ok), Quit
    // (Cancel), and a checkable Record button.
    explicit LAUWebCameraDialog(QCamera::CaptureMode capture, QWidget *parent = 0) : QDialog(parent), buttonBox(nullptr)
    {
        // CREATE A WIDGET TO WRAP AROUND
        widget = new LAUWebCameraWidget(capture);

        // SET THE LAYOUT AND DISPLAY OUR WIDGET INSIDE OF IT
        this->setWindowTitle(QString("Video Recorder"));
        this->setLayout(new QVBoxLayout());
        this->layout()->setContentsMargins(6, 6, 6, 6);
        this->layout()->addWidget(widget);

        buttonBox = new QDialogButtonBox(QDialogButtonBox::Ok | QDialogButtonBox::Cancel);
        connect(buttonBox->button(QDialogButtonBox::Ok), SIGNAL(clicked()), this, SLOT(accept()));
        connect(buttonBox->button(QDialogButtonBox::Cancel), SIGNAL(clicked()), this, SLOT(reject()));

        // Repurpose the standard buttons: Ok takes a snapshot, Cancel quits.
        buttonBox->button(QDialogButtonBox::Ok)->setText(QString("Snapshot"));
        buttonBox->button(QDialogButtonBox::Cancel)->setText(QString("Quit"));

        // Checkable Record button toggles video recording on and off.
        QPushButton *button = buttonBox->addButton(QString("Record"), QDialogButtonBox::ActionRole);
        button->setCheckable(true);
        button->setChecked(false);
        connect(button, SIGNAL(clicked(bool)), this, SLOT(onTriggerVideo(bool)));

        this->layout()->addWidget(buttonBox);
    }

    bool isValid() const
    {
        return (widget->isValid());
    }

    bool isNull() const
    {
        return (widget->isNull());
    }

public slots:
    // While recording, disable Snapshot and Quit so the session cannot be
    // interrupted; forward the toggle to the camera widget.
    void onTriggerVideo(bool state)
    {
        if (buttonBox) {
            buttonBox->button(QDialogButtonBox::Ok)->setDisabled(state);
            buttonBox->button(QDialogButtonBox::Cancel)->setDisabled(state);
        }

        if (widget) {
            widget->onTriggerVideo(state);
        }
    }

protected:
    // Intentionally does NOT call QDialog::accept(): the Ok ("Snapshot")
    // button grabs an image without closing the dialog.
    void accept()
    {
        widget->grabImage();
    }

private:
    LAUWebCameraWidget *widget;
    QDialogButtonBox *buttonBox;
};
151 |
152 | #endif // LAUWEBCAMERAWIDGET_H
153 |
--------------------------------------------------------------------------------
/main.cpp:
--------------------------------------------------------------------------------
1 | #include "lauwebcamerawidget.h"
2 |
3 | #include
4 | #include
5 |
6 | int main(int argc, char *argv[])
7 | {
8 | QSurfaceFormat format;
9 | format.setDepthBufferSize(10);
10 | format.setMajorVersion(4);
11 | format.setMinorVersion(1);
12 | format.setProfile(QSurfaceFormat::CoreProfile);
13 | format.setRenderableType(QSurfaceFormat::OpenGL);
14 | QSurfaceFormat::setDefaultFormat(format);
15 |
16 | #ifndef Q_OS_LINUX
17 | QApplication::setAttribute(Qt::AA_EnableHighDpiScaling);
18 | #endif
19 | QApplication::setAttribute(Qt::AA_UseDesktopOpenGL);
20 | QApplication::setAttribute(Qt::AA_ShareOpenGLContexts);
21 |
22 | QApplication a(argc, argv);
23 | a.setOrganizationName(QString("Lau Consulting Inc"));
24 | a.setOrganizationDomain(QString("drhalftone.com"));
25 | a.setApplicationName(QString("LAUWebCamera"));
26 |
27 | LAUWebCameraDialog w(QCamera::CaptureVideo);
28 |
29 | if (w.isValid()) {
30 | return w.exec();
31 | }
32 | return (0);
33 | }
34 |
--------------------------------------------------------------------------------