├── LICENSE
├── README.md
└── src
    └── qt_gl_gst
        ├── alphamasks
        │   ├── ed_was_ere.jpg
        │   ├── fade.jpg
        │   ├── hole.jpg
        │   ├── ripple.jpg
        │   └── target.jpg
        ├── applogger.cpp
        ├── applogger.h
        ├── asyncwaitingqueue.h
        ├── controlsform.cpp
        ├── controlsform.h
        ├── controlsform.ui
        ├── glpowervrwidget.cpp
        ├── glpowervrwidget.h
        ├── glwidget.cpp
        ├── glwidget.h
        ├── gstpipeline.cpp
        ├── gstpipeline.h
        ├── main.cpp
        ├── mainwindow.cpp
        ├── mainwindow.h
        ├── model.cpp
        ├── model.h
        ├── pipeline.cpp
        ├── pipeline.h
        ├── qt_gl_gst.pro
        ├── qt_gl_gst_omap3530.pro
        ├── run_with_1_vid_omap3.sh
        ├── run_with_3_vids.sh
        ├── run_with_5_vids.sh
        ├── shaderlists.cpp
        ├── shaderlists.h
        ├── shaders
        │   ├── alphamask-imgstream.frag
        │   ├── alphamask-recttex.frag
        │   ├── alphamask.frag
        │   ├── alphamask.vert
        │   ├── brick.frag
        │   ├── brick.vert
        │   ├── colourhilight.frag
        │   ├── colourhilightswap.frag
        │   ├── noeffect.frag
        │   ├── noeffect.vert
        │   ├── vidlighting.frag
        │   ├── vidlighting.vert
        │   ├── yuv2rgbI420-normalisedtexcoords-recttex.frag
        │   ├── yuv2rgbI420-normalisedtexcoords.frag
        │   ├── yuv2rgbI420-recttex.frag
        │   ├── yuv2rgbI420.frag
        │   ├── yuv2rgbUYVY-imgstream.frag
        │   ├── yuv2rgbUYVY-normalisedtexcoords-imgstream.frag
        │   ├── yuv2rgbUYVY-normalisedtexcoords.frag
        │   └── yuv2rgbUYVY.frag
        ├── tigstpipeline.cpp
        ├── tigstpipeline.h
        ├── yuvdebugwindow.cpp
        └── yuvdebugwindow.h
/LICENSE:
--------------------------------------------------------------------------------
1 | GNU GENERAL PUBLIC LICENSE
2 | Version 2, June 1991
3 |
4 | Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
5 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
6 | Everyone is permitted to copy and distribute verbatim copies
7 | of this license document, but changing it is not allowed.
8 |
9 | Preamble
10 |
11 | The licenses for most software are designed to take away your
12 | freedom to share and change it. By contrast, the GNU General Public
13 | License is intended to guarantee your freedom to share and change free
14 | software--to make sure the software is free for all its users. This
15 | General Public License applies to most of the Free Software
16 | Foundation's software and to any other program whose authors commit to
17 | using it. (Some other Free Software Foundation software is covered by
18 | the GNU Lesser General Public License instead.) You can apply it to
19 | your programs, too.
20 |
21 | When we speak of free software, we are referring to freedom, not
22 | price. Our General Public Licenses are designed to make sure that you
23 | have the freedom to distribute copies of free software (and charge for
24 | this service if you wish), that you receive source code or can get it
25 | if you want it, that you can change the software or use pieces of it
26 | in new free programs; and that you know you can do these things.
27 |
28 | To protect your rights, we need to make restrictions that forbid
29 | anyone to deny you these rights or to ask you to surrender the rights.
30 | These restrictions translate to certain responsibilities for you if you
31 | distribute copies of the software, or if you modify it.
32 |
33 | For example, if you distribute copies of such a program, whether
34 | gratis or for a fee, you must give the recipients all the rights that
35 | you have. You must make sure that they, too, receive or can get the
36 | source code. And you must show them these terms so they know their
37 | rights.
38 |
39 | We protect your rights with two steps: (1) copyright the software, and
40 | (2) offer you this license which gives you legal permission to copy,
41 | distribute and/or modify the software.
42 |
43 | Also, for each author's protection and ours, we want to make certain
44 | that everyone understands that there is no warranty for this free
45 | software. If the software is modified by someone else and passed on, we
46 | want its recipients to know that what they have is not the original, so
47 | that any problems introduced by others will not reflect on the original
48 | authors' reputations.
49 |
50 | Finally, any free program is threatened constantly by software
51 | patents. We wish to avoid the danger that redistributors of a free
52 | program will individually obtain patent licenses, in effect making the
53 | program proprietary. To prevent this, we have made it clear that any
54 | patent must be licensed for everyone's free use or not licensed at all.
55 |
56 | The precise terms and conditions for copying, distribution and
57 | modification follow.
58 |
59 | GNU GENERAL PUBLIC LICENSE
60 | TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
61 |
62 | 0. This License applies to any program or other work which contains
63 | a notice placed by the copyright holder saying it may be distributed
64 | under the terms of this General Public License. The "Program", below,
65 | refers to any such program or work, and a "work based on the Program"
66 | means either the Program or any derivative work under copyright law:
67 | that is to say, a work containing the Program or a portion of it,
68 | either verbatim or with modifications and/or translated into another
69 | language. (Hereinafter, translation is included without limitation in
70 | the term "modification".) Each licensee is addressed as "you".
71 |
72 | Activities other than copying, distribution and modification are not
73 | covered by this License; they are outside its scope. The act of
74 | running the Program is not restricted, and the output from the Program
75 | is covered only if its contents constitute a work based on the
76 | Program (independent of having been made by running the Program).
77 | Whether that is true depends on what the Program does.
78 |
79 | 1. You may copy and distribute verbatim copies of the Program's
80 | source code as you receive it, in any medium, provided that you
81 | conspicuously and appropriately publish on each copy an appropriate
82 | copyright notice and disclaimer of warranty; keep intact all the
83 | notices that refer to this License and to the absence of any warranty;
84 | and give any other recipients of the Program a copy of this License
85 | along with the Program.
86 |
87 | You may charge a fee for the physical act of transferring a copy, and
88 | you may at your option offer warranty protection in exchange for a fee.
89 |
90 | 2. You may modify your copy or copies of the Program or any portion
91 | of it, thus forming a work based on the Program, and copy and
92 | distribute such modifications or work under the terms of Section 1
93 | above, provided that you also meet all of these conditions:
94 |
95 | a) You must cause the modified files to carry prominent notices
96 | stating that you changed the files and the date of any change.
97 |
98 | b) You must cause any work that you distribute or publish, that in
99 | whole or in part contains or is derived from the Program or any
100 | part thereof, to be licensed as a whole at no charge to all third
101 | parties under the terms of this License.
102 |
103 | c) If the modified program normally reads commands interactively
104 | when run, you must cause it, when started running for such
105 | interactive use in the most ordinary way, to print or display an
106 | announcement including an appropriate copyright notice and a
107 | notice that there is no warranty (or else, saying that you provide
108 | a warranty) and that users may redistribute the program under
109 | these conditions, and telling the user how to view a copy of this
110 | License. (Exception: if the Program itself is interactive but
111 | does not normally print such an announcement, your work based on
112 | the Program is not required to print an announcement.)
113 |
114 | These requirements apply to the modified work as a whole. If
115 | identifiable sections of that work are not derived from the Program,
116 | and can be reasonably considered independent and separate works in
117 | themselves, then this License, and its terms, do not apply to those
118 | sections when you distribute them as separate works. But when you
119 | distribute the same sections as part of a whole which is a work based
120 | on the Program, the distribution of the whole must be on the terms of
121 | this License, whose permissions for other licensees extend to the
122 | entire whole, and thus to each and every part regardless of who wrote it.
123 |
124 | Thus, it is not the intent of this section to claim rights or contest
125 | your rights to work written entirely by you; rather, the intent is to
126 | exercise the right to control the distribution of derivative or
127 | collective works based on the Program.
128 |
129 | In addition, mere aggregation of another work not based on the Program
130 | with the Program (or with a work based on the Program) on a volume of
131 | a storage or distribution medium does not bring the other work under
132 | the scope of this License.
133 |
134 | 3. You may copy and distribute the Program (or a work based on it,
135 | under Section 2) in object code or executable form under the terms of
136 | Sections 1 and 2 above provided that you also do one of the following:
137 |
138 | a) Accompany it with the complete corresponding machine-readable
139 | source code, which must be distributed under the terms of Sections
140 | 1 and 2 above on a medium customarily used for software interchange; or,
141 |
142 | b) Accompany it with a written offer, valid for at least three
143 | years, to give any third party, for a charge no more than your
144 | cost of physically performing source distribution, a complete
145 | machine-readable copy of the corresponding source code, to be
146 | distributed under the terms of Sections 1 and 2 above on a medium
147 | customarily used for software interchange; or,
148 |
149 | c) Accompany it with the information you received as to the offer
150 | to distribute corresponding source code. (This alternative is
151 | allowed only for noncommercial distribution and only if you
152 | received the program in object code or executable form with such
153 | an offer, in accord with Subsection b above.)
154 |
155 | The source code for a work means the preferred form of the work for
156 | making modifications to it. For an executable work, complete source
157 | code means all the source code for all modules it contains, plus any
158 | associated interface definition files, plus the scripts used to
159 | control compilation and installation of the executable. However, as a
160 | special exception, the source code distributed need not include
161 | anything that is normally distributed (in either source or binary
162 | form) with the major components (compiler, kernel, and so on) of the
163 | operating system on which the executable runs, unless that component
164 | itself accompanies the executable.
165 |
166 | If distribution of executable or object code is made by offering
167 | access to copy from a designated place, then offering equivalent
168 | access to copy the source code from the same place counts as
169 | distribution of the source code, even though third parties are not
170 | compelled to copy the source along with the object code.
171 |
172 | 4. You may not copy, modify, sublicense, or distribute the Program
173 | except as expressly provided under this License. Any attempt
174 | otherwise to copy, modify, sublicense or distribute the Program is
175 | void, and will automatically terminate your rights under this License.
176 | However, parties who have received copies, or rights, from you under
177 | this License will not have their licenses terminated so long as such
178 | parties remain in full compliance.
179 |
180 | 5. You are not required to accept this License, since you have not
181 | signed it. However, nothing else grants you permission to modify or
182 | distribute the Program or its derivative works. These actions are
183 | prohibited by law if you do not accept this License. Therefore, by
184 | modifying or distributing the Program (or any work based on the
185 | Program), you indicate your acceptance of this License to do so, and
186 | all its terms and conditions for copying, distributing or modifying
187 | the Program or works based on it.
188 |
189 | 6. Each time you redistribute the Program (or any work based on the
190 | Program), the recipient automatically receives a license from the
191 | original licensor to copy, distribute or modify the Program subject to
192 | these terms and conditions. You may not impose any further
193 | restrictions on the recipients' exercise of the rights granted herein.
194 | You are not responsible for enforcing compliance by third parties to
195 | this License.
196 |
197 | 7. If, as a consequence of a court judgment or allegation of patent
198 | infringement or for any other reason (not limited to patent issues),
199 | conditions are imposed on you (whether by court order, agreement or
200 | otherwise) that contradict the conditions of this License, they do not
201 | excuse you from the conditions of this License. If you cannot
202 | distribute so as to satisfy simultaneously your obligations under this
203 | License and any other pertinent obligations, then as a consequence you
204 | may not distribute the Program at all. For example, if a patent
205 | license would not permit royalty-free redistribution of the Program by
206 | all those who receive copies directly or indirectly through you, then
207 | the only way you could satisfy both it and this License would be to
208 | refrain entirely from distribution of the Program.
209 |
210 | If any portion of this section is held invalid or unenforceable under
211 | any particular circumstance, the balance of the section is intended to
212 | apply and the section as a whole is intended to apply in other
213 | circumstances.
214 |
215 | It is not the purpose of this section to induce you to infringe any
216 | patents or other property right claims or to contest validity of any
217 | such claims; this section has the sole purpose of protecting the
218 | integrity of the free software distribution system, which is
219 | implemented by public license practices. Many people have made
220 | generous contributions to the wide range of software distributed
221 | through that system in reliance on consistent application of that
222 | system; it is up to the author/donor to decide if he or she is willing
223 | to distribute software through any other system and a licensee cannot
224 | impose that choice.
225 |
226 | This section is intended to make thoroughly clear what is believed to
227 | be a consequence of the rest of this License.
228 |
229 | 8. If the distribution and/or use of the Program is restricted in
230 | certain countries either by patents or by copyrighted interfaces, the
231 | original copyright holder who places the Program under this License
232 | may add an explicit geographical distribution limitation excluding
233 | those countries, so that distribution is permitted only in or among
234 | countries not thus excluded. In such case, this License incorporates
235 | the limitation as if written in the body of this License.
236 |
237 | 9. The Free Software Foundation may publish revised and/or new versions
238 | of the General Public License from time to time. Such new versions will
239 | be similar in spirit to the present version, but may differ in detail to
240 | address new problems or concerns.
241 |
242 | Each version is given a distinguishing version number. If the Program
243 | specifies a version number of this License which applies to it and "any
244 | later version", you have the option of following the terms and conditions
245 | either of that version or of any later version published by the Free
246 | Software Foundation. If the Program does not specify a version number of
247 | this License, you may choose any version ever published by the Free Software
248 | Foundation.
249 |
250 | 10. If you wish to incorporate parts of the Program into other free
251 | programs whose distribution conditions are different, write to the author
252 | to ask for permission. For software which is copyrighted by the Free
253 | Software Foundation, write to the Free Software Foundation; we sometimes
254 | make exceptions for this. Our decision will be guided by the two goals
255 | of preserving the free status of all derivatives of our free software and
256 | of promoting the sharing and reuse of software generally.
257 |
258 | NO WARRANTY
259 |
260 | 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
261 | FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
262 | OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
263 | PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
264 | OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
265 | MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
266 | TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
267 | PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
268 | REPAIR OR CORRECTION.
269 |
270 | 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
271 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
272 | REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
273 | INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
274 | OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
275 | TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
276 | YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
277 | PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
278 | POSSIBILITY OF SUCH DAMAGES.
279 |
280 | END OF TERMS AND CONDITIONS
281 |
282 | How to Apply These Terms to Your New Programs
283 |
284 | If you develop a new program, and you want it to be of the greatest
285 | possible use to the public, the best way to achieve this is to make it
286 | free software which everyone can redistribute and change under these terms.
287 |
288 | To do so, attach the following notices to the program. It is safest
289 | to attach them to the start of each source file to most effectively
290 | convey the exclusion of warranty; and each file should have at least
291 | the "copyright" line and a pointer to where the full notice is found.
292 |
293 | {description}
294 | Copyright (C) {year} {fullname}
295 |
296 | This program is free software; you can redistribute it and/or modify
297 | it under the terms of the GNU General Public License as published by
298 | the Free Software Foundation; either version 2 of the License, or
299 | (at your option) any later version.
300 |
301 | This program is distributed in the hope that it will be useful,
302 | but WITHOUT ANY WARRANTY; without even the implied warranty of
303 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
304 | GNU General Public License for more details.
305 |
306 | You should have received a copy of the GNU General Public License along
307 | with this program; if not, write to the Free Software Foundation, Inc.,
308 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
309 |
310 | Also add information on how to contact you by electronic and paper mail.
311 |
312 | If the program is interactive, make it output a short notice like this
313 | when it starts in an interactive mode:
314 |
315 | Gnomovision version 69, Copyright (C) year name of author
316 | Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
317 | This is free software, and you are welcome to redistribute it
318 | under certain conditions; type `show c' for details.
319 |
320 | The hypothetical commands `show w' and `show c' should show the appropriate
321 | parts of the General Public License. Of course, the commands you use may
322 | be called something other than `show w' and `show c'; they could even be
323 | mouse-clicks or menu items--whatever suits your program.
324 |
325 | You should also get your employer (if you work as a programmer) or your
326 | school, if any, to sign a "copyright disclaimer" for the program, if
327 | necessary. Here is a sample; alter the names:
328 |
329 | Yoyodyne, Inc., hereby disclaims all copyright interest in the program
330 | `Gnomovision' (which makes passes at compilers) written by James Hacker.
331 |
332 | {signature of Ty Coon}, 1 April 1989
333 | Ty Coon, President of Vice
334 |
335 | This General Public License does not permit incorporating your program into
336 | proprietary programs. If your program is a subroutine library, you may
337 | consider it more useful to permit linking proprietary applications with the
338 | library. If this is what you want to do, use the GNU Lesser General
339 | Public License instead of this License.
340 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | qt_gl_gst_poc
2 | =============
3 |
4 | This is a proof-of-concept demo that uses a media framework (currently GStreamer) to stream video frames as OpenGL textures. The videos can then be rendered with different shaders, blended with alpha masks, mapped onto OBJ models, and combined with any other effects added as desired. Qt drives the application, providing a basic UI as well as OpenGL ES compatibility. It currently runs as a native desktop build and on the Texas Instruments OMAP3, using TI's bc-cat driver for the PowerVR streaming texture extensions.
5 |
6 | More detail and background on this project can be found on its page [here](http://www.edlangley.co.uk/projects/opengl-streaming-textures/).
7 |
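
In outline, each video file gets its own GStreamer pipeline whose sink hands decoded frames back to the Qt GL widget; the widget turns each frame into a texture and leaves YUV-to-RGB conversion to the fragment shaders in `src/qt_gl_gst/shaders`. As a rough illustration of that frame hand-off only — a hypothetical sketch using the GStreamer 1.0 API, not code from this repository (which wraps its pipelines in the `Pipeline`/`GStreamerPipeline` classes and may target an older GStreamer API); the element chain and file name below are placeholders:

```cpp
#include <gst/gst.h>
#include <gst/app/gstappsink.h>

int main(int argc, char *argv[])
{
    gst_init(&argc, &argv);

    GError *err = NULL;
    GstElement *pipe = gst_parse_launch(
        "filesrc location=video.avi ! decodebin ! videoconvert ! "
        "video/x-raw,format=I420 ! appsink name=sink", &err);
    if (!pipe) {
        g_printerr("Failed to build pipeline: %s\n", err ? err->message : "unknown");
        return 1;
    }

    GstElement *sink = gst_bin_get_by_name(GST_BIN(pipe), "sink");
    gst_element_set_state(pipe, GST_STATE_PLAYING);

    for (;;) {
        // Blocks until the next decoded frame arrives (returns NULL at end of stream)
        GstSample *sample = gst_app_sink_pull_sample(GST_APP_SINK(sink));
        if (!sample)
            break;

        GstBuffer *buf = gst_sample_get_buffer(sample);
        GstMapInfo map;
        if (gst_buffer_map(buf, &map, GST_MAP_READ)) {
            // map.data now holds one I420 frame: this is where a GL widget would
            // upload it (glTexImage2D, or a streaming-texture extension on OMAP3)
            gst_buffer_unmap(buf, &map);
        }
        gst_sample_unref(sample);
    }

    gst_element_set_state(pipe, GST_STATE_NULL);
    gst_object_unref(sink);
    gst_object_unref(pipe);
    return 0;
}
```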
--------------------------------------------------------------------------------
/src/qt_gl_gst/alphamasks/ed_was_ere.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/edlangley/qt_gl_gst_poc/0bf54c885d8bacd5f6a2ee53de3ff74be575540f/src/qt_gl_gst/alphamasks/ed_was_ere.jpg
--------------------------------------------------------------------------------
/src/qt_gl_gst/alphamasks/fade.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/edlangley/qt_gl_gst_poc/0bf54c885d8bacd5f6a2ee53de3ff74be575540f/src/qt_gl_gst/alphamasks/fade.jpg
--------------------------------------------------------------------------------
/src/qt_gl_gst/alphamasks/hole.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/edlangley/qt_gl_gst_poc/0bf54c885d8bacd5f6a2ee53de3ff74be575540f/src/qt_gl_gst/alphamasks/hole.jpg
--------------------------------------------------------------------------------
/src/qt_gl_gst/alphamasks/ripple.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/edlangley/qt_gl_gst_poc/0bf54c885d8bacd5f6a2ee53de3ff74be575540f/src/qt_gl_gst/alphamasks/ripple.jpg
--------------------------------------------------------------------------------
/src/qt_gl_gst/alphamasks/target.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/edlangley/qt_gl_gst_poc/0bf54c885d8bacd5f6a2ee53de3ff74be575540f/src/qt_gl_gst/alphamasks/target.jpg
--------------------------------------------------------------------------------
/src/qt_gl_gst/applogger.cpp:
--------------------------------------------------------------------------------
1 | #include <cstdarg>
2 | #include <cstdio>
3 | #include <sstream>
4 | #include "applogger.h"
5 |
6 | Logger GlobalLog;
7 |
8 | #define APPLOGGER_MAX_MSG_LEN 256
9 |
10 | Logger::Logger()
11 | {
12 | }
13 |
14 | void Logger::SetModuleLogLevel(unsigned int module, LogLevel level)
15 | {
16 | m_currentLogLevels[module] = level;
17 | }
18 |
19 | Logger::LogLevel Logger::GetModuleLogLevel(unsigned int module)
20 | {
21 | if(m_currentLogLevels.contains(module) == false)
22 | {
23 | m_currentLogLevels[module] = DEFAULT_LOG_LEVEL;
24 | }
25 |
26 | return m_currentLogLevels[module];
27 | }
28 |
29 | void Logger::LogMessage(unsigned int module, LogLevel severity, const char* const format, ...)
30 | {
31 |
32 | if(m_currentLogLevels.contains(module) == false)
33 | {
34 | m_currentLogLevels[module] = DEFAULT_LOG_LEVEL;
35 | }
36 |
37 | if(severity <= m_currentLogLevels[module])
38 | {
39 | va_list args;
40 | va_start(args, format);
41 |
42 | char buffer[APPLOGGER_MAX_MSG_LEN];
43 | vsnprintf(buffer, APPLOGGER_MAX_MSG_LEN, format, args);
44 |
45 | // Allow single place to decide how/where to print message
46 | outputMessage(module, severity, buffer);
47 |
48 | va_end(args);
49 | }
50 | }
51 |
52 | void Logger::LogMessageWithFuncTrace(unsigned int module, LogLevel severity,
53 | const char* const filename, const char* const function, const int line,
54 | const char* const format,
55 | ...)
56 | {
57 | if(m_currentLogLevels.contains(module) == false)
58 | {
59 | m_currentLogLevels[module] = DEFAULT_LOG_LEVEL;
60 | }
61 |
62 | if(severity <= m_currentLogLevels[module])
63 | {
64 | va_list args;
65 | va_start(args, format);
66 |
67 | std::string logString = filename;
68 | logString += ":";
69 | logString += function;
70 | logString += ":";
71 | std::ostringstream ss;
72 | ss << line;
73 | logString += ss.str();
74 | logString += "> ";
75 | logString += format;
76 |
77 | char buffer[APPLOGGER_MAX_MSG_LEN];
78 | vsnprintf(buffer, APPLOGGER_MAX_MSG_LEN, logString.c_str(), args);
79 |
80 | // Allow single place to decide how/where to print message
81 | outputMessage(module, severity, buffer);
82 |
83 | va_end(args);
84 | }
85 | }
86 |
87 | void Logger::outputMessage(unsigned int module, LogLevel severity, const char * const message)
88 | {
89 | if(severity <= Logger::Error)
90 | {
91 | qCritical("%s", message);
92 | }
93 | else
94 | {
95 | qDebug("%s", message);
96 | }
97 | }
98 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/applogger.h:
--------------------------------------------------------------------------------
1 | #ifndef LOGGER_H
2 | #define LOGGER_H
3 |
4 | #include <QMap>
5 | #include <libgen.h>
6 |
7 | class Logger
8 | {
9 | public:
10 | enum LogLevel
11 | {
12 | Error,
13 | Warning,
14 | Info,
15 | Debug1,
16 | Debug2
17 | };
18 |
19 | Logger();
20 | void SetModuleLogLevel(unsigned int module, LogLevel level);
21 | LogLevel GetModuleLogLevel(unsigned int module);
22 | void LogMessage(unsigned int module, LogLevel severity, const char* const format, ...);
23 | void LogMessageWithFuncTrace(unsigned int module, LogLevel severity,
24 | const char* const filename, const char* const function, const int line,
25 | const char* const format,
26 | ...);
27 |
28 | private:
29 | void outputMessage(unsigned int module, LogLevel severity, const char* const message);
30 | QMap<unsigned int, LogLevel> m_currentLogLevels;
31 | };
32 |
33 | #define DEFAULT_LOG_LEVEL Logger::Warning
34 |
35 |
36 | // The global object actually used for LOG calls
37 | extern Logger GlobalLog;
38 |
39 | #define LOG(moduleId, severity, ...) \
40 | GlobalLog.LogMessageWithFuncTrace(moduleId, severity, \
41 | basename(__FILE__), \
42 | __PRETTY_FUNCTION__, \
43 | __LINE__, \
44 | __VA_ARGS__ \
45 | )
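// Example call, as used in glwidget.cpp:
//   LOG(LOG_GL, Logger::Debug1, "GLWidget constructor entered");
// which prefixes the formatted message with the file, function and line number.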
46 |
47 | // Global log module directory:
48 | enum
49 | {
50 | LOG_GL,
51 | LOG_GLSHADERS,
52 | LOG_OBJLOADER,
53 | LOG_VIDPIPELINE
54 | };
55 |
56 |
57 | #endif // LOGGER_H
58 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/asyncwaitingqueue.h:
--------------------------------------------------------------------------------
1 | #ifndef ASYNCWAITINGQUEUE_H
2 | #define ASYNCWAITINGQUEUE_H
3 |
4 | #include <QMutex>
5 | #include <QWaitCondition>
6 | #include <QList>
7 |
8 | /* Thread safe queue implementation which can block (with timeout)
9 | on get until an item arrives in the queue, at which point thread
10 | is woken up.
11 | */
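// Example use (hypothetical caller, not part of this header):
//   AsyncQueue<int> q;
//   q.put(42);                  // producer thread: wakes a waiting reader
//   int v;
//   if(q.get(&v, 50)) { ... }   // consumer thread: blocks for up to 50 ms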
12 | template<class T>
13 | class AsyncQueue
14 | {
15 | public:
16 | AsyncQueue() : m_waitingReaders(0) {}
17 |
18 | int size()
19 | {
20 | QMutexLocker locker(&m_mutex);
21 | return this->m_buffer.size();
22 | }
23 |
24 | void put(const T& item)
25 | {
26 | QMutexLocker locker(&m_mutex);
27 | this->m_buffer.push_back(item);
28 | if(this->m_waitingReaders)
29 | this->m_bufferIsNotEmpty.wakeOne();
30 | }
31 |
32 | bool get(T *itemDestPtr, unsigned long time_ms = 0)
33 | {
34 | QMutexLocker locker(&m_mutex);
35 | bool itemInQueue = false;
36 |
37 | itemInQueue = (this->m_buffer.size()) ? true : false;
38 | if(!itemInQueue && time_ms)
39 | {
40 | ++(this->m_waitingReaders);
41 | itemInQueue = this->m_bufferIsNotEmpty.wait(&m_mutex, time_ms);
42 | --(this->m_waitingReaders);
43 | }
44 |
45 | if(itemInQueue)
46 | {
47 | T item = this->m_buffer.front();
48 | this->m_buffer.pop_front();
49 | *itemDestPtr = item;
50 | return true;
51 | }
52 | else
53 | {
54 | return false;
55 | }
56 | }
57 |
58 | private:
59 | typedef QList<T> Container;
60 | QMutex m_mutex;
61 | QWaitCondition m_bufferIsNotEmpty;
62 | Container m_buffer;
63 | short m_waitingReaders;
64 | };
65 |
66 |
67 | #endif // ASYNCWAITINGQUEUE_H
68 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/controlsform.cpp:
--------------------------------------------------------------------------------
1 | #include "controlsform.h"
2 | #include "ui_controlsform.h"
3 |
4 | #include "glwidget.h"
5 |
6 | ControlsForm::ControlsForm(GLWidget *glWidgetToInsert, QWidget *parent) :
7 | QWidget(parent),
8 | ui(new Ui::ControlsForm)
9 | {
10 | ui->setupUi(this);
11 |
12 | ui->glWidgetLayout->addWidget(glWidgetToInsert);
13 |
14 | QObject::connect(ui->cycBackGndPushButton, SIGNAL(clicked(bool)), glWidgetToInsert, SLOT(cycleBackgroundSlot()));
15 | QObject::connect(ui->cycModShadPushButton, SIGNAL(clicked(bool)), glWidgetToInsert, SLOT(cycleModelShaderSlot()));
16 | QObject::connect(ui->cycVidShadPushButton, SIGNAL(clicked(bool)), glWidgetToInsert, SLOT(cycleVidShaderSlot()));
17 | QObject::connect(ui->exitPushButton, SIGNAL(clicked(bool)), glWidgetToInsert, SLOT(exitSlot()));
18 | QObject::connect(ui->loadAlphaPushButton, SIGNAL(clicked(bool)), glWidgetToInsert, SLOT(loadAlphaSlot()));
19 | QObject::connect(ui->loadModelPushButton, SIGNAL(clicked(bool)), glWidgetToInsert, SLOT(loadModelSlot()));
20 | QObject::connect(ui->loadVidPushButton, SIGNAL(clicked(bool)), glWidgetToInsert, SLOT(loadVideoSlot()));
21 | QObject::connect(ui->resetPosPushButton, SIGNAL(clicked(bool)), glWidgetToInsert, SLOT(resetPosSlot()));
22 | QObject::connect(ui->showYUVPushButton, SIGNAL(clicked(bool)), glWidgetToInsert, SLOT(showYUVWindowSlot()));
23 | QObject::connect(ui->rotateCheckBox, SIGNAL(toggled(bool)), glWidgetToInsert, SLOT(rotateToggleSlot(bool)));
24 | QObject::connect(ui->stackVidsCheckBox, SIGNAL(stateChanged(int)), glWidgetToInsert, SLOT(stackVidsToggleSlot(int)));
25 |
26 | QObject::connect(glWidgetToInsert, SIGNAL(rotateStateChanged(bool)), ui->rotateCheckBox, SLOT(setChecked(bool)));
27 | QObject::connect(glWidgetToInsert, SIGNAL(stackVidsStateChanged(bool)), ui->stackVidsCheckBox, SLOT(setChecked(bool)));
28 |
29 | }
30 |
31 | ControlsForm::~ControlsForm()
32 | {
33 | delete ui;
34 | }
35 |
36 | void ControlsForm::closeEvent(QCloseEvent* event)
37 | {
38 | Q_UNUSED(event);
39 |
40 | // At the mo, tell parent to close too.
41 | QWidget* _parent = dynamic_cast<QWidget*>(parent());
42 | if(_parent)
43 | _parent->close();
44 | }
45 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/controlsform.h:
--------------------------------------------------------------------------------
1 | #ifndef CONTROLSFORM_H
2 | #define CONTROLSFORM_H
3 |
4 | #include <QWidget>
5 |
6 | class GLWidget;
7 |
8 | namespace Ui {
9 | class ControlsForm;
10 | }
11 |
12 | class ControlsForm : public QWidget
13 | {
14 | Q_OBJECT
15 |
16 | public:
17 | explicit ControlsForm(GLWidget *glWidgetToInsert, QWidget *parent = 0);
18 | ~ControlsForm();
19 |
20 | protected:
21 | void closeEvent(QCloseEvent* event);
22 |
23 | private:
24 | Ui::ControlsForm *ui;
25 | };
26 |
27 | #endif // CONTROLSFORM_H
28 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/controlsform.ui:
--------------------------------------------------------------------------------
1 |
2 |
3 | ControlsForm
4 |
5 |
6 |
7 | 0
8 | 0
9 | 859
10 | 412
11 |
12 |
13 |
14 |
15 | 0
16 | 0
17 |
18 |
19 |
20 | Form
21 |
22 |
23 |
24 | 0
25 |
26 |
27 | 0
28 |
29 | -
30 |
31 |
32 | QLayout::SetMaximumSize
33 |
34 |
-
35 |
36 |
37 | Qt::Vertical
38 |
39 |
40 |
41 | 0
42 | 40
43 |
44 |
45 |
46 |
47 |
48 |
49 | -
50 |
51 |
-
52 |
53 |
54 | Cycle Vid Shader
55 |
56 |
57 |
58 | -
59 |
60 |
61 | Cycle Model Shader
62 |
63 |
64 |
65 | -
66 |
67 |
68 | Stack Videos
69 |
70 |
71 |
72 | -
73 |
74 |
75 | Cycle Background
76 |
77 |
78 |
79 | -
80 |
81 |
82 | Load Video
83 |
84 |
85 |
86 | -
87 |
88 |
89 | Reset Position
90 |
91 |
92 |
93 | -
94 |
95 |
96 | Load Alphamask
97 |
98 |
99 |
100 | -
101 |
102 |
103 | Rotate
104 |
105 |
106 | true
107 |
108 |
109 |
110 | -
111 |
112 |
113 | Load Model
114 |
115 |
116 |
117 | -
118 |
119 |
120 | Exit
121 |
122 |
123 |
124 | -
125 |
126 |
127 | Qt::Horizontal
128 |
129 |
130 |
131 | 40
132 | 20
133 |
134 |
135 |
136 |
137 | -
138 |
139 |
140 | Show YUV Window
141 |
142 |
143 |
144 |
145 |
146 |
147 |
148 |
149 |
150 |
151 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/glpowervrwidget.cpp:
--------------------------------------------------------------------------------
1 |
2 | #if 1 // temp whilst reviving my build env
3 |
4 | #include <fcntl.h>
5 | #include <sys/ioctl.h>
6 | #include <errno.h>
7 | #include <string.h>
8 |
9 | #include "applogger.h"
10 | #include "glpowervrwidget.h"
11 | #include "cmem.h"
12 |
13 | #define COLFMT_TO_BC_FOURCC(fourCC) fourCC
14 |
15 | GLPowerVRWidget::GLPowerVRWidget(int argc, char *argv[], QWidget *parent) :
16 | GLWidget(argc, argv, parent)
17 | {
18 | if(CMEM_init() == -1)
19 | {
20 | LOG(LOG_GL, Logger::Error, "Error calling CMEM_init");
21 | close();
22 | }
23 | }
24 |
25 | GLPowerVRWidget::~GLPowerVRWidget()
26 | {
27 | }
28 |
29 | void GLPowerVRWidget::initializeGL()
30 | {
31 | GLWidget::initializeGL();
32 |
33 | const GLubyte *glExtCStr;
34 |
35 | if (!(glExtCStr = glGetString(GL_EXTENSIONS)))
36 | {
37 | LOG(LOG_GL, Logger::Error, "Can't get GLES 2.0 extensions");
38 | close();
39 | }
40 |
41 | if (!strstr((char *)glExtCStr, "GL_IMG_texture_stream2"))
42 | {
43 | LOG(LOG_GL, Logger::Error, "GL_IMG_texture_stream2 extension not present");
44 | close();
45 | }
46 |
47 | glTexBindStreamIMG = (PFNGLTEXBINDSTREAMIMGPROC)this->context()->getProcAddress("glTexBindStreamIMG");
48 | glGetTexAttrIMG = (PFNGLGETTEXSTREAMDEVICEATTRIBUTEIVIMGPROC)this->context()->getProcAddress("glGetTexStreamDeviceAttributeivIMG");
49 | glGetTexDeviceIMG = (PFNGLGETTEXSTREAMDEVICENAMEIMGPROC)this->context()->getProcAddress("glGetTexStreamDeviceNameIMG");
50 |
51 | if (!glTexBindStreamIMG || !glGetTexAttrIMG || !glGetTexDeviceIMG)
52 | {
53 | LOG(LOG_GL, Logger::Error, "Couldn't get pointers to IMG extension functions");
54 | close();
55 | }
56 | }
57 |
58 | Pipeline *GLPowerVRWidget::createPipeline(int vidIx)
59 | {
60 | if(vidIx > MAX_BCDEV)
61 | {
62 | LOG(LOG_GL, Logger::Error, "ERROR: vidIx=%d which is greater than bccat devs available", vidIx);
63 | return NULL;
64 | }
65 |
66 | // Only need to init Bufferclass dev if haven't already
67 | if(vidIx >= m_bcFds.size())
68 | {
69 | m_bcFds.resize(vidIx+1);
70 |
71 | int bcFd;
72 | QString bcDevName = QString("/dev/bccat%1").arg(vidIx);
73 | if((bcFd = open(bcDevName.toUtf8().constData(), O_RDWR|O_NDELAY)) == -1)
74 | {
75 | LOG(LOG_GL, Logger::Error, "ERROR: open %s failed", bcDevName.toUtf8().constData());
76 | return NULL;
77 | }
78 | LOG(LOG_GL, Logger::Debug1, "opened %s fd=%d", bcDevName.toUtf8().constData(), bcFd);
79 |
80 | m_bcFds.replace(vidIx, bcFd);
81 | }
82 |
83 | // Size all the other containers for buffer handling appropriately
84 | if(vidIx >= m_vidBufferAddressesSet.size())
85 | {
86 | m_vidBufferAddressesSet.resize(vidIx+1);
87 | }
88 | m_vidBufferAddressesSet.replace(vidIx, false);
89 |
90 | if(vidIx >= m_vidBufferAddresses.size())
91 | {
92 | m_vidBufferAddresses.resize(vidIx+1);
93 | }
94 | m_vidBufferAddresses.replace(vidIx, QVector<bc_buf_ptr_t>());
95 |
96 | return new TIGStreamerPipeline(vidIx, this->m_videoLoc[vidIx], SLOT(newFrame(int)), this);
97 | }
98 |
99 | int GLPowerVRWidget::totalVidBuffers()
100 | {
101 | int total = 0;
102 |
103 | QVector< QVector<bc_buf_ptr_t> >::iterator bufVecPtr;
104 | for(bufVecPtr = m_vidBufferAddresses.begin(); bufVecPtr != m_vidBufferAddresses.end(); ++bufVecPtr)
105 | {
106 | total += bufVecPtr->size();
107 | }
108 |
109 | return total;
110 | }
111 |
112 | bool GLPowerVRWidget::loadNewTexture(int vidIx)
113 | {
114 | bool texLoaded = false;
115 |
116 | unsigned long currentVidBufferAddress = (unsigned long)CMEM_getPhys(this->m_vidPipelines[vidIx]->bufToVidDataStart(this->m_vidTextures[vidIx].buffer));
117 |
118 | LOG(LOG_GL, Logger::Debug2, "vid %d, CMEM phys=%lx", vidIx, currentVidBufferAddress);
119 |
120 | if(m_vidBufferAddressesSet.contains(false))
121 | {
122 | bool gotThisBuffer = false;
123 | QVector<bc_buf_ptr_t>::iterator bufPtr;
124 | for(bufPtr = m_vidBufferAddresses[vidIx].begin(); bufPtr != m_vidBufferAddresses[vidIx].end(); ++bufPtr)
125 | {
126 | if(bufPtr->pa == currentVidBufferAddress)
127 | {
128 | // Already recorded this buffer address
129 | LOG(LOG_GL, Logger::Debug2, "vid %d, already saved phys addr %lx", vidIx, currentVidBufferAddress);
130 | gotThisBuffer = true;
131 |
132 | // If we've got the same buffer a second time,
133 | // assume that means we have all the buffer addresses
134 | m_vidBufferAddressesSet[vidIx] = true;
135 | }
136 | }
137 |
138 | if(!gotThisBuffer)
139 | {
140 | // A new buffer has come through, record the details:
141 | bc_buf_ptr_t bc_buf;
142 | bc_buf.index = totalVidBuffers(); // Multiple buffers per possibly multiple videos
143 | // Size parameter isn't actually used in the driver just yet but fill in anyway for
144 | // future proofing:
145 | switch(this->m_vidTextures[vidIx].colourFormat)
146 | {
147 | case ColFmt_I420:
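// I420 is planar 4:2:0 YUV: a full-resolution Y plane plus
// quarter-resolution U and V planes, i.e. 1.5 bytes per pixel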
148 | bc_buf.size = this->m_vidTextures[vidIx].width * this->m_vidTextures[vidIx].height * 1.5f;
149 | break;
150 | case ColFmt_UYVY:
151 | default:
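// UYVY (and the default) is packed 4:2:2 YUV: 2 bytes per pixel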
152 | bc_buf.size = this->m_vidTextures[vidIx].width * this->m_vidTextures[vidIx].height * 2;
153 | break;
154 | }
155 | bc_buf.pa = currentVidBufferAddress;
156 | LOG(LOG_GL, Logger::Debug1, "vid %d, saving bc_buf_ptr_t: index=%d, size=%d, pa=%lx",
157 | vidIx, bc_buf.index, bc_buf.size, bc_buf.pa);
158 | m_vidBufferAddresses[vidIx].push_back(bc_buf);
159 | }
160 |
161 | // Have we got all the buffer addresses we are waiting for, for all videos?
162 | if(!m_vidBufferAddressesSet.contains(false))
163 | {
164 | LOG(LOG_GL, Logger::Debug1, "got all the bc_buf_ptr_t entries for all vids");
165 |
166 | for(int currentVidIx = 0; currentVidIx < m_vidPipelines.size(); currentVidIx++)
167 | {
168 | // We now definitely have the size information needed to prep the driver:
169 | bc_buf_params_t bufParams;
170 | bufParams.count = m_vidBufferAddresses[currentVidIx].size();
171 | bufParams.width = this->m_vidTextures[currentVidIx].width;
172 | bufParams.height = this->m_vidTextures[currentVidIx].height;
173 |
174 | bufParams.fourcc = COLFMT_TO_BC_FOURCC(this->m_vidPipelines[currentVidIx]->getColourFormat());
175 | bufParams.type = BC_MEMORY_USERPTR;
176 |
177 | LOG(LOG_GL, Logger::Debug1, "vid %d, calling BCIOREQ_BUFFERS, count=%d, fourCC=0x%08X, width=%d, height=%d, type=%d",
178 | currentVidIx, bufParams.count, bufParams.fourcc, bufParams.width, bufParams.height, bufParams.type);
179 | int retVal;
180 | if((retVal = ioctl(m_bcFds[currentVidIx], BCIOREQ_BUFFERS, &bufParams)) != 0)
181 | {
182 | LOG(LOG_GL, Logger::Error, "ERROR: BCIOREQ_BUFFERS on fd %d failed, retVal=%d, errno=%d:%s",
183 | m_bcFds[currentVidIx], retVal, errno, strerror(errno));
184 | return false;
185 | }
186 |
187 | LOG(LOG_GL, Logger::Debug1, "vid %d, calling BCIOGET_BUFFERCOUNT", currentVidIx);
188 | BCIO_package ioctlVar;
189 | if (ioctl(m_bcFds[currentVidIx], BCIOGET_BUFFERCOUNT, &ioctlVar) != 0)
190 | {
191 | LOG(LOG_GL, Logger::Error, "ERROR: BCIOGET_BUFFERCOUNT failed");
192 | return false;
193 | }
194 |
195 | if (ioctlVar.output == 0)
196 | {
197 | LOG(LOG_GL, Logger::Error, "ERROR: no texture buffers available");
198 | return false;
199 | }
200 |
201 | for(bufPtr = m_vidBufferAddresses[currentVidIx].begin(); bufPtr != m_vidBufferAddresses[currentVidIx].end(); ++bufPtr)
202 | {
203 | if (ioctl(m_bcFds[currentVidIx], BCIOSET_BUFFERPHYADDR, bufPtr) != 0)
204 | {
205 | LOG(LOG_GL, Logger::Error, "ERROR: BCIOSET_BUFFERADDR[%d]: failed (0x%lx)",
206 | bufPtr->index, bufPtr->pa);
207 | }
208 | }
209 | }
210 |
211 | printOpenGLError(__FILE__, __LINE__);
212 |
213 | // Should be able to set up the GLES side now:
214 | const GLubyte *imgDevName;
215 | imgDevName = this->glGetTexDeviceIMG(vidIx);
216 |
217 | printOpenGLError(__FILE__, __LINE__);
218 |
219 | GLint numImgBufs, imgBufWidth, imgBufHeight, imgBufFmt;
220 | this->glGetTexAttrIMG(vidIx, GL_TEXTURE_STREAM_DEVICE_NUM_BUFFERS_IMG, &numImgBufs);
221 | this->glGetTexAttrIMG(vidIx, GL_TEXTURE_STREAM_DEVICE_WIDTH_IMG, &imgBufWidth);
222 | this->glGetTexAttrIMG(vidIx, GL_TEXTURE_STREAM_DEVICE_HEIGHT_IMG, &imgBufHeight);
223 | this->glGetTexAttrIMG(vidIx, GL_TEXTURE_STREAM_DEVICE_FORMAT_IMG, &imgBufFmt);
224 |
225 | LOG(LOG_GL, Logger::Debug1, "GLES IMG attrs: dev name: %s, numbufs=%d, width=%d, height=%d, format=%d",
226 | imgDevName, numImgBufs, imgBufWidth, imgBufHeight, imgBufFmt);
227 |
228 | printOpenGLError(__FILE__, __LINE__);
229 |
230 | /* do stuff from setup_shaders() in bc_cat example common.c code here */
231 | glActiveTexture(GL_TEXTURE0);
232 |
233 |
234 | for(int currentVidIx = 0; currentVidIx < m_vidPipelines.size(); currentVidIx++)
235 | {
236 | // Delete the single texture ID created in superclass,
237 | // ready to create the pool of texture IDs for each video instead
238 | glDeleteTextures (1, &m_vidTextures[currentVidIx].texId);
239 |
240 | // Loop through all the buffers, link buf index to tex IDs:
241 | QVector<bc_buf_ptr_t>::iterator bufPtr;
242 | for(bufPtr = m_vidBufferAddresses[currentVidIx].begin(); bufPtr != m_vidBufferAddresses[currentVidIx].end(); ++bufPtr)
243 | {
244 | GLuint newTexId;
245 | glGenTextures(1, &newTexId);
246 |
247 | LOG(LOG_GL, Logger::Debug1, "calling glBindTexture texId=%d",
248 | newTexId);
249 | glBindTexture(GL_TEXTURE_STREAM_IMG, newTexId);
250 | printOpenGLError(__FILE__, __LINE__);
251 |
252 | LOG(LOG_GL, Logger::Debug1, "setting texture filters");
253 | // specify filters
254 | //glTexParameterf(GL_TEXTURE_STREAM_IMG, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
255 | //glTexParameterf(GL_TEXTURE_STREAM_IMG, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
256 | // cmem examples use:
257 | glTexParameterf(GL_TEXTURE_STREAM_IMG, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
258 | glTexParameterf(GL_TEXTURE_STREAM_IMG, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
259 | printOpenGLError(__FILE__, __LINE__);
260 |
261 | // assign the buffer
262 | LOG(LOG_GL, Logger::Debug1, "calling glTexBindStreamIMG buf index=%d", bufPtr->index);
263 | glTexBindStreamIMG(vidIx, bufPtr->index);
264 | printOpenGLError(__FILE__, __LINE__);
265 |
266 | m_bcBufIxToTexId[bufPtr->index] = newTexId;
267 |
268 | // When the loop exits, take last buffer captured for vid as active texture
269 | this->m_vidTextures[currentVidIx].texId = newTexId;
270 | }
271 | }
272 | texLoaded = true;
273 | }
274 |
275 | }
276 | else
277 | {
278 | LOG(LOG_GL, Logger::Debug2, "vid %d, looking up bc_buf_ptr_t index for buf pa=%lx",
279 | vidIx, currentVidBufferAddress);
280 |
281 | bool bufAdrFound = false;
282 | QVector<bc_buf_ptr_t>::iterator bufPtr;
283 | for(bufPtr = m_vidBufferAddresses[vidIx].begin(); bufPtr != m_vidBufferAddresses[vidIx].end(); ++bufPtr)
284 | {
285 | if(bufPtr->pa == currentVidBufferAddress)
286 | {
287 | LOG(LOG_GL, Logger::Debug2, "vid %d, setting texture to bc_buf_ptr_t index %d, texId %d",
288 | vidIx, bufPtr->index, m_bcBufIxToTexId[bufPtr->index]);
289 | this->m_vidTextures[vidIx].texId = m_bcBufIxToTexId[bufPtr->index];
290 | bufAdrFound = true;
291 | break;
292 | }
293 | }
294 |
295 | if(bufAdrFound)
296 | {
297 | glActiveTexture(GL_TEXTURE0);
298 | glBindTexture(GL_TEXTURE_STREAM_IMG, this->m_vidTextures[vidIx].texId);
299 |
300 | texLoaded = true;
301 | }
302 | else
303 | {
304 | LOG(LOG_GL, Logger::Error, "new vid buffer arrived after all expected buffers have been setup, pa=%lx",
305 | currentVidBufferAddress);
306 | }
307 | }
308 |
309 | return texLoaded;
310 | }
311 |
312 | #endif
313 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/glpowervrwidget.h:
--------------------------------------------------------------------------------
1 | #ifndef GLPOWERVRWIDGET_H
2 | #define GLPOWERVRWIDGET_H
3 |
4 | #include "glwidget.h"
5 | #include "bc_cat.h"
6 |
7 | #define MAX_BCDEV 9
8 |
9 | class GLPowerVRWidget : public GLWidget
10 | {
11 | Q_OBJECT
12 | public:
13 | explicit GLPowerVRWidget(int argc, char *argv[], QWidget *parent = 0);
14 | ~GLPowerVRWidget();
15 |
16 | protected:
17 | void initializeGL();
18 | virtual Pipeline* createPipeline(int vidIx);
19 | bool loadNewTexture(int vidIx);
20 |
21 | signals:
22 |
23 | public slots:
24 |
25 | private:
26 | int totalVidBuffers();
27 |
28 | PFNGLTEXBINDSTREAMIMGPROC glTexBindStreamIMG;
29 | PFNGLGETTEXSTREAMDEVICEATTRIBUTEIVIMGPROC glGetTexAttrIMG;
30 | PFNGLGETTEXSTREAMDEVICENAMEIMGPROC glGetTexDeviceIMG;
31 |
32 | QVector< QVector<bc_buf_ptr_t> > m_vidBufferAddresses;
33 | QVector<bool> m_vidBufferAddressesSet;
34 | QVector<int> m_bcFds;
35 | QMap<int, GLuint> m_bcBufIxToTexId;
36 |
37 | };
38 |
39 | #endif // GLPOWERVRWIDGET_H
40 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/glwidget.cpp:
--------------------------------------------------------------------------------
1 | #include <QtGui>
2 | #include "glwidget.h"
3 | #include "shaderlists.h"
4 | #include "applogger.h"
5 |
6 | #ifdef GLU_NEEDED
7 | #include "GL/glu.h"
8 | #endif
9 |
10 |
11 | GLWidget::GLWidget(int argc, char *argv[], QWidget *parent) :
12 | QGLWidget(QGLFormat(QGL::DoubleBuffer | QGL::DepthBuffer | QGL::Rgba), parent),
13 | m_closing(false),
14 | m_brickProg(this)
15 | {
16 | LOG(LOG_GL, Logger::Debug1, "GLWidget constructor entered");
17 |
18 | m_xRot = 0;
19 | m_yRot = 0;
20 | m_zRot = 0;
21 | m_scaleValue = 1.0;
22 | m_lastPos = QPoint(0, 0);
23 |
24 | m_rotateOn = 1;
25 | m_xLastIncr = 0;
26 | m_yLastIncr = 0;
27 | m_xInertia = -0.5;
28 | m_yInertia = 0;
29 |
30 | m_clearColorIndex = 0;
31 | m_stackVidQuads = false;
32 | m_currentModelEffectIndex = ModelEffectFirst;
33 |
34 | QTimer *timer = new QTimer(this);
35 | connect(timer, SIGNAL(timeout()), this, SLOT(animate()));
36 | timer->start(20);
37 |
38 | grabKeyboard();
39 |
40 | // Video shader effects vars
41 | m_colourHilightRangeMin = QVector4D(0.0, 0.0, 0.0, 0.0);
42 | m_colourHilightRangeMax = QVector4D(0.2, 0.2, 1.0, 1.0); // show shades of blue as they are
43 | m_colourComponentSwapR = QVector4D(1.0, 1.0, 1.0, 0.0);
44 | m_colourComponentSwapG = QVector4D(1.0, 1.0, 0.0, 0.0);
45 | m_colourComponentSwapB = QVector4D(1.0, 1.0, 1.0, 0.0);
46 | m_colourSwapDirUpwards = true;
47 | m_alphaTextureLoaded = false;
48 |
49 | // Video pipeline
50 | for(int vidIx = 1; vidIx < argc; vidIx++)
51 | {
52 | m_videoLoc.push_back(QString(argv[vidIx]));
53 | }
54 |
55 | m_model = NULL;
56 |
57 | m_frames = 0;
58 | setAttribute(Qt::WA_PaintOnScreen);
59 | setAttribute(Qt::WA_NoSystemBackground);
60 | setAutoBufferSwap(false);
61 | setAutoFillBackground(false);
62 |
63 | #ifdef ENABLE_YUV_WINDOW
64 | m_yuvWindow = new YuvDebugWindow(this);
65 | /* Build a colour map */
66 | for(int i = 0; i < 256; i++)
67 | {
68 | m_colourMap.push_back(qRgb(i, i, i));
69 | }
70 | #endif
71 |
72 | m_dataFilesDir = QString(qgetenv(DATA_DIR_ENV_VAR_NAME));
73 | if(m_dataFilesDir.size() == 0)
74 | {
75 | m_dataFilesDir = QString("./");
76 | }
77 | else
78 | {
79 | m_dataFilesDir += "/";
80 | }
81 | LOG(LOG_GL, Logger::Debug1, "m_dataFilesDir = %s", m_dataFilesDir.toUtf8().constData());
82 | }
83 |
84 | GLWidget::~GLWidget()
85 | {
86 | }
87 |
88 | void GLWidget::initVideo()
89 | {
90 | // Instantiate video pipeline for each filename specified
91 | for(int vidIx = 0; vidIx < this->m_videoLoc.size(); vidIx++)
92 | {
93 | this->m_vidPipelines.push_back(this->createPipeline(vidIx));
94 |
95 | if(this->m_vidPipelines[vidIx] == NULL)
96 | {
97 | LOG(LOG_GL, Logger::Error, "Error creating pipeline for vid %d", vidIx);
98 | return;
99 | }
100 |
101 | QObject::connect(this->m_vidPipelines[vidIx], SIGNAL(finished(int)),
102 | this, SLOT(pipelineFinished(int)));
103 | QObject::connect(this, SIGNAL(closeRequested()),
104 | this->m_vidPipelines[vidIx], SLOT(Stop()), Qt::QueuedConnection);
105 |
106 | this->m_vidPipelines[vidIx]->Configure();
107 | }
108 | }
109 |
110 | void GLWidget::initializeGL()
111 | {
112 | QString verStr((const char*)glGetString(GL_VERSION));
113 | LOG(LOG_GL, Logger::Info, "GL_VERSION: %s", verStr.toUtf8().constData());
114 |
115 | QStringList verNums = verStr.split(QRegExp("[ .]"));
116 | bool foundVerNum = false;
117 | for(int verNumIx = 0; verNumIx < verNums.length(); verNumIx++)
118 | {
119 | int verNum = verNums[verNumIx].toInt(&foundVerNum);
120 | if(foundVerNum)
121 | {
122 | if(verNum < 2)
123 | {
124 | LOG(LOG_GL, Logger::Error, "Support for OpenGL 2.0 is required for this demo...exiting");
125 | close();
126 | }
127 | break;
128 | }
129 | }
130 | if(!foundVerNum)
131 | {
132 | LOG(LOG_GL, Logger::Error, "Couldn't find OpenGL version number");
133 | }
134 |
135 | LOG(LOG_GL, Logger::Debug1, "Window is%s double buffered", ((this->format().doubleBuffer()) ? "": " not"));
136 |
137 | qglClearColor(QColor(Qt::black));
138 |
139 | setupShader(&m_brickProg, BrickGLESShaderList, NUM_SHADERS_BRICKGLES);
140 | // Set up initial uniform values
141 | // m_brickProg.setUniformValue("BrickColor", QVector3D(1.0, 0.3, 0.2));
142 | // m_brickProg.setUniformValue("MortarColor", QVector3D(0.85, 0.86, 0.84));
143 | m_brickProg.setUniformValue("BrickColor", QVector3D(0.0, 0.5, 1.0));
144 | m_brickProg.setUniformValue("MortarColor", QVector3D(0.0, 0.5, 1.0));
145 | m_brickProg.setUniformValue("BrickSize", QVector3D(0.30, 0.15, 0.30));
146 | m_brickProg.setUniformValue("BrickPct", QVector3D(0.90, 0.85, 0.90));
147 | m_brickProg.setUniformValue("LightPosition", QVector3D(0.0, 0.0, 4.0));
148 | m_brickProg.release();
149 | printOpenGLError(__FILE__, __LINE__);
150 |
151 | #ifdef VIDI420_SHADERS_NEEDED
152 | setupShader(&m_I420NoEffectNormalised, VidI420NoEffectNormalisedShaderList, NUM_SHADERS_VIDI420_NOEFFECT_NORMALISED);
153 | setupShader(&m_I420LitNormalised, VidI420LitNormalisedShaderList, NUM_SHADERS_VIDI420_LIT_NORMALISED);
154 | setupShader(&m_I420NoEffect, VidI420NoEffectShaderList, NUM_SHADERS_VIDI420_NOEFFECT);
155 | setupShader(&m_I420Lit, VidI420LitShaderList, NUM_SHADERS_VIDI420_LIT);
156 | setupShader(&m_I420ColourHilight, VidI420ColourHilightShaderList, NUM_SHADERS_VIDI420_COLOURHILIGHT);
157 | setupShader(&m_I420ColourHilightSwap, VidI420ColourHilightSwapShaderList, NUM_SHADERS_VIDI420_COLOURHILIGHTSWAP);
158 | setupShader(&m_I420AlphaMask, VidI420AlphaMaskShaderList, NUM_SHADERS_VIDI420_ALPHAMASK);
159 | #endif
160 |
161 | #ifdef VIDUYVY_SHADERS_NEEDED
162 | setupShader(&m_UYVYNoEffectNormalised, VidUYVYNoEffectNormalisedShaderList, NUM_SHADERS_VIDUYVY_NOEFFECT_NORMALISED);
163 | setupShader(&m_UYVYLitNormalised, VidUYVYLitNormalisedShaderList, NUM_SHADERS_VIDUYVY_LIT_NORMALISED);
164 | setupShader(&m_UYVYNoEffect, VidUYVYNoEffectShaderList, NUM_SHADERS_VIDUYVY_NOEFFECT);
165 | setupShader(&m_UYVYLit, VidUYVYLitShaderList, NUM_SHADERS_VIDUYVY_LIT);
166 | setupShader(&m_UYVYColourHilight, VidUYVYColourHilightShaderList, NUM_SHADERS_VIDUYVY_COLOURHILIGHT);
167 | setupShader(&m_UYVYColourHilightSwap, VidUYVYColourHilightSwapShaderList, NUM_SHADERS_VIDUYVY_COLOURHILIGHTSWAP);
168 | setupShader(&m_UYVYAlphaMask, VidUYVYAlphaMaskShaderList, NUM_SHADERS_VIDUYVY_ALPHAMASK);
169 | #endif
170 |
171 | glTexParameteri(GL_RECT_VID_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
172 | glTexParameteri(GL_RECT_VID_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
173 | glTexParameteri(GL_RECT_VID_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
174 | glTexParameteri(GL_RECT_VID_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
175 |
176 | // Set uniforms for vid shaders along with other stream details when first
177 | // frame comes through
178 |
179 |
180 | if(m_vidPipelines.size() != m_videoLoc.size())
181 | {
182 | LOG(LOG_GL, Logger::Error, "initVideo must be called before intialiseGL");
183 | return;
184 | }
185 |
186 | // Create entry in tex info vector for all pipelines
187 | for(int vidIx = 0; vidIx < this->m_vidPipelines.size(); vidIx++)
188 | {
189 | VidTextureInfo newInfo;
190 | glGenTextures(1, &newInfo.texId);
191 | newInfo.texInfoValid = false;
192 | newInfo.buffer = NULL;
193 | newInfo.effect = VidShaderNoEffect;
194 | newInfo.frameCount = 0;
195 |
196 | this->m_vidTextures.push_back(newInfo);
197 | }
198 |
199 | m_model = new Model();
200 | if(m_model->Load(m_dataFilesDir + DFLT_OBJ_MODEL_FILE_NAME) != 0)
201 | {
202 | LOG(LOG_OBJLOADER, Logger::Warning, "Couldn't load obj model file %s%s",
203 | m_dataFilesDir.toUtf8().constData(), DFLT_OBJ_MODEL_FILE_NAME);
204 | }
205 | m_model->SetScale(MODEL_BOUNDARY_SIZE);
206 |
207 |
208 | for(int vidIx = 0; vidIx < this->m_vidPipelines.size(); vidIx++)
209 | {
210 | this->m_vidPipelines[vidIx]->Start();
211 | }
212 | }
213 |
214 | Pipeline* GLWidget::createPipeline(int vidIx)
215 | {
216 | return new GStreamerPipeline(vidIx, this->m_videoLoc[vidIx], SLOT(newFrame(int)), this);
217 | }
218 |
219 | void GLWidget::paintEvent(QPaintEvent *event)
220 | {
221 | Q_UNUSED(event);
222 |
223 | makeCurrent();
224 |
225 | glDepthFunc(GL_LESS);
226 | glEnable(GL_DEPTH_TEST);
227 | glEnable (GL_BLEND);
228 | glBlendFunc (GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
229 |
230 | glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
231 |
232 | this->m_modelViewMatrix = QMatrix4x4();
233 | this->m_modelViewMatrix.lookAt(QVector3D(0.0, 0.0, -5.0), QVector3D(0.0, 0.0, 0.0), QVector3D(0.0, 1.0, 0.0));
234 | this->m_modelViewMatrix.rotate(-m_zRot / 16.0, 0.0, 0.0, 1.0);
235 | this->m_modelViewMatrix.rotate(-m_xRot / 16.0, 1.0, 0.0, 0.0);
236 | this->m_modelViewMatrix.rotate(m_yRot / 16.0, 0.0, 1.0, 0.0);
237 | this->m_modelViewMatrix.scale(m_scaleValue);
238 |
239 | // Draw an object in the middle
240 | ModelEffectType enabledModelEffect = m_currentModelEffectIndex;
241 | QGLShaderProgram *currentShader = NULL;
242 | switch(enabledModelEffect)
243 | {
244 | case ModelEffectBrick:
245 | m_brickProg.bind();
246 | currentShader = &m_brickProg;
247 | break;
248 | case ModelEffectVideo:
249 | glActiveTexture(GL_RECT_VID_TEXTURE0);
250 | glBindTexture(GL_RECT_VID_TEXTURE_2D, this->m_vidTextures[0].texId);
251 |
252 | #ifdef TEXCOORDS_ALREADY_NORMALISED
253 | this->m_vidTextures[0].effect = VidShaderNoEffect;
254 | #else
255 | this->m_vidTextures[0].effect = VidShaderNoEffectNormalisedTexCoords;
256 | #endif
257 | setAppropriateVidShader(0);
258 | this->m_vidTextures[0].shader->bind();
259 | setVidShaderVars(0, false);
260 |
261 | currentShader = this->m_vidTextures[0].shader;
262 | break;
263 |
264 | case ModelEffectVideoLit:
265 | glActiveTexture(GL_RECT_VID_TEXTURE0);
266 | glBindTexture(GL_RECT_VID_TEXTURE_2D, this->m_vidTextures[0].texId);
267 |
268 | #ifdef TEXCOORDS_ALREADY_NORMALISED
269 | this->m_vidTextures[0].effect = VidShaderLit;
270 | #else
271 | this->m_vidTextures[0].effect = VidShaderLitNormalisedTexCoords;
272 | #endif
273 | setAppropriateVidShader(0);
274 | this->m_vidTextures[0].shader->bind();
275 | setVidShaderVars(0, false);
276 |
277 | currentShader = this->m_vidTextures[0].shader;
278 | break;
279 | }
280 |
281 | m_model->Draw(m_modelViewMatrix, m_projectionMatrix, currentShader, false);
282 |
283 | switch(enabledModelEffect)
284 | {
285 | case ModelEffectBrick:
286 | currentShader->release();
287 | break;
288 | case ModelEffectVideo:
289 | case ModelEffectVideoLit:
290 | this->m_vidTextures[0].effect = VidShaderNoEffect;
291 | setAppropriateVidShader(0);
292 | this->m_vidTextures[0].shader->bind();
293 | setVidShaderVars(0, false);
294 |
295 | printOpenGLError(__FILE__, __LINE__);
296 | break;
297 | }
298 |
299 | // Draw videos around the object
300 | for(int vidIx = 0; vidIx < this->m_vidTextures.size(); vidIx++)
301 | {
302 | if(this->m_vidTextures[vidIx].texInfoValid)
303 | {
304 | // Render a quad with the video on it:
305 | glActiveTexture(GL_RECT_VID_TEXTURE0);
306 | glBindTexture(GL_RECT_VID_TEXTURE_2D, this->m_vidTextures[vidIx].texId);
307 | printOpenGLError(__FILE__, __LINE__);
308 |
309 | if((this->m_vidTextures[vidIx].effect == VidShaderAlphaMask) && this->m_alphaTextureLoaded)
310 | {
311 | glEnable (GL_BLEND);
312 | glBlendFunc (GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
313 | glActiveTexture(GL_RECT_TEXTURE1);
314 | glBindTexture(GL_RECT_TEXTURE_2D, this->m_alphaTextureId);
315 | }
316 |
317 | this->m_vidTextures[vidIx].shader->bind();
318 | setVidShaderVars(vidIx, false);
319 | printOpenGLError(__FILE__, __LINE__);
320 |
321 | if(this->m_vidTextures[vidIx].effect == VidShaderColourHilightSwap)
322 | {
323 | this->m_vidTextures[vidIx].shader->setUniformValue("u_componentSwapR", m_colourComponentSwapR);
324 | this->m_vidTextures[vidIx].shader->setUniformValue("u_componentSwapG", m_colourComponentSwapG);
325 | this->m_vidTextures[vidIx].shader->setUniformValue("u_componentSwapB", m_colourComponentSwapB);
326 | }
327 |
328 | QGLShaderProgram *vidShader = this->m_vidTextures[vidIx].shader;
329 |
330 | QMatrix4x4 vidQuadMatrix = this->m_modelViewMatrix;
331 |
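// Lay the video quads out either stacked just behind one another along z,
// or spaced evenly in a ring around the central model (about the y axis).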
332 | if(m_stackVidQuads)
333 | {
334 | vidQuadMatrix.translate(0.0, 0.0, 2.0);
335 | vidQuadMatrix.translate(0.0, 0.0, 0.2*vidIx);
336 | }
337 | else
338 | {
339 | vidQuadMatrix.rotate((360/this->m_vidTextures.size())*vidIx, 0.0, 1.0, 0.0);
340 | vidQuadMatrix.translate(0.0, 0.0, 2.0);
341 | }
342 |
343 |
344 | vidShader->setUniformValue("u_mvp_matrix", m_projectionMatrix * vidQuadMatrix);
345 | vidShader->setUniformValue("u_mv_matrix", vidQuadMatrix);
346 |
347 | // Need to set these arrays up here as shader instances are shared between
348 | // all the videos:
349 | vidShader->enableAttributeArray("a_texCoord");
350 | vidShader->setAttributeArray("a_texCoord", this->m_vidTextures[vidIx].triStripTexCoords);
351 |
352 | if(this->m_vidTextures[vidIx].effect == VidShaderAlphaMask)
353 | {
354 | vidShader->enableAttributeArray("a_alphaTexCoord");
355 | vidShader->setAttributeArray("a_alphaTexCoord", this->m_vidTextures[vidIx].triStripAlphaTexCoords);
356 | }
357 |
358 | vidShader->enableAttributeArray("a_vertex");
359 | vidShader->setAttributeArray("a_vertex", this->m_vidTextures[vidIx].triStripVertices);
360 |
361 | glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
362 |
363 | vidShader->disableAttributeArray("a_vertex");
364 | if(this->m_vidTextures[vidIx].effect == VidShaderAlphaMask)
365 | {
366 | vidShader->disableAttributeArray("a_alphaTexCoord");
367 | }
368 | vidShader->disableAttributeArray("a_texCoord");
369 | }
370 | }
371 |
372 | QPainter painter(this);
373 | painter.setRenderHint(QPainter::Antialiasing);
374 | painter.setRenderHint(QPainter::TextAntialiasing);
375 |
376 | painter.endNativePainting();
377 | QString framesPerSecond;
378 | framesPerSecond.setNum(m_frames /(m_frameTime.elapsed() / 1000.0), 'f', 2);
379 | painter.setPen(Qt::white);
380 | painter.drawText(20, 40, framesPerSecond + " fps");
381 | painter.end();
382 | swapBuffers();
383 |
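// Restart the FPS timer every 100 frames so the readout reflects recent
// performance rather than an average over the whole run.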
384 | if (!(m_frames % 100))
385 | {
386 | m_frameTime.start();
387 | m_frames = 0;
388 | }
389 | ++m_frames;
390 | }
391 |
392 | void GLWidget::resizeGL(int wid, int ht)
393 | {
394 | float vp = 0.8f;
395 | float aspect = (float) wid / (float) ht;
396 |
397 | glViewport(0, 0, wid, ht);
398 |
399 | this->m_projectionMatrix = QMatrix4x4();
400 | this->m_projectionMatrix.frustum(-vp, vp, -vp / aspect, vp / aspect, 1.0, 50.0);
401 | }
402 |
403 | void GLWidget::newFrame(int vidIx)
404 | {
405 | if(this->m_vidPipelines[vidIx])
406 | {
407 | LOG(LOG_VIDPIPELINE, Logger::Debug2, "vid %d frame %d",
408 | vidIx, this->m_vidTextures[vidIx].frameCount++);
409 |
410 | Pipeline *pipeline = this->m_vidPipelines[vidIx];
411 |
412 |
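// Buffer hand-off: the GStreamer callback refs each decoded frame and pushes it onto
// m_incomingBufQueue; once this widget has finished with a frame it is returned via
// m_outgoingBufQueue, where the pipeline's outgoing-buffer thread unrefs it.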
413 | /* If we have a vid frame currently, return it back to the video
414 | system */
415 | if(this->m_vidTextures[vidIx].buffer)
416 | {
417 | pipeline->m_outgoingBufQueue.put(this->m_vidTextures[vidIx].buffer);
418 | LOG(LOG_VIDPIPELINE, Logger::Debug2, "vid %d pushed buffer %p to outgoing queue",
419 | vidIx, this->m_vidTextures[vidIx].buffer);
420 | }
421 |
422 | void *newBuf = NULL;
423 | if(pipeline->m_incomingBufQueue.get(&newBuf) == true)
424 | {
425 | this->m_vidTextures[vidIx].buffer = newBuf;
426 | }
427 | else
428 | {
429 | this->m_vidTextures[vidIx].buffer = NULL;
430 | return;
431 | }
432 |
433 | LOG(LOG_VIDPIPELINE, Logger::Debug2, "vid %d popped buffer %p from incoming queue",
434 | vidIx, this->m_vidTextures[vidIx].buffer);
435 |
436 | this->makeCurrent();
437 |
438 | // Load the gst buf into a texture
439 | if(this->m_vidTextures[vidIx].texInfoValid == false)
440 | {
441 | LOG(LOG_VIDPIPELINE, Logger::Debug2, "Setting up texture info for vid %d", vidIx);
442 |
443 | // Try and keep this fairly portable to other media frameworks by
444 | // leaving info extraction within pipeline class
445 | this->m_vidTextures[vidIx].width = pipeline->getWidth();
446 | this->m_vidTextures[vidIx].height = pipeline->getHeight();
447 | this->m_vidTextures[vidIx].colourFormat = pipeline->getColourFormat();
448 | // this->m_vidTextures[vidIx].texInfoValid = true;
449 |
450 | setAppropriateVidShader(vidIx);
451 |
452 | this->m_vidTextures[vidIx].shader->bind();
453 | printOpenGLError(__FILE__, __LINE__);
454 | // Setting shader variables here will have no effect as they are set on every render,
455 | // but do it to check for errors, so we don't need to check on every render
456 | // and the program output doesn't get flooded with errors
457 | setVidShaderVars(vidIx, true);
458 |
459 | #ifdef TEXCOORDS_ALREADY_NORMALISED
460 | GLfloat vidWidth = 1.0f;
461 | GLfloat vidHeight = 1.0f;
462 | #else
463 | GLfloat vidWidth = this->m_vidTextures[vidIx].width;
464 | GLfloat vidHeight = this->m_vidTextures[vidIx].height;
465 | #endif
466 |
467 | this->m_vidTextures[vidIx].triStripTexCoords[0] = QVector2D(vidWidth, 0.0f);
468 | this->m_vidTextures[vidIx].triStripVertices[0] = QVector2D(VIDTEXTURE_RIGHT_X, VIDTEXTURE_TOP_Y);
469 |
470 | this->m_vidTextures[vidIx].triStripTexCoords[1] = QVector2D(0.0f, 0.0f);
471 | this->m_vidTextures[vidIx].triStripVertices[1] = QVector2D(VIDTEXTURE_LEFT_X, VIDTEXTURE_TOP_Y);
472 |
473 | this->m_vidTextures[vidIx].triStripTexCoords[2] = QVector2D(vidWidth, vidHeight);
474 | this->m_vidTextures[vidIx].triStripVertices[2] = QVector2D(VIDTEXTURE_RIGHT_X, VIDTEXTURE_BOT_Y);
475 |
476 | this->m_vidTextures[vidIx].triStripTexCoords[3] = QVector2D(0.0f, vidHeight);
477 | this->m_vidTextures[vidIx].triStripVertices[3] = QVector2D(VIDTEXTURE_LEFT_X, VIDTEXTURE_BOT_Y);
478 | }
479 |
480 | this->m_vidTextures[vidIx].texInfoValid = loadNewTexture(vidIx);
481 |
482 | #ifdef ENABLE_YUV_WINDOW
483 | if((vidIx == 0) && (m_yuvWindow->isVisible()))
484 | {
485 | QImage yuvImage;
486 | switch(this->m_vidTextures[vidIx].colourFormat)
487 | {
488 | case ColFmt_I420:
489 | default:
490 | yuvImage = QImage(this->m_vidPipelines[vidIx]->bufToVidDataStart(this->m_vidTextures[vidIx].buffer),
491 | this->m_vidTextures[vidIx].width,
492 | this->m_vidTextures[vidIx].height*1.5f,
493 | QImage::Format_Indexed8);
494 | break;
495 | case ColFmt_UYVY:
496 | yuvImage = QImage(this->m_vidPipelines[vidIx]->bufToVidDataStart(this->m_vidTextures[vidIx].buffer),
497 | this->m_vidTextures[vidIx].width*2,
498 | this->m_vidTextures[vidIx].height,
499 | QImage::Format_Indexed8);
500 | break;
501 | }
502 | yuvImage.setColorTable(m_colourMap);
503 | m_yuvWindow->m_imageLabel->setPixmap(QPixmap::fromImage(yuvImage));
504 | }
505 | #endif
506 |
507 | printOpenGLError(__FILE__, __LINE__);
508 |
509 | this->update();
510 | }
511 | }
512 |
513 | bool GLWidget::loadNewTexture(int vidIx)
514 | {
515 | bool texLoaded = false;
516 |
517 | glBindTexture (GL_RECT_VID_TEXTURE_2D, this->m_vidTextures[vidIx].texId);
518 |
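// The raw YUV frame is uploaded as a single GL_LUMINANCE texture and converted to RGB
// in the fragment shader: I420 stores a full-size Y plane plus quarter-size U and V
// planes, so the texture is 1.5x the frame height; UYVY packs two bytes per pixel,
// so the texture is 2x the frame width.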
519 | switch(this->m_vidTextures[vidIx].colourFormat)
520 | {
521 | case ColFmt_I420:
522 | glTexImage2D (GL_RECT_VID_TEXTURE_2D, 0, GL_LUMINANCE,
523 | this->m_vidTextures[vidIx].width,
524 | this->m_vidTextures[vidIx].height*1.5f,
525 | 0, GL_LUMINANCE, GL_UNSIGNED_BYTE,
526 | this->m_vidPipelines[vidIx]->bufToVidDataStart(this->m_vidTextures[vidIx].buffer));
527 | texLoaded = true;
528 | break;
529 | case ColFmt_UYVY:
530 | glTexImage2D (GL_RECT_VID_TEXTURE_2D, 0, GL_LUMINANCE,
531 | this->m_vidTextures[vidIx].width*2,
532 | this->m_vidTextures[vidIx].height,
533 | 0, GL_LUMINANCE, GL_UNSIGNED_BYTE,
534 | this->m_vidPipelines[vidIx]->bufToVidDataStart(this->m_vidTextures[vidIx].buffer));
535 | texLoaded = true;
536 | break;
537 | default:
538 | LOG(LOG_GL, Logger::Error, "Decide how to load texture for colour format %d",
539 | this->m_vidTextures[vidIx].colourFormat);
540 | break;
541 | }
542 |
543 | return texLoaded;
544 | }
545 |
546 | void GLWidget::pipelineFinished(int vidIx)
547 | {
548 | this->m_vidTextures[vidIx].frameCount = 0;
549 |
550 | if(this->m_closing)
551 | {
552 | delete(this->m_vidPipelines[vidIx]);
553 | this->m_vidPipelines.replace(vidIx, NULL);
554 | this->m_vidTextures[vidIx].texInfoValid = false;
555 |
556 | // Check if any gst threads left, if not close
557 | bool allFinished = true;
558 | for(int i = 0; i < this->m_vidPipelines.size(); i++)
559 | {
560 | if(this->m_vidPipelines[i] != NULL)
561 | {
562 | // Catch any threads which were already finished at quitting time
563 | if(this->m_vidPipelines[i]->isFinished())
564 | {
565 | delete(this->m_vidPipelines[i]);
566 | this->m_vidPipelines.replace(i, NULL);
567 | this->m_vidTextures[i].texInfoValid = false;
568 | }
569 | else
570 | {
571 | allFinished = false;
572 | break;
573 | }
574 | }
575 | }
576 | if(allFinished)
577 | {
578 | close();
579 | }
580 | }
581 | else
582 | {
583 | delete(this->m_vidPipelines[vidIx]);
584 | this->m_vidTextures[vidIx].texInfoValid = false;
585 |
586 | this->m_vidPipelines[vidIx] = createPipeline(vidIx);
587 |
588 | if(this->m_vidPipelines[vidIx] == NULL)
589 | {
590 | LOG(LOG_GL, Logger::Error, "Error creating pipeline for vid %d", vidIx);
591 | return;
592 | }
593 |
594 | QObject::connect(this->m_vidPipelines[vidIx], SIGNAL(finished(int)),
595 | this, SLOT(pipelineFinished(int)));
596 | QObject::connect(this, SIGNAL(closeRequested()),
597 | this->m_vidPipelines[vidIx], SLOT(Stop()), Qt::QueuedConnection);
598 |
599 | this->m_vidPipelines[vidIx]->Configure();
600 | this->m_vidPipelines[vidIx]->Start();
601 | }
602 | }
603 |
604 | // Layout size
605 | QSize GLWidget::minimumSizeHint() const
606 | {
607 | return QSize(50, 50);
608 | }
609 |
610 | QSize GLWidget::sizeHint() const
611 | {
612 | return QSize(400, 400);
613 | }
614 |
615 | // Animation
616 | static int qNormalizeAngle(int angle)
617 | {
618 | while (angle < 0)
619 | angle += 360 * 16;
620 | while (angle > 360 * 16)
621 | angle -= 360 * 16;
622 |
623 | return angle;
624 | }
625 |
626 | void GLWidget::animate()
627 | {
628 | /* Increment wrt inertia */
629 | if (m_rotateOn)
630 | {
631 | m_xRot = qNormalizeAngle(m_xRot + (8 * m_yInertia));
632 | m_yRot = qNormalizeAngle(m_yRot + (8 * m_xInertia));
633 | }
634 |
635 | /* Colour swapping effect shader */
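/* Ramp the blue and green swap amounts in opposite directions each frame,
reversing direction when either reaches roughly 0.1 or 0.9 */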
636 | if(m_colourSwapDirUpwards)
637 | {
638 | if((m_colourComponentSwapB.z() < 0.1) || (m_colourComponentSwapG.z() > 0.9))
639 | {
640 | m_colourSwapDirUpwards = false;
641 | }
642 | else
643 | {
644 | m_colourComponentSwapB.setZ(m_colourComponentSwapB.z() - 0.01);
645 | m_colourComponentSwapG.setZ(m_colourComponentSwapG.z() + 0.01);
646 | }
647 | }
648 | else
649 | {
650 | if((m_colourComponentSwapB.z() > 0.9) || (m_colourComponentSwapG.z() < 0.1))
651 | {
652 | m_colourSwapDirUpwards = true;
653 | }
654 | else
655 | {
656 | m_colourComponentSwapB.setZ(m_colourComponentSwapB.z() + 0.01);
657 | m_colourComponentSwapG.setZ(m_colourComponentSwapG.z() - 0.01);
658 | }
659 | }
660 |
661 | update();
662 | }
663 |
664 | // Input events
665 | void GLWidget::cycleVidShaderSlot()
666 | {
667 | int lastVidDrawn = this->m_vidTextures.size() - 1;
668 | if (this->m_vidTextures[lastVidDrawn].effect >= VidShaderLast)
669 | this->m_vidTextures[lastVidDrawn].effect = VidShaderFirst;
670 | else
671 | this->m_vidTextures[lastVidDrawn].effect = (VidShaderEffectType) ((int) this->m_vidTextures[lastVidDrawn].effect + 1);
672 |
673 | setAppropriateVidShader(lastVidDrawn);
674 | this->m_vidTextures[lastVidDrawn].shader->bind();
675 | printOpenGLError(__FILE__, __LINE__);
676 | // Setting shader variables here will have no effect as they are set on every render,
677 | // but do it to check for errors, so we don't need to check on every render
678 | // and the program output doesn't get flooded with errors
679 | setVidShaderVars(lastVidDrawn, true);
680 |
681 | LOG(LOG_GL, Logger::Debug1, "vid shader for vid %d now set to %d",
682 | lastVidDrawn, this->m_vidTextures[lastVidDrawn].effect);
683 | }
684 |
685 | void GLWidget::cycleModelShaderSlot()
686 | {
687 | if (m_currentModelEffectIndex >= ModelEffectLast)
688 | m_currentModelEffectIndex = ModelEffectFirst;
689 | else
690 | m_currentModelEffectIndex = (ModelEffectType) ((int) m_currentModelEffectIndex + 1);
691 |
692 | LOG(LOG_GL, Logger::Debug1, "model shader now set to %d", m_currentModelEffectIndex);
693 | }
694 |
695 | void GLWidget::showYUVWindowSlot()
696 | {
697 | #ifdef ENABLE_YUV_WINDOW
698 | #ifdef HIDE_GL_WHEN_MODAL_OPEN
699 | QSize currentSize = this->size();
700 | this->resize(0, 0);
701 | #endif
702 |
703 | m_yuvWindow->show();
704 |
705 | #ifdef HIDE_GL_WHEN_MODAL_OPEN
706 | this->resize(currentSize);
707 | #endif
708 | #endif
709 | }
710 |
711 | void GLWidget::loadVideoSlot()
712 | {
713 | #ifdef HIDE_GL_WHEN_MODAL_OPEN
714 | QSize currentSize = this->size();
715 | this->resize(0, 0);
716 | #endif
717 |
718 | int lastVidDrawn = this->m_vidTextures.size() - 1;
719 |
720 | QString newFileName = QFileDialog::getOpenFileName(0, "Select a video file",
721 | m_dataFilesDir + "videos/", "Videos (*.avi *.mkv *.ogg *.asf *.mov);;All (*.*)");
722 | if(newFileName.isNull() == false)
723 | {
724 | this->m_videoLoc[lastVidDrawn] = newFileName;
725 |
726 | //this->m_vidPipelines[lastVidDrawn]->setChooseNewOnFinished();
727 | this->m_vidPipelines[lastVidDrawn]->Stop();
728 | }
729 |
730 | #ifdef HIDE_GL_WHEN_MODAL_OPEN
731 | this->resize(currentSize);
732 | #endif
733 | }
734 |
735 | void GLWidget::loadModelSlot()
736 | {
737 | #ifdef HIDE_GL_WHEN_MODAL_OPEN
738 | QSize currentSize = this->size();
739 | this->resize(0, 0);
740 | #endif
741 |
742 | // Load a Wavefront OBJ model file. Get the filename before doing anything else
743 | QString objFileName = QFileDialog::getOpenFileName(0, "Select a Wavefront OBJ file",
744 | m_dataFilesDir + "models/", "Wavefront OBJ (*.obj)");
745 | if(objFileName.isNull() == false)
746 | {
747 | if(m_model->Load(objFileName) != 0)
748 | {
749 | LOG(LOG_GL, Logger::Error, "Couldn't load obj model file %s", objFileName.toUtf8().constData());
750 | }
751 | m_model->SetScale(MODEL_BOUNDARY_SIZE);
752 | }
753 |
754 | #ifdef HIDE_GL_WHEN_MODAL_OPEN
755 | this->resize(currentSize);
756 | #endif
757 | }
758 |
759 | void GLWidget::loadAlphaSlot()
760 | {
761 | #ifdef HIDE_GL_WHEN_MODAL_OPEN
762 | QSize currentSize = this->size();
763 | this->resize(0, 0);
764 | #endif
765 |
766 | // Load an alpha mask texture. Get the filename before doing anything else
767 | QString alphaTexFileName = QFileDialog::getOpenFileName(0, "Select an image file",
768 | m_dataFilesDir + "alphamasks/", "Pictures (*.bmp *.jpg *.jpeg *.gif);;All (*.*)");
769 | if(alphaTexFileName.isNull() == false)
770 | {
771 | QImage alphaTexImage(alphaTexFileName);
772 | if(alphaTexImage.isNull() == false)
773 | {
774 | // Ok, a new image is loaded
775 | if(m_alphaTextureLoaded)
776 | {
777 | // Delete the old texture
778 | m_alphaTextureLoaded = false;
779 | deleteTexture(m_alphaTextureId);
780 | }
781 |
782 | // Bind new image to texture
783 | m_alphaTextureId = bindTexture(alphaTexImage.mirrored(true, true), GL_RECT_TEXTURE_2D);
784 | m_alphaTexWidth = alphaTexImage.width();
785 | m_alphaTexHeight = alphaTexImage.height();
786 | // Update alpha tex co-ords in shader in case it is active:
787 | setVidShaderVars((this->m_vidTextures.size() - 1), true);
788 | m_alphaTextureLoaded = true;
789 | }
790 | }
791 |
792 | #ifdef HIDE_GL_WHEN_MODAL_OPEN
793 | this->resize(currentSize);
794 | #endif
795 | }
796 |
797 | void GLWidget::rotateToggleSlot(bool toggleState)
798 | {
799 | m_rotateOn = toggleState;
800 |
801 | if (!m_rotateOn)
802 | {
803 | m_xInertiaOld = m_xInertia;
804 | m_yInertiaOld = m_yInertia;
805 | }
806 | else
807 | {
808 | m_xInertia = m_xInertiaOld;
809 | m_yInertia = m_yInertiaOld;
810 |
811 | // To prevent confusion, force some rotation
812 | if ((m_xInertia == 0.0) && (m_yInertia == 0.0))
813 | m_xInertia = -0.5;
814 | }
815 | }
816 |
817 | void GLWidget::stackVidsToggleSlot(int toggleState)
818 | {
819 | if(toggleState == Qt::Checked)
820 | m_stackVidQuads = true;
821 | else
822 | m_stackVidQuads = false;
823 | }
824 |
825 | void GLWidget::cycleBackgroundSlot()
826 | {
827 | switch( m_clearColorIndex++ )
828 | {
829 | case 0: qglClearColor(QColor(Qt::black));
830 | break;
831 | case 1: qglClearColor(QColor::fromRgbF(0.2f, 0.2f, 0.3f, 1.0f));
832 | break;
833 | default: qglClearColor(QColor::fromRgbF(0.7f, 0.7f, 0.7f, 1.0f));
834 | m_clearColorIndex = 0;
835 | break;
836 | }
837 | }
838 |
839 | void GLWidget::resetPosSlot()
840 | {
841 | m_xRot = 0;
842 | m_yRot = 35;
843 | m_zRot = 0;
844 | m_xLastIncr = 0;
845 | m_yLastIncr = 0;
846 | m_xInertia = -0.5;
847 | m_yInertia = 0;
848 | m_scaleValue = 1.0;
849 | }
850 |
851 | void GLWidget::exitSlot()
852 | {
853 | close();
854 | }
855 |
856 |
857 |
858 | void GLWidget::mousePressEvent(QMouseEvent *event)
859 | {
860 | m_lastPos = event->pos();
861 |
862 | if (event->button() == Qt::LeftButton)
863 | {
864 | m_xInertia = 0;
865 | m_yInertia = 0;
866 |
867 | m_xLastIncr = 0;
868 | m_yLastIncr = 0;
869 | }
870 | }
871 |
872 | void GLWidget::mouseReleaseEvent(QMouseEvent *event)
873 | {
874 | if (event->button() == Qt::LeftButton)
875 | {
876 | // Left button released
877 | m_lastPos.setX(-1);
878 | m_lastPos.setY(-1);
879 |
880 | if (m_xLastIncr > INERTIA_THRESHOLD)
881 | m_xInertia = (m_xLastIncr - INERTIA_THRESHOLD)*INERTIA_FACTOR;
882 |
883 | if (-m_xLastIncr > INERTIA_THRESHOLD)
884 | m_xInertia = (m_xLastIncr + INERTIA_THRESHOLD)*INERTIA_FACTOR;
885 |
886 | if (m_yLastIncr > INERTIA_THRESHOLD)
887 | m_yInertia = (m_yLastIncr - INERTIA_THRESHOLD)*INERTIA_FACTOR;
888 |
889 | if (-m_yLastIncr > INERTIA_THRESHOLD)
890 | m_yInertia = (m_yLastIncr + INERTIA_THRESHOLD)*INERTIA_FACTOR;
891 |
892 | }
893 | }
894 |
895 | void GLWidget::mouseMoveEvent(QMouseEvent *event)
896 | {
897 | if((m_lastPos.x() != -1) && (m_lastPos.y() != -1))
898 | {
899 | m_xLastIncr = event->x() - m_lastPos.x();
900 | m_yLastIncr = event->y() - m_lastPos.y();
901 |
902 | if ((event->modifiers() & Qt::ControlModifier)
903 | || (event->buttons() & Qt::RightButton))
904 | {
905 | if (m_lastPos.x() != -1)
906 | {
907 | m_zRot = qNormalizeAngle(m_zRot + (8 * m_xLastIncr));
908 | m_scaleValue += (m_yLastIncr)*SCALE_FACTOR;
909 | update();
910 | }
911 | }
912 | else
913 | {
914 | if (m_lastPos.x() != -1)
915 | {
916 | m_xRot = qNormalizeAngle(m_xRot + (8 * m_yLastIncr));
917 | m_yRot = qNormalizeAngle(m_yRot + (8 * m_xLastIncr));
918 | update();
919 | }
920 | }
921 | }
922 |
923 | m_lastPos = event->pos();
924 | }
925 |
926 | void GLWidget::keyPressEvent(QKeyEvent *e)
927 | {
928 | switch(e->key())
929 | {
930 | case Qt::Key_Question:
931 | case Qt::Key_H:
932 | std::cout << "\nKeyboard commands:\n\n"
933 | "? - Help\n"
934 | "q, - Quit\n"
935 | "b - Toggle among background clear colors\n"
936 | "m - Load a different model to render\n"
937 | "s - "
938 | "a - "
939 | "v - "
940 | "o - "
941 | "p - "
942 | " - reset zoom and rotation\n"
943 | " or - stop rotation\n"
944 | "<+>, <-> or - zoom model\n"
945 | " or - rotate model\n"
946 | #ifdef ENABLE_YUV_WINDOW
947 | "y - View yuv data of vid 0 in modeless window"
948 | #endif
949 | "\n";
950 | break;
951 | case Qt::Key_Escape:
952 | case Qt::Key_Q:
953 | exitSlot();
954 | break;
955 |
956 | case Qt::Key_B:
957 | cycleBackgroundSlot();
958 | break;
959 |
960 | case Qt::Key_S:
961 | cycleVidShaderSlot();
962 | break;
963 | case Qt::Key_A:
964 | loadAlphaSlot();
965 | break;
966 | case Qt::Key_M:
967 | loadModelSlot();
968 | break;
969 | case Qt::Key_V:
970 | loadVideoSlot();
971 | break;
972 | case Qt::Key_O:
973 | cycleModelShaderSlot();
974 | break;
975 | case Qt::Key_P:
976 | // Decouple bool used within class from Qt check box state enum values
977 | stackVidsToggleSlot(m_stackVidQuads ? Qt::Unchecked : Qt::Checked);
978 | emit stackVidsStateChanged(m_stackVidQuads ? Qt::Checked : Qt::Unchecked);
979 | break;
980 |
981 | case Qt::Key_Space:
982 | rotateToggleSlot(m_rotateOn ? false : true); //Qt::Unchecked : Qt::Checked)
983 | emit rotateStateChanged(m_rotateOn);// ? Qt::Checked : Qt::Unchecked);
984 | break;
985 | case Qt::Key_Plus:
986 | m_scaleValue += SCALE_INCREMENT;
987 | break;
988 | case Qt::Key_Minus:
989 | m_scaleValue -= SCALE_INCREMENT;
990 | break;
991 | case Qt::Key_Home:
992 | resetPosSlot();
993 | break;
994 | case Qt::Key_Left:
995 | m_yRot -= 8;
996 | break;
997 | case Qt::Key_Right:
998 | m_yRot += 8;
999 | break;
1000 | case Qt::Key_Up:
1001 | m_xRot -= 8;
1002 | break;
1003 | case Qt::Key_Down:
1004 | m_xRot += 8;
1005 | break;
1006 |
1007 | #ifdef ENABLE_YUV_WINDOW
1008 | case Qt::Key_Y:
1009 | showYUVWindowSlot();
1010 | break;
1011 | #endif
1012 |
1013 | default:
1014 | QGLWidget::keyPressEvent(e);
1015 | break;
1016 | }
1017 | }
1018 |
1019 | void GLWidget::closeEvent(QCloseEvent* event)
1020 | {
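// Two-phase close: on the first close request, ask all pipelines to stop and ignore
// the event; once every pipeline has finished, pipelineFinished() calls close() again
// and this time the widget (and its parent) really shut down.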
1021 | if(this->m_closing == false)
1022 | {
1023 | this->m_closing = true;
1024 | emit closeRequested();
1025 |
1026 | // Just in case, check now if any gst threads still exist, if not, close application now
1027 | bool allFinished = true;
1028 | for(int i = 0; i < this->m_vidPipelines.size(); i++)
1029 | {
1030 | if(this->m_vidPipelines[i] != NULL)
1031 | {
1032 | allFinished = false;
1033 | break;
1034 | }
1035 | }
1036 | if(allFinished)
1037 | {
1038 | close();
1039 | }
1040 | event->ignore();
1041 | }
1042 | else
1043 | {
1044 | // This is where we're all finished and really are closing now.
1045 | // For now, tell the parent to close too.
1046 | QWidget* _parent = dynamic_cast<QWidget*>(parent());
1047 | if(_parent)
1048 | _parent->close();
1049 | }
1050 | }
1051 |
1052 | // Shader management
1053 | void GLWidget::setAppropriateVidShader(int vidIx)
1054 | {
1055 | switch(this->m_vidTextures[vidIx].colourFormat)
1056 | {
1057 | #ifdef VIDI420_SHADERS_NEEDED
1058 | case ColFmt_I420:
1059 | switch(this->m_vidTextures[vidIx].effect)
1060 | {
1061 | case VidShaderNoEffect:
1062 | this->m_vidTextures[vidIx].shader = &m_I420NoEffect;
1063 | break;
1064 | case VidShaderNoEffectNormalisedTexCoords:
1065 | this->m_vidTextures[vidIx].shader = &m_I420NoEffectNormalised;
1066 | break;
1067 | case VidShaderLit:
1068 | this->m_vidTextures[vidIx].shader = &m_I420Lit;
1069 | break;
1070 | case VidShaderLitNormalisedTexCoords:
1071 | this->m_vidTextures[vidIx].shader = &m_I420LitNormalised;
1072 | break;
1073 | case VidShaderColourHilight:
1074 | this->m_vidTextures[vidIx].shader = &m_I420ColourHilight;
1075 | break;
1076 | case VidShaderColourHilightSwap:
1077 | this->m_vidTextures[vidIx].shader = &m_I420ColourHilightSwap;
1078 | break;
1079 | case VidShaderAlphaMask:
1080 | this->m_vidTextures[vidIx].shader = &m_I420AlphaMask;
1081 | break;
1082 | }
1083 | break;
1084 | #endif
1085 | #ifdef VIDUYVY_SHADERS_NEEDED
1086 | case ColFmt_UYVY:
1087 | switch(this->m_vidTextures[vidIx].effect)
1088 | {
1089 | case VidShaderNoEffect:
1090 | this->m_vidTextures[vidIx].shader = &m_UYVYNoEffect;
1091 | break;
1092 | case VidShaderNoEffectNormalisedTexCoords:
1093 | this->m_vidTextures[vidIx].shader = &m_UYVYNoEffectNormalised;
1094 | break;
1095 | case VidShaderLit:
1096 | this->m_vidTextures[vidIx].shader = &m_UYVYLit;
1097 | break;
1098 | case VidShaderLitNormalisedTexCoords:
1099 | this->m_vidTextures[vidIx].shader = &m_UYVYLitNormalised;
1100 | break;
1101 | case VidShaderColourHilight:
1102 | this->m_vidTextures[vidIx].shader = &m_UYVYColourHilight;
1103 | break;
1104 | case VidShaderColourHilightSwap:
1105 | this->m_vidTextures[vidIx].shader = &m_UYVYColourHilightSwap;
1106 | break;
1107 | case VidShaderAlphaMask:
1108 | this->m_vidTextures[vidIx].shader = &m_UYVYAlphaMask;
1109 | break;
1110 | }
1111 | break;
1112 | #endif
1113 | default:
1114 | LOG(LOG_GL, Logger::Error, "Haven't implemented a shader for colour format %d yet, or its not enabled in the build",
1115 | this->m_vidTextures[vidIx].colourFormat);
1116 | break;
1117 | }
1118 | }
1119 |
1120 | // The shader must already be fully set up for the specified video texture
1121 | // before this function is called.
1122 | void GLWidget::setVidShaderVars(int vidIx, bool printErrors)
1123 | {
1124 | // TODO: move common vars out of switch
1125 |
1126 | switch(this->m_vidTextures[vidIx].effect)
1127 | {
1128 | case VidShaderNoEffect:
1129 | case VidShaderNoEffectNormalisedTexCoords:
1130 | // Temp:
1131 | printOpenGLError(__FILE__, __LINE__);
1132 |
1133 | this->m_vidTextures[vidIx].shader->setUniformValue("u_vidTexture", 0); // texture unit index
1134 | // Temp:
1135 | printOpenGLError(__FILE__, __LINE__);
1136 | this->m_vidTextures[vidIx].shader->setUniformValue("u_yHeight", (GLfloat)this->m_vidTextures[vidIx].height);
1137 | // Temp:
1138 | printOpenGLError(__FILE__, __LINE__);
1139 | this->m_vidTextures[vidIx].shader->setUniformValue("u_yWidth", (GLfloat)this->m_vidTextures[vidIx].width);
1140 |
1141 | if(printErrors) printOpenGLError(__FILE__, __LINE__);
1142 | break;
1143 |
1144 | case VidShaderLit:
1145 | case VidShaderLitNormalisedTexCoords:
1146 | this->m_vidTextures[vidIx].shader->setUniformValue("u_vidTexture", 0); // texture unit index
1147 | this->m_vidTextures[vidIx].shader->setUniformValue("u_yHeight", (GLfloat)this->m_vidTextures[vidIx].height);
1148 | this->m_vidTextures[vidIx].shader->setUniformValue("u_yWidth", (GLfloat)this->m_vidTextures[vidIx].width);
1149 |
1150 | this->m_vidTextures[vidIx].shader->setUniformValue("u_lightPosition", QVector3D(0.0, 0.0, 4.0));
1151 |
1152 | if(printErrors) printOpenGLError(__FILE__, __LINE__);
1153 | break;
1154 |
1155 | case VidShaderColourHilight:
1156 | this->m_vidTextures[vidIx].shader->setUniformValue("u_vidTexture", 0); // texture unit index
1157 | this->m_vidTextures[vidIx].shader->setUniformValue("u_yHeight", (GLfloat)this->m_vidTextures[vidIx].height);
1158 | this->m_vidTextures[vidIx].shader->setUniformValue("u_yWidth", (GLfloat)this->m_vidTextures[vidIx].width);
1159 | this->m_vidTextures[vidIx].shader->setUniformValue("u_colrToDisplayMin", m_colourHilightRangeMin);
1160 | this->m_vidTextures[vidIx].shader->setUniformValue("u_colrToDisplayMax", m_colourHilightRangeMax);
1161 | if(printErrors) printOpenGLError(__FILE__, __LINE__);
1162 | break;
1163 |
1164 | case VidShaderColourHilightSwap:
1165 | this->m_vidTextures[vidIx].shader->setUniformValue("u_vidTexture", 0); // texture unit index
1166 | this->m_vidTextures[vidIx].shader->setUniformValue("u_yHeight", (GLfloat)this->m_vidTextures[vidIx].height);
1167 | this->m_vidTextures[vidIx].shader->setUniformValue("u_yWidth", (GLfloat)this->m_vidTextures[vidIx].width);
1168 | this->m_vidTextures[vidIx].shader->setUniformValue("u_colrToDisplayMin", m_colourHilightRangeMin);
1169 | this->m_vidTextures[vidIx].shader->setUniformValue("u_colrToDisplayMax", m_colourHilightRangeMax);
1170 | this->m_vidTextures[vidIx].shader->setUniformValue("u_componentSwapR", m_colourComponentSwapR);
1171 | this->m_vidTextures[vidIx].shader->setUniformValue("u_componentSwapG", m_colourComponentSwapG);
1172 | this->m_vidTextures[vidIx].shader->setUniformValue("u_componentSwapB", m_colourComponentSwapB);
1173 | if(printErrors) printOpenGLError(__FILE__, __LINE__);
1174 | break;
1175 |
1176 | case VidShaderAlphaMask:
1177 | this->m_vidTextures[vidIx].shader->setUniformValue("u_vidTexture", 0); // texture unit index
1178 | this->m_vidTextures[vidIx].shader->setUniformValue("u_yHeight", (GLfloat)this->m_vidTextures[vidIx].height);
1179 | this->m_vidTextures[vidIx].shader->setUniformValue("u_yWidth", (GLfloat)this->m_vidTextures[vidIx].width);
1180 | this->m_vidTextures[vidIx].shader->setUniformValue("u_alphaTexture", 1); // texture unit index
1181 | if(printErrors) printOpenGLError(__FILE__, __LINE__);
1182 | #ifdef TEXCOORDS_ALREADY_NORMALISED
1183 | this->m_vidTextures[vidIx].triStripAlphaTexCoords[0] = QVector2D(1.0f, 0.0f);
1184 | this->m_vidTextures[vidIx].triStripAlphaTexCoords[1] = QVector2D(0.0f, 0.0f);
1185 | this->m_vidTextures[vidIx].triStripAlphaTexCoords[2] = QVector2D(1.0f, 1.0f);
1186 | this->m_vidTextures[vidIx].triStripAlphaTexCoords[3] = QVector2D(0.0f, 1.0f);
1187 | #else
1188 | this->m_vidTextures[vidIx].triStripAlphaTexCoords[0] = QVector2D(m_alphaTexWidth, 0.0f);
1189 | this->m_vidTextures[vidIx].triStripAlphaTexCoords[1] = QVector2D(0.0f, 0.0f);
1190 | this->m_vidTextures[vidIx].triStripAlphaTexCoords[2] = QVector2D(m_alphaTexWidth, m_alphaTexHeight);
1191 | this->m_vidTextures[vidIx].triStripAlphaTexCoords[3] = QVector2D(0.0f, m_alphaTexHeight);
1192 | #endif
1193 | break;
1194 |
1195 | default:
1196 | LOG(LOG_GLSHADERS, Logger::Warning, "Invalid effect set on vidIx %d", vidIx);
1197 | break;
1198 | }
1199 | }
1200 |
1201 | int GLWidget::loadShaderFile(QString fileName, QString &shaderSource)
1202 | {
1203 | fileName = m_dataFilesDir + fileName;
1204 |
1205 | shaderSource.clear();
1206 | QFile file(fileName);
1207 | if (!file.open(QIODevice::ReadOnly | QIODevice::Text))
1208 | {
1209 | LOG(LOG_GLSHADERS, Logger::Error, "File '%s' does not exist!", qPrintable(fileName));
1210 | return -1;
1211 | }
1212 |
1213 | QTextStream in(&file);
1214 | while (!in.atEnd())
1215 | {
1216 | shaderSource += in.readLine();
1217 | shaderSource += "\n";
1218 | }
1219 |
1220 | return 0;
1221 | }
1222 |
1223 | int GLWidget::setupShader(QGLShaderProgram *prog, GLShaderModule shaderList[], int listLen)
1224 | {
1225 | bool ret;
1226 |
1227 | LOG(LOG_GLSHADERS, Logger::Debug1, "-- Setting up a new full shader: --");
1228 |
1229 | QString shaderSource;
1230 | QString shaderSourceFileNames;
1231 | QString fullShaderSourceFileNames;
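// Concatenate every vertex-stage source file in the list into one source string and
// compile it as a single vertex shader; the fragment stage is handled the same way below.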
1232 | for(int listIx = 0; listIx < listLen; listIx++)
1233 | {
1234 | if(shaderList[listIx].type == QGLShader::Vertex)
1235 | {
1236 | QString nextShaderSource;
1237 |
1238 | LOG(LOG_GLSHADERS, Logger::Debug1, "concatenating %s", shaderList[listIx].sourceFileName);
1239 | shaderSourceFileNames += shaderList[listIx].sourceFileName;
1240 | shaderSourceFileNames += ", ";
1241 | fullShaderSourceFileNames += shaderList[listIx].sourceFileName;
1242 | fullShaderSourceFileNames += ", ";
1243 |
1244 | ret = loadShaderFile(shaderList[listIx].sourceFileName, nextShaderSource);
1245 | if(ret != 0)
1246 | {
1247 | return ret;
1248 | }
1249 |
1250 | shaderSource += nextShaderSource;
1251 | }
1252 | }
1253 |
1254 | if(!shaderSource.isEmpty())
1255 | {
1256 | LOG(LOG_GLSHADERS, Logger::Debug1, "compiling vertex shader");
1257 |
1258 | ret = prog->addShaderFromSourceCode(QGLShader::Vertex, shaderSource);
1259 |
1260 | if(ret == false)
1261 | {
1262 | LOG(LOG_GLSHADERS, Logger::Error, "Compile log for vertex shader sources %s:\n%s\n",
1263 | shaderSourceFileNames.toUtf8().constData(),
1264 | prog->log().toUtf8().constData());
1265 | return -1;
1266 | }
1267 | }
1268 |
1269 | shaderSource.clear();
1270 | shaderSourceFileNames.clear();
1271 |
1272 | for(int listIx = 0; listIx < listLen; listIx++)
1273 | {
1274 | if(shaderList[listIx].type == QGLShader::Fragment)
1275 | {
1276 | QString nextShaderSource;
1277 |
1278 | LOG(LOG_GLSHADERS, Logger::Debug1, "concatenating %s", shaderList[listIx].sourceFileName);
1279 | shaderSourceFileNames += shaderList[listIx].sourceFileName;
1280 | shaderSourceFileNames += ", ";
1281 | fullShaderSourceFileNames += shaderList[listIx].sourceFileName;
1282 | fullShaderSourceFileNames += ", ";
1283 |
1284 | ret = loadShaderFile(shaderList[listIx].sourceFileName, nextShaderSource);
1285 | if(ret != 0)
1286 | {
1287 | return ret;
1288 | }
1289 |
1290 | shaderSource += nextShaderSource;
1291 | }
1292 | }
1293 |
1294 | if(!shaderSource.isEmpty())
1295 | {
1296 | LOG(LOG_GLSHADERS, Logger::Debug1, "compiling fragment shader");
1297 |
1298 | ret = prog->addShaderFromSourceCode(QGLShader::Fragment, shaderSource);
1299 |
1300 | if(ret == false)
1301 | {
1302 | LOG(LOG_GLSHADERS, Logger::Error, "Compile log for fragment shader sources %s:\n%s\n",
1303 | shaderSourceFileNames.toUtf8().constData(),
1304 | prog->log().toUtf8().constData());
1305 | return -1;
1306 | }
1307 | }
1308 |
1309 | ret = prog->link();
1310 | if(ret == false)
1311 | {
1312 | LOG(LOG_GLSHADERS, Logger::Error, "Link log for shader sources %s:\n%s\n",
1313 | fullShaderSourceFileNames.toUtf8().constData(),
1314 | prog->log().toUtf8().constData());
1315 | return -1;
1316 | }
1317 |
1318 | ret = prog->bind();
1319 | if(ret == false)
1320 | {
1321 | LOG(LOG_GLSHADERS, Logger::Error, "Error binding shader from sources %s",
1322 | fullShaderSourceFileNames.toUtf8().constData());
1323 | return -1;
1324 | }
1325 |
1326 | printOpenGLError(__FILE__, __LINE__);
1327 |
1328 | return 0;
1329 | }
1330 |
1331 |
1332 | int GLWidget::printOpenGLError(const char *file, int line)
1333 | {
1334 | //
1335 | // Returns 1 if an OpenGL error occurred, 0 otherwise.
1336 | //
1337 | GLenum glErr;
1338 | int retCode = 0;
1339 |
1340 | glErr = glGetError();
1341 | while (glErr != GL_NO_ERROR)
1342 | {
1343 | #ifdef GLU_NEEDED
1344 | LOG(LOG_GL, Logger::Error, "glError in file %s:%d : %s", file, line, (const char *)gluErrorString(glErr));
1345 | #else
1346 | LOG(LOG_GL, Logger::Error, "glError in file %s:%d : %d", file, line, glErr);
1347 | #endif
1348 | retCode = 1;
1349 | glErr = glGetError();
1350 | }
1351 | return retCode;
1352 | }
1353 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/glwidget.h:
--------------------------------------------------------------------------------
1 | #ifndef GLWIDGET_H
2 | #define GLWIDGET_H
3 |
4 | #define ENABLE_YUV_WINDOW 1
5 |
6 | #include
7 | #include
8 | #include
9 | #include
10 | #include
11 | #include
12 | #include
13 | #include
14 | #include
15 | #include
16 | #include
17 | #include
18 |
19 | #include
20 |
21 | #include "pipeline.h"
22 |
23 | #include "model.h"
24 |
25 | #ifdef ENABLE_YUV_WINDOW
26 | #include "yuvdebugwindow.h"
27 | #endif
28 |
29 | #ifdef IMGTEX_EXT_NEEDED
30 | #include "GLES2/gl2ext.h"
31 | #endif
32 |
33 | // Handle texture extensions on different platforms with some generic
34 | // definitions here:
35 | #ifdef RECTTEX_EXT_NEEDED
36 | #define GL_RECT_TEXTURE_2D GL_TEXTURE_RECTANGLE_ARB
37 | #define GL_RECT_TEXTURE0 GL_TEXTURE0_ARB
38 | #define GL_RECT_TEXTURE1 GL_TEXTURE1_ARB
39 | #define GL_RECT_VID_TEXTURE_2D GL_TEXTURE_RECTANGLE_ARB
40 | #define GL_RECT_VID_TEXTURE0 GL_TEXTURE0_ARB
41 | #define GL_RECT_VID_TEXTURE1 GL_TEXTURE1_ARB
42 | #define VIDCONV_FRAG_SHADER_SUFFIX "-recttex"
43 | #elif IMGTEX_EXT_NEEDED
44 | #define GL_RECT_TEXTURE_2D GL_TEXTURE_2D
45 | #define GL_RECT_TEXTURE0 GL_TEXTURE0
46 | #define GL_RECT_TEXTURE1 GL_TEXTURE1
47 | #define GL_RECT_VID_TEXTURE_2D GL_TEXTURE_STREAM_IMG
48 | #define GL_RECT_VID_TEXTURE0 GL_TEXTURE0
49 | #define GL_RECT_VID_TEXTURE1 GL_TEXTURE1
50 | #define VIDCONV_FRAG_SHADER_SUFFIX "-imgstream"
51 | #else
52 | #define GL_RECT_TEXTURE_2D GL_TEXTURE_2D
53 | #define GL_RECT_TEXTURE0 GL_TEXTURE0
54 | #define GL_RECT_TEXTURE1 GL_TEXTURE1
55 | #define GL_RECT_VID_TEXTURE_2D GL_TEXTURE_2D
56 | #define GL_RECT_VID_TEXTURE0 GL_TEXTURE0
57 | #define GL_RECT_VID_TEXTURE1 GL_TEXTURE1
58 | #define VIDCONV_FRAG_SHADER_SUFFIX ""
59 | #endif
60 |
61 |
62 | #define INERTIA_THRESHOLD 1.0f
63 | #define INERTIA_FACTOR 0.5f
64 | #define SCALE_FACTOR 0.01f
65 | #define SCALE_INCREMENT 0.5f
66 |
67 | #define DATA_DIR_ENV_VAR_NAME "QTGLGST_DATA_DIR"
68 |
69 | #define DFLT_OBJ_MODEL_FILE_NAME "models/sphere.obj"
70 | #define MODEL_BOUNDARY_SIZE 2.0f
71 |
72 | typedef enum
73 | {
74 | ModelEffectFirst = 0,
75 | ModelEffectBrick = 0,
76 | ModelEffectVideo = 1,
77 | ModelEffectVideoLit = 2,
78 | ModelEffectLast = 2,
79 | } ModelEffectType;
80 |
81 | typedef enum
82 | {
83 | VidShaderFirst = 0,
84 | VidShaderNoEffect = 0,
85 | VidShaderColourHilight = 1,
86 | VidShaderColourHilightSwap = 2,
87 | VidShaderAlphaMask = 3,
88 | VidShaderLast = 3,
89 | // Any shaders after last should not be toggled through with "next shader" key:
90 | VidShaderNoEffectNormalisedTexCoords = 4,
91 | VidShaderLitNormalisedTexCoords = 5,
92 | VidShaderLit = 6,
93 | } VidShaderEffectType;
94 |
95 | #define NUM_VIDTEXTURE_VERTICES_X 2
96 | #define NUM_VIDTEXTURE_VERTICES_Y 2
97 | #define VIDTEXTURE_LEFT_X -1.3f
98 | #define VIDTEXTURE_RIGHT_X 1.3f
99 | #define VIDTEXTURE_TOP_Y 1.0f
100 | #define VIDTEXTURE_BOT_Y -1.0f
101 |
102 | typedef struct _VidTextureInfo
103 | {
104 | GLuint texId;
105 | void *buffer;
106 | bool texInfoValid;
107 | int width;
108 | int height;
109 | ColFormat colourFormat;
110 | QGLShaderProgram *shader;
111 | VidShaderEffectType effect;
112 |
113 | QVector2D triStripVertices[NUM_VIDTEXTURE_VERTICES_X * NUM_VIDTEXTURE_VERTICES_Y];
114 | QVector2D triStripTexCoords[NUM_VIDTEXTURE_VERTICES_X * NUM_VIDTEXTURE_VERTICES_Y];
115 | QVector2D triStripAlphaTexCoords[NUM_VIDTEXTURE_VERTICES_X * NUM_VIDTEXTURE_VERTICES_Y];
116 |
117 | int frameCount;
118 | } VidTextureInfo;
119 |
120 | typedef struct _GLShaderModule
121 | {
122 | const char *sourceFileName;
123 | QGLShader::ShaderType type;
124 | } GLShaderModule;
125 |
126 | class GLWidget : public QGLWidget
127 | {
128 | Q_OBJECT
129 | public:
130 | explicit GLWidget(int argc, char *argv[], QWidget *parent = 0);
131 | ~GLWidget();
132 |
133 | virtual void initVideo();
134 |
135 | QSize minimumSizeHint() const;
136 | QSize sizeHint() const;
137 |
138 | void setXRotation(int angle);
139 | void setYRotation(int angle);
140 | void setZRotation(int angle);
141 |
142 | Q_SIGNALS:
143 | void closeRequested();
144 | void stackVidsStateChanged(bool newState);
145 | void rotateStateChanged(bool newState);
146 | void xRotationChanged(int angle);
147 | void yRotationChanged(int angle);
148 | void zRotationChanged(int angle);
149 |
150 | public Q_SLOTS:
151 | /* Video related */
152 | void newFrame(int vidIx);
153 | void pipelineFinished(int vidIx);
154 | /* Input event handlers */
155 | void cycleVidShaderSlot();
156 | void cycleModelShaderSlot();
157 | void showYUVWindowSlot();
158 | void loadVideoSlot();
159 | void loadModelSlot();
160 | void loadAlphaSlot();
161 | void rotateToggleSlot(bool toggleState);
162 | void stackVidsToggleSlot(int toggleState);
163 | void cycleBackgroundSlot();
164 | void resetPosSlot();
165 | void exitSlot();
166 |
167 | void animate();
168 |
169 | protected:
170 | virtual void initializeGL();
171 | virtual Pipeline* createPipeline(int vidIx);
172 | void paintEvent(QPaintEvent *event);
173 | void resizeGL(int width, int height);
174 | void mousePressEvent(QMouseEvent *event);
175 | void mouseReleaseEvent(QMouseEvent *event);
176 | void mouseMoveEvent(QMouseEvent *event);
177 | void keyPressEvent(QKeyEvent *e);
178 | void closeEvent(QCloseEvent* event);
179 | virtual bool loadNewTexture(int vidIx);
180 | int printOpenGLError(const char *file, int line);
181 |
182 | QVector<QString> m_videoLoc;
183 | QVector<Pipeline*> m_vidPipelines;
184 | QVector<VidTextureInfo> m_vidTextures;
185 |
186 | private:
187 | void setAppropriateVidShader(int vidIx);
188 | void setVidShaderVars(int vidIx, bool printErrors);
189 | int loadShaderFile(QString fileName, QString &shaderSource);
190 | int setupShader(QGLShaderProgram *prog, QString baseFileName, bool vertNeeded, bool fragNeeded);
191 | int setupShader(QGLShaderProgram *prog, GLShaderModule shaderList[], int listLen);
192 | int getCallingGstVecIx(int vidIx);
193 |
194 | bool m_closing;
195 | QString m_dataFilesDir;
196 |
197 | // Camera:
198 | // Implement position later if a sky box is desired, and perhaps FPS mode
199 | // bool cameraCirclingMode;
200 | // float xPos;
201 | // float yPos;
202 | // float zPos;
203 | float m_xRot;
204 | float m_yRot;
205 | float m_zRot;
206 | QPoint m_lastPos;
207 | int m_rotateOn;
208 | int m_xLastIncr;
209 | int m_yLastIncr;
210 | float m_xInertia;
211 | float m_yInertia;
212 | float m_xInertiaOld;
213 | float m_yInertiaOld;
214 | // Deprecate:
215 | GLfloat m_scaleValue; // replace with changing zPos
216 |
217 | QMatrix4x4 m_modelViewMatrix;
218 | QMatrix4x4 m_projectionMatrix;
219 |
220 | int m_clearColorIndex;
221 | bool m_stackVidQuads;
222 | ModelEffectType m_currentModelEffectIndex;
223 |
224 | QGLShaderProgram m_brickProg;
225 | #ifdef VIDI420_SHADERS_NEEDED
226 | QGLShaderProgram m_I420NoEffectNormalised;
227 | QGLShaderProgram m_I420LitNormalised;
228 | QGLShaderProgram m_I420NoEffect;
229 | QGLShaderProgram m_I420Lit;
230 | QGLShaderProgram m_I420ColourHilight;
231 | QGLShaderProgram m_I420ColourHilightSwap;
232 | QGLShaderProgram m_I420AlphaMask;
233 | #endif
234 | #ifdef VIDUYVY_SHADERS_NEEDED
235 | QGLShaderProgram m_UYVYNoEffectNormalised;
236 | QGLShaderProgram m_UYVYLitNormalised;
237 | QGLShaderProgram m_UYVYNoEffect;
238 | QGLShaderProgram m_UYVYLit;
239 | QGLShaderProgram m_UYVYColourHilight;
240 | QGLShaderProgram m_UYVYColourHilightSwap;
241 | QGLShaderProgram m_UYVYAlphaMask;
242 | #endif
243 |
244 | // Video shader effects vars - for simplicity's sake make them general to all vids
245 | QVector4D m_colourHilightRangeMin;
246 | QVector4D m_colourHilightRangeMax;
247 | QVector4D m_colourComponentSwapR;
248 | QVector4D m_colourComponentSwapG;
249 | QVector4D m_colourComponentSwapB;
250 | bool m_colourSwapDirUpwards;
251 | GLuint m_alphaTextureId;
252 | bool m_alphaTextureLoaded;
253 | GLuint m_alphaTexWidth;
254 | GLuint m_alphaTexHeight;
255 |
256 | Model *m_model;
257 |
258 | // FPS counter
259 | int m_frames;
260 | QTime m_frameTime;
261 |
262 | #ifdef ENABLE_YUV_WINDOW
263 | YuvDebugWindow *m_yuvWindow;
264 | QVector<QRgb> m_colourMap;
265 | #endif
266 |
267 | };
268 |
269 | #endif // GLWIDGET_H
270 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/gstpipeline.cpp:
--------------------------------------------------------------------------------
1 |
2 | #include "gstpipeline.h"
3 | #include "applogger.h"
4 |
5 |
6 | GStreamerPipeline::GStreamerPipeline(int vidIx,
7 | const QString &videoLocation,
8 | const char *renderer_slot,
9 | QObject *parent)
10 | : Pipeline(vidIx, videoLocation, renderer_slot, parent),
11 | m_source(NULL),
12 | m_decodebin(NULL),
13 | m_videosink(NULL),
14 | m_audiosink(NULL),
15 | m_audioconvert(NULL),
16 | m_audioqueue(NULL),
17 | m_loop(NULL),
18 | m_bus(NULL),
19 | m_pipeline(NULL)
20 | {
21 | LOG(LOG_VIDPIPELINE, Logger::Debug1, "constructor entered");
22 |
23 | m_incomingBufThread = new GstIncomingBufThread(this, this);
24 | m_outgoingBufThread = new GstOutgoingBufThread(this, this);
25 |
26 | QObject::connect(m_incomingBufThread, SIGNAL(finished()), this, SLOT(cleanUp()));
27 |
28 | }
29 |
30 | GStreamerPipeline::~GStreamerPipeline()
31 | {
32 | }
33 |
34 | void GStreamerPipeline::Configure()
35 | {
36 | LOG(LOG_VIDPIPELINE, Logger::Debug1, "Configure entered");
37 |
38 | gst_init (NULL, NULL);
39 |
40 | #ifdef Q_WS_WIN
41 | m_loop = g_main_loop_new (NULL, FALSE);
42 | #endif
43 |
44 | /* Create the elements */
45 | this->m_pipeline = gst_pipeline_new (NULL);
46 | if(this->m_videoLocation.isEmpty())
47 | {
48 | LOG(LOG_VIDPIPELINE, Logger::Info, "No video file specified. Using video test source.");
49 | this->m_source = gst_element_factory_make ("videotestsrc", "testsrc");
50 | }
51 | else
52 | {
53 | this->m_source = gst_element_factory_make ("filesrc", "filesrc");
54 | g_object_set (G_OBJECT (this->m_source), "location", /*"video.avi"*/ m_videoLocation.toUtf8().constData(), NULL);
55 | }
56 | this->m_decodebin = gst_element_factory_make ("decodebin2", "decodebin");
57 | this->m_videosink = gst_element_factory_make ("fakesink", "videosink");
58 | this->m_audiosink = gst_element_factory_make ("alsasink", "audiosink");
59 | this->m_audioconvert = gst_element_factory_make ("audioconvert", "audioconvert");
60 | this->m_audioqueue = gst_element_factory_make ("queue", "audioqueue");
61 |
62 | if (this->m_pipeline == NULL || this->m_source == NULL || this->m_decodebin == NULL ||
63 | this->m_videosink == NULL || this->m_audiosink == NULL || this->m_audioconvert == NULL || this->m_audioqueue == NULL)
64 | g_critical ("One of the GStreamer decoding elements is missing");
65 |
66 | /* Setup the pipeline */
67 | gst_bin_add_many (GST_BIN (this->m_pipeline), this->m_source, this->m_decodebin, this->m_videosink,
68 | this->m_audiosink, this->m_audioconvert, this->m_audioqueue, /*videoqueue,*/ NULL);
69 | g_signal_connect (this->m_decodebin, "pad-added", G_CALLBACK (on_new_pad), this);
70 |
71 | /* Link the elements */
72 | gst_element_link (this->m_source, this->m_decodebin);
73 | gst_element_link (this->m_audioqueue, this->m_audioconvert);
74 | gst_element_link (this->m_audioconvert, this->m_audiosink);
75 |
76 | m_bus = gst_pipeline_get_bus(GST_PIPELINE(m_pipeline));
77 | gst_bus_add_watch(m_bus, (GstBusFunc) bus_call, this);
78 | gst_object_unref(m_bus);
79 |
80 | gst_element_set_state (this->m_pipeline, GST_STATE_PAUSED);
81 |
82 | }
83 |
84 | void GStreamerPipeline::Start()
85 | {
86 | GstStateChangeReturn ret = gst_element_set_state(GST_ELEMENT(this->m_pipeline), GST_STATE_PLAYING);
87 | if (ret == GST_STATE_CHANGE_FAILURE)
88 | {
89 | LOG(LOG_VIDPIPELINE, Logger::Error, "Failed to start up pipeline!");
90 |
91 | /* check if there is an error message with details on the bus */
92 | GstMessage* msg = gst_bus_poll(this->m_bus, GST_MESSAGE_ERROR, 0);
93 | if (msg)
94 | {
95 | GError *err = NULL;
96 | gst_message_parse_error (msg, &err, NULL);
97 | LOG(LOG_VIDPIPELINE, Logger::Error, "ERROR: %s", err->message);
98 | g_error_free (err);
99 | gst_message_unref (msg);
100 | }
101 | return;
102 | }
103 |
104 | // Start the threads:
105 | m_incomingBufThread->start();
106 | m_outgoingBufThread->start();
107 | }
108 |
109 | void GStreamerPipeline::Stop()
110 | {
111 | #ifdef Q_WS_WIN
112 | g_main_loop_quit(m_loop);
113 | #else
114 | emit stopRequested();
115 | #endif
116 | }
117 |
118 | void GStreamerPipeline::cleanUp()
119 | {
120 | gst_element_set_state(GST_ELEMENT(this->m_pipeline), GST_STATE_NULL);
121 |
122 | // Wait for both threads to finish up
123 | m_incomingBufThread->wait(QUEUE_CLEANUP_WAITTIME_MS);
124 | m_outgoingBufThread->wait(QUEUE_CLEANUP_WAITTIME_MS);
125 |
126 | GstBuffer *buf;
127 | while(this->m_incomingBufQueue.size())
128 | {
129 | this->m_incomingBufQueue.get((void**)(&buf));
130 | gst_buffer_unref(buf);
131 | }
132 | while(this->m_outgoingBufQueue.size())
133 | {
134 | this->m_outgoingBufQueue.get((void**)(&buf));
135 | gst_buffer_unref(buf);
136 | }
137 |
138 | gst_object_unref(m_pipeline);
139 |
140 | // Done
141 | m_finished = true;
142 | emit finished(m_vidIx);
143 | }
144 |
145 | void GStreamerPipeline::on_new_pad(GstElement *element,
146 | GstPad *pad,
147 | GStreamerPipeline* p)
148 | {
149 | GstPad *sinkpad;
150 | GstCaps *caps;
151 | GstStructure *str;
152 |
153 | Q_UNUSED(element);
154 |
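/* Route the newly exposed decodebin pad by its caps: video pads go to the fakesink,
whose handoff signals deliver each decoded buffer to us; audio pads go to the
audio queue for normal playback. */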
155 | caps = gst_pad_get_caps (pad);
156 | str = gst_caps_get_structure (caps, 0);
157 |
158 | if (g_strrstr (gst_structure_get_name (str), "video"))
159 | {
160 | sinkpad = gst_element_get_pad (p->m_videosink, "sink");
161 |
162 | g_object_set (G_OBJECT (p->m_videosink),
163 | "sync", TRUE,
164 | "signal-handoffs", TRUE,
165 | NULL);
166 | g_signal_connect (p->m_videosink,
167 | "preroll-handoff",
168 | G_CALLBACK(on_gst_buffer),
169 | p);
170 | g_signal_connect (p->m_videosink,
171 | "handoff",
172 | G_CALLBACK(on_gst_buffer),
173 | p);
174 | }
175 | else
176 | sinkpad = gst_element_get_pad (p->m_audioqueue, "sink");
177 |
178 | gst_caps_unref (caps);
179 |
180 | gst_pad_link (pad, sinkpad);
181 | gst_object_unref (sinkpad);
182 | }
183 |
184 | /* fakesink handoff callback */
185 | void GStreamerPipeline::on_gst_buffer(GstElement * element,
186 | GstBuffer * buf,
187 | GstPad * pad,
188 | GStreamerPipeline* p)
189 | {
190 | LOG(LOG_VIDPIPELINE, Logger::Debug2, "vid %d, element=%p, buf=%p, pad=%p, p=%p, bufdata=%p\n",
191 | p->getVidIx(), element, buf, pad, p, GST_BUFFER_DATA(buf));
192 |
193 | Q_UNUSED(pad)
194 | Q_UNUSED(element)
195 |
196 | if(p->m_vidInfoValid == false)
197 | {
198 | LOG(LOG_VIDPIPELINE, Logger::Debug1, "Received first frame of vid %d", p->getVidIx());
199 |
200 | GstCaps *caps = gst_pad_get_negotiated_caps (pad);
201 | if (caps)
202 | {
203 | GstStructure *structure = gst_caps_get_structure (caps, 0);
204 | gst_structure_get_int (structure, "width", &(p->m_width));
205 | gst_structure_get_int (structure, "height", &(p->m_height));
206 | }
207 | else
208 | {
209 | LOG(LOG_VIDPIPELINE, Logger::Error, "on_gst_buffer() - Could not get caps for pad!");
210 | }
211 |
212 | p->m_colFormat = discoverColFormat(buf);
213 | p->m_vidInfoValid = true;
214 | }
215 |
216 | /* ref then push buffer to use it in qt */
217 | gst_buffer_ref(buf);
218 | p->m_incomingBufQueue.put(buf);
219 | LOG(LOG_VIDPIPELINE, Logger::Debug2, "vid %d pushed buffer %p to incoming queue", p->getVidIx(), buf);
220 |
221 | p->NotifyNewFrame();
222 | }
223 |
224 | gboolean GStreamerPipeline::bus_call(GstBus *bus, GstMessage *msg, GStreamerPipeline* p)
225 | {
226 | Q_UNUSED(bus)
227 |
228 | switch(GST_MESSAGE_TYPE(msg))
229 | {
230 | case GST_MESSAGE_EOS:
231 | LOG(LOG_VIDPIPELINE, Logger::Debug1, "End-of-stream received. Stopping.");
232 | p->Stop();
233 | break;
234 |
235 | case GST_MESSAGE_ERROR:
236 | {
237 | gchar *debug = NULL;
238 | GError *err = NULL;
239 | gst_message_parse_error(msg, &err, &debug);
240 | LOG(LOG_VIDPIPELINE, Logger::Error, "Error: %s", err->message);
241 | g_error_free (err);
242 | if(debug)
243 | {
244 | LOG(LOG_VIDPIPELINE, Logger::Debug1, "Debug details: %s", debug);
245 | g_free(debug);
246 | }
247 | p->Stop();
248 | break;
249 | }
250 |
251 | default:
252 | break;
253 | }
254 |
255 | return TRUE;
256 | }
257 |
258 | ColFormat GStreamerPipeline::discoverColFormat(GstBuffer * buf)
259 | {
260 | // Edit for consistent style later
261 | gchar *pTmp = NULL;
262 | GstCaps *pCaps = NULL;
263 | GstStructure *pStructure = NULL;
264 | gint iDepth;
265 | gint iBitsPerPixel;
266 | gint iRedMask;
267 | gint iGreenMask;
268 | gint iBlueMask;
269 | gint iAlphaMask;
270 | ColFormat ret = ColFmt_Unknown;
271 |
272 | pTmp = gst_caps_to_string (GST_BUFFER_CAPS(buf));
273 | LOG(LOG_VIDPIPELINE, Logger::Info, "%s", pTmp);
274 | g_free (pTmp);
275 |
276 | LOG(LOG_VIDPIPELINE, Logger::Debug1, "buffer-size in bytes: %d", GST_BUFFER_SIZE (buf));
277 |
278 | pCaps = gst_buffer_get_caps (buf);
279 | pStructure = gst_caps_get_structure (pCaps, 0);
280 |
281 | if (gst_structure_has_name (pStructure, "video/x-raw-rgb"))
282 | {
283 | gst_structure_get_int (pStructure, "bpp", &iBitsPerPixel);
284 | gst_structure_get_int (pStructure, "depth", &iDepth);
285 | gst_structure_get_int (pStructure, "red_mask", &iRedMask);
286 | gst_structure_get_int (pStructure, "green_mask", &iGreenMask);
287 | gst_structure_get_int (pStructure, "blue_mask", &iBlueMask);
288 |
289 | switch (iDepth)
290 | {
291 | case 24:
292 | if (iRedMask == 0x00ff0000 &&
293 | iGreenMask == 0x0000ff00 &&
294 | iBlueMask == 0x000000ff)
295 | {
296 | LOG(LOG_VIDPIPELINE, Logger::Info, "format is RGB");
297 | ret = ColFmt_RGB888;
298 | }
299 | else if (iRedMask == 0x000000ff &&
300 | iGreenMask == 0x0000ff00 &&
301 | iBlueMask == 0x00ff0000)
302 | {
303 | LOG(LOG_VIDPIPELINE, Logger::Info, "format is BGR");
304 | ret = ColFmt_BGR888;
305 | }
306 | else
307 | {
308 | LOG(LOG_VIDPIPELINE, Logger::Info, "Unhandled 24 bit RGB-format");
309 | }
310 | break;
311 |
312 | case 32:
313 | gst_structure_get_int (pStructure, "alpha_mask", &iAlphaMask);
314 | if (iRedMask == 0xff000000 &&
315 | iGreenMask == 0x00ff0000 &&
316 | iBlueMask == 0x0000ff00)
317 | {
318 | LOG(LOG_VIDPIPELINE, Logger::Info, "format is RGBA");
319 | ret = ColFmt_ARGB8888;
320 | }
321 | else if (iRedMask == 0x00ff0000 &&
322 | iGreenMask == 0x0000ff00 &&
323 | iBlueMask == 0x000000ff)
324 | {
325 | LOG(LOG_VIDPIPELINE, Logger::Info, "format is BGRA");
326 | ret = ColFmt_BGRA8888;
327 | }
328 | else
329 | {
330 | LOG(LOG_VIDPIPELINE, Logger::Info, "Unhandled 32 bit RGB-format");
331 | }
332 | break;
333 |
334 | default :
335 | LOG(LOG_VIDPIPELINE, Logger::Warning, "Unhandled RGB-format of depth %d", iDepth);
336 | break;
337 | }
338 | }
339 | else if (gst_structure_has_name (pStructure, "video/x-raw-yuv"))
340 | {
341 | guint32 uiFourCC;
342 |
343 | gst_structure_get_fourcc (pStructure, "format", &uiFourCC);
344 |
345 | switch (uiFourCC)
346 | {
347 | case GST_MAKE_FOURCC ('I', '4', '2', '0'):
348 | LOG(LOG_VIDPIPELINE, Logger::Info, "I420 (0x%X)", uiFourCC);
349 | ret = ColFmt_I420;
350 | break;
351 |
352 | case GST_MAKE_FOURCC ('I', 'Y', 'U', 'V'):
353 | LOG(LOG_VIDPIPELINE, Logger::Info, "IYUV (0x%X)", uiFourCC);
354 | ret = ColFmt_IYUV;
355 | break;
356 |
357 | case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
358 | LOG(LOG_VIDPIPELINE, Logger::Info, "YV12 (0x%X)", uiFourCC);
359 | ret = ColFmt_YV12;
360 | break;
361 |
362 | case GST_MAKE_FOURCC ('Y', 'U', 'Y', 'V'):
363 | LOG(LOG_VIDPIPELINE, Logger::Info, "YUYV (0x%X)", uiFourCC);
364 | ret = ColFmt_YUYV;
365 | break;
366 |
367 | case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
368 | LOG(LOG_VIDPIPELINE, Logger::Info, "YUY2 (0x%X)", uiFourCC);
369 | ret = ColFmt_YUY2;
370 | break;
371 |
372 | case GST_MAKE_FOURCC ('V', '4', '2', '2'):
373 | LOG(LOG_VIDPIPELINE, Logger::Info, "V422 (0x%X)", uiFourCC);
374 | ret = ColFmt_V422;
375 | break;
376 |
377 | case GST_MAKE_FOURCC ('Y', 'U', 'N', 'V'):
378 | LOG(LOG_VIDPIPELINE, Logger::Info, "YUNV (0x%X)", uiFourCC);
379 | ret = ColFmt_YUNV;
380 | break;
381 |
382 | case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
383 | LOG(LOG_VIDPIPELINE, Logger::Info, "UYVY (0x%X)", uiFourCC);
384 | ret = ColFmt_UYVY;
385 | break;
386 |
387 | case GST_MAKE_FOURCC ('Y', '4', '2', '2'):
388 | LOG(LOG_VIDPIPELINE, Logger::Info, "Y422 (0x%X)", uiFourCC);
389 | ret = ColFmt_Y422;
390 | break;
391 |
392 | case GST_MAKE_FOURCC ('U', 'Y', 'N', 'V'):
393 | LOG(LOG_VIDPIPELINE, Logger::Info, "UYNV (0x%X)", uiFourCC);
394 | ret = ColFmt_YUNV;
395 | break;
396 |
397 | default :
398 | LOG(LOG_VIDPIPELINE, Logger::Warning, "Unhandled YUV-format");
399 | break;
400 | }
401 | }
402 | else
403 | {
404 | LOG(LOG_VIDPIPELINE, Logger::Warning, "Unsupported caps name %s", gst_structure_get_name (pStructure));
405 | }
406 |
407 | gst_caps_unref (pCaps);
408 | pCaps = NULL;
409 |
410 | return ret;
411 | }
412 |
413 | quint32 GStreamerPipeline::discoverFourCC(GstBuffer * buf)
414 | {
415 | guint32 uiFourCC = 0;
416 | GstCaps* pCaps = NULL;
417 | GstStructure* pStructure = NULL;
418 |
419 | pCaps = gst_buffer_get_caps (buf);
420 | pStructure = gst_caps_get_structure (pCaps, 0);
421 |
422 | if (gst_structure_has_name (pStructure, "video/x-raw-yuv"))
423 | {
424 | gst_structure_get_fourcc (pStructure, "format", &uiFourCC);
425 | }
426 |
427 | return (quint32)uiFourCC;
428 | }
429 |
430 | void GstIncomingBufThread::run()
431 | {
432 | LOG(LOG_VIDPIPELINE, Logger::Debug1, "GStreamerPipeline: vid %d incoming buf thread started",
433 | m_pipelinePtr->getVidIx());
434 |
435 | #ifndef Q_WS_WIN
436 | // Works like the g_main_loop on Linux (GstEvents are handled)
437 | QObject::connect(m_pipelinePtr, SIGNAL(stopRequested()), this, SLOT(quit()));
438 | exec();
439 | #else
440 | g_main_loop_run(m_pipelinePtr->m_loop);
441 | #endif
442 |
443 | // Incoming handling is all done in the static on_gst_buffer callback
444 |
445 | LOG(LOG_VIDPIPELINE, Logger::Debug1, "GStreamerPipeline: vid %d incoming buf thread finished",
446 | m_pipelinePtr->getVidIx());
447 | }
448 |
449 |
450 | void GstOutgoingBufThread::run()
451 | {
452 | LOG(LOG_VIDPIPELINE, Logger::Debug1, "GStreamerPipeline: vid %d outgoing buf thread started",
453 | m_pipelinePtr->getVidIx());
454 |
455 | QObject::connect(m_pipelinePtr, SIGNAL(stopRequested()), this, SLOT(quit()));
456 |
457 | while(m_keepRunningOutgoingThread)
458 | {
459 | /* Pop then unref buffer we have finished using in qt,
460 | block here if queue is empty */
461 | GstBuffer *buf_old = NULL;
462 | if(m_pipelinePtr->m_outgoingBufQueue.get((void**)(&buf_old), QUEUE_THREADBLOCK_WAITTIME_MS))
463 | {
464 | if (buf_old)
465 | {
466 | gst_buffer_unref(buf_old);
467 | LOG(LOG_VIDPIPELINE, Logger::Debug2, "GStreamerPipeline: vid %d popped buffer %p from outgoing queue",
468 | m_pipelinePtr->getVidIx(), buf_old);
469 | LOG(LOG_VIDPIPELINE, Logger::Debug2, "GStreamerPipeline: vid %d m_outgoingBufQueue size is = %d",
470 | m_pipelinePtr->getVidIx(), m_pipelinePtr->m_outgoingBufQueue.size());
471 | }
472 | }
473 | }
474 |
475 | LOG(LOG_VIDPIPELINE, Logger::Debug1, "GStreamerPipeline: vid %d outgoing buf thread finished",
476 | m_pipelinePtr->getVidIx());
477 | }
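/* A minimal sketch of the producer side of m_outgoingBufQueue. The real push site is in
   the renderer code (not shown here), and put() is assumed to be the counterpart of the
   get() used above. The GUI thread hands back a buffer it has finished texturing from,
   so that the gst_buffer_unref() happens on this worker thread instead of the GUI thread:

       // renderer, after uploading the frame to a GL texture:
       pipeline->m_outgoingBufQueue.put(doneBuf);   // GstOutgoingBufThread will unref it
*/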
478 |
479 |
480 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/gstpipeline.h:
--------------------------------------------------------------------------------
1 |
2 | #ifndef GSTPIPELINE_H
3 | #define GSTPIPELINE_H
4 |
5 | #include <QThread>
6 | #include <QObject>
7 |
8 | #include <gst/gst.h>
9 |
10 | // Re-include base class header here to keep the MOC happy:
11 | #include "pipeline.h"
12 |
13 | //#define PIPELINE_BUFFER_VID_DATA_START GST_BUFFER_DATA
14 |
15 | #define QUEUE_CLEANUP_WAITTIME_MS 200
16 | #define QUEUE_THREADBLOCK_WAITTIME_MS 50
17 |
18 | class GStreamerPipeline;
19 |
20 | // The incoming buffer thread is really only needed in Windows
21 | // to run g_main_loop_run()
22 | class GstIncomingBufThread : public QThread
23 | {
24 | Q_OBJECT
25 |
26 | public:
27 | GstIncomingBufThread(GStreamerPipeline *pipelinePtr, QObject *parent = 0)
28 | : QThread(parent),
29 | m_pipelinePtr(pipelinePtr) { }
30 | void run();
31 |
32 | private:
33 | GStreamerPipeline *m_pipelinePtr;
34 | };
35 |
36 |
37 | class GstOutgoingBufThread : public QThread
38 | {
39 | Q_OBJECT
40 |
41 | public:
42 | GstOutgoingBufThread(GStreamerPipeline *pipelinePtr, QObject *parent = 0)
43 | : QThread(parent),
44 | m_pipelinePtr(pipelinePtr),
45 | m_keepRunningOutgoingThread(true) { }
46 | void run();
47 |
48 | public slots:
49 | void quit() { m_keepRunningOutgoingThread = false; } // Not using an event loop for this thread
50 |
51 | private:
52 | GStreamerPipeline *m_pipelinePtr;
53 | bool m_keepRunningOutgoingThread;
54 | };
55 |
56 |
57 | class GStreamerPipeline : public Pipeline
58 | {
59 | Q_OBJECT
60 |
61 | public:
62 | GStreamerPipeline(int vidIx,
63 | const QString &videoLocation,
64 | const char *renderer_slot,
65 | QObject *parent);
66 | ~GStreamerPipeline();
67 |
68 | void Configure();
69 | void Start();
70 | unsigned char *bufToVidDataStart(void *buf) { return GST_BUFFER_DATA(buf); }
71 |
72 | // bit lazy just making these public for gst callbacks, but it'll do for now
73 | GstElement *m_source;
74 | GstElement *m_decodebin;
75 | GstElement *m_videosink;
76 | GstElement *m_audiosink;
77 | GstElement *m_audioconvert;
78 | GstElement *m_audioqueue;
79 |
80 | signals:
81 | void stopRequested();
82 |
83 | public Q_SLOTS:
84 | void Stop();
85 |
86 | private slots:
87 | void cleanUp();
88 |
89 | protected:
90 | GMainLoop* m_loop;
91 | GstBus* m_bus;
92 | GstElement* m_pipeline;
93 |
94 | GstIncomingBufThread *m_incomingBufThread;
95 | GstOutgoingBufThread *m_outgoingBufThread;
96 | friend class GstIncomingBufThread;
97 | friend class GstOutgoingBufThread;
98 |
99 | static void on_gst_buffer(GstElement * element, GstBuffer * buf, GstPad * pad, GStreamerPipeline* p);
100 | static void on_new_pad(GstElement *element, GstPad *pad, GStreamerPipeline* p);
101 | static gboolean bus_call (GstBus *bus, GstMessage *msg, GStreamerPipeline* p);
102 | static ColFormat discoverColFormat(GstBuffer * buf);
103 | static quint32 discoverFourCC(GstBuffer * buf);
104 | };
105 |
106 | #endif
107 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/main.cpp:
--------------------------------------------------------------------------------
1 | #include <QApplication>
2 | #include "mainwindow.h"
3 |
4 | int main(int argc, char *argv[])
5 | {
6 | QApplication a(argc, argv);
7 |
8 | MainWindow mainWindow(argc, argv);
9 | //mainWindow.show();
10 | mainWindow.showFullScreen();
11 |
12 | return a.exec();
13 | }
14 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/mainwindow.cpp:
--------------------------------------------------------------------------------
1 | #include "mainwindow.h"
2 | #include "applogger.h"
3 |
4 | #ifdef OMAP3530
5 | #include "glpowervrwidget.h"
6 | #else
7 | #include "glwidget.h"
8 | #endif
9 | #include "controlsform.h"
10 |
11 | MainWindow::MainWindow(int argc, char *argv[], QWidget *parent) :
12 | QMainWindow(parent)
13 | {
14 | GlobalLog.SetModuleLogLevel(LOG_GL, Logger::Info);
15 | GlobalLog.SetModuleLogLevel(LOG_GLSHADERS, Logger::Info);
16 | GlobalLog.SetModuleLogLevel(LOG_OBJLOADER, Logger::Info);
17 | GlobalLog.SetModuleLogLevel(LOG_VIDPIPELINE, Logger::Info);
18 |
19 | #ifdef OMAP3530
20 | GLWidget *glWidget = new GLPowerVRWidget(argc, argv, this);
21 | #else
22 | GLWidget *glWidget = new GLWidget(argc, argv, this);
23 | #endif
24 | glWidget->initVideo();
25 |
26 | ControlsForm *controlsForm = new ControlsForm(glWidget, this);
27 | setCentralWidget(controlsForm);
28 | }
29 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/mainwindow.h:
--------------------------------------------------------------------------------
1 | #ifndef MAINWINDOW_H
2 | #define MAINWINDOW_H
3 |
4 | #include <QMainWindow>
5 |
6 |
7 | class MainWindow : public QMainWindow
8 | {
9 | Q_OBJECT
10 | public:
11 | explicit MainWindow(int argc, char *argv[], QWidget *parent = 0);
12 |
13 | signals:
14 |
15 | public slots:
16 |
17 | };
18 |
19 | #endif // MAINWINDOW_H
20 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/model.cpp:
--------------------------------------------------------------------------------
1 |
2 |
3 | #include "model.h"
4 | #include "applogger.h"
5 |
6 | class myStream :
7 | public Assimp::LogStream
8 | {
9 | public:
10 | // Constructor
11 | myStream()
12 | {
13 | // empty
14 | }
15 | // Destructor
16 | ~myStream()
17 | {
18 | // empty
19 | }
20 |     // Write something using your own functionality
21 | void write(const char* message)
22 | {
23 | QString alteredMessage(message);
24 | alteredMessage.remove('\n');
25 | Logger::LogLevel currentLogLevel = GlobalLog.GetModuleLogLevel(LOG_OBJLOADER);
26 | GlobalLog.LogMessage(LOG_OBJLOADER, currentLogLevel, alteredMessage.toUtf8().constData());
27 | }
28 | };
29 |
30 | Model::Model()
31 | {
32 | m_scene = NULL;
33 |
34 | Logger::LogLevel currentLogLevel = GlobalLog.GetModuleLogLevel(LOG_OBJLOADER);
35 | int assimpLogSeverity = 0;
36 | switch(currentLogLevel)
37 | {
38 | // deliberate fall through:
39 | case Logger::Debug2:
40 | assimpLogSeverity |= Assimp::Logger::DEBUGGING;
41 | case Logger::Debug1:
42 | case Logger::Info:
43 | assimpLogSeverity |= Assimp::Logger::INFO;
44 | case Logger::Warning:
45 | assimpLogSeverity |= Assimp::Logger::WARN;
46 | case Logger::Error:
47 | assimpLogSeverity |= Assimp::Logger::ERR;
48 | break;
49 | default:
50 | break;
51 | }
52 |
53 | // Create a logger instance
54 | Assimp::DefaultLogger::create("", Assimp::Logger::VERBOSE);
55 |
56 | // Attach our custom stream to the default logger
57 | Assimp::DefaultLogger::get()->attachStream( new myStream(), assimpLogSeverity );
58 |
59 | Assimp::DefaultLogger::get()->info("this is my info-call");
60 | }
61 |
62 | Model::~Model()
63 | {
64 | m_nodes.resize(0);
65 |
66 | if(m_scene)
67 | {
68 | aiReleaseImport(m_scene);
69 | m_scene = NULL;
70 | }
71 |
72 | aiDetachAllLogStreams();
73 | }
74 |
75 | void Model::aiNodesToVertexArrays()
76 | {
77 | /* Depth first traverse node tree and place m_nodes in flat QList,
78 | then work on each node in QList to create usable arrays.
79 |
80 | Each node in tree has m_meshes, each mesh has faces, each face has indices
81 | one index is a set of co-ordinates/tex co-ords/colour/normal for one point (in a polygon, say)
82 |
83 | Transformation is per node,
84 | Texture/material is per mesh, so we want 1 set of arrays for each mesh
85 |
86 | Get all the points out and put them in:
87 | QVector of m_nodes
88 | Transformation matrix ptr
89 | QVector of m_meshes
90 | texid
91 | vertices: QVector of QVector3D
92 | tex co-ords: QVector of QVector3D
93 | normals: QVector of QVector3D
94 |
95 | Only bother with triangles and see how that turns out.
96 |
97 | */
98 |
99 | QList<struct aiNode*> flatNodePtrList;
100 | struct aiNode* currentNode = m_scene->mRootNode;
101 | flatNodePtrList.prepend(currentNode);
102 |
103 | while(flatNodePtrList.size())
104 | {
105 | // Store children m_nodes to process next, removing the
106 | // current (parent) node from the front of the list:
107 | currentNode = flatNodePtrList.takeFirst();
108 | for(int childNodeIx = currentNode->mNumChildren-1; childNodeIx >= 0; --childNodeIx)
109 | {
110 | flatNodePtrList.prepend(currentNode->mChildren[childNodeIx]);
111 | }
112 |
113 | // Process the current node:
114 | ModelNode newModelNode;
115 |
116 | newModelNode.m_transformMatrix = QMatrix4x4((qreal)currentNode->mTransformation.a1,
117 | (qreal)currentNode->mTransformation.a2,
118 | (qreal)currentNode->mTransformation.a3,
119 | (qreal)currentNode->mTransformation.a4,
120 | (qreal)currentNode->mTransformation.b1,
121 | (qreal)currentNode->mTransformation.b2,
122 | (qreal)currentNode->mTransformation.b3,
123 | (qreal)currentNode->mTransformation.b4,
124 | (qreal)currentNode->mTransformation.c1,
125 | (qreal)currentNode->mTransformation.c2,
126 | (qreal)currentNode->mTransformation.c3,
127 | (qreal)currentNode->mTransformation.c4,
128 | (qreal)currentNode->mTransformation.d1,
129 | (qreal)currentNode->mTransformation.d2,
130 | (qreal)currentNode->mTransformation.d3,
131 | (qreal)currentNode->mTransformation.d4);
132 |
133 |
134 | for(unsigned int meshIx = 0; meshIx < currentNode->mNumMeshes; ++meshIx)
135 | {
136 | const struct aiMesh* currentMesh = m_scene->mMeshes[currentNode->mMeshes[meshIx]];
137 |
138 | ModelMesh newModelMesh;
139 |
140 | // TODO: Grab texture info/load image file here....
141 |
142 | newModelMesh.m_hasNormals = currentMesh->HasNormals();
143 | newModelMesh.m_hasTexcoords = currentMesh->HasTextureCoords(0);
144 |
145 | for(unsigned int faceIx = 0; faceIx < currentMesh->mNumFaces; ++faceIx)
146 | {
147 | const struct aiFace* currentFace = &currentMesh->mFaces[faceIx];
148 |
149 | if(currentFace->mNumIndices != 3)
150 | {
151 | LOG(LOG_OBJLOADER, Logger::Info, "Ignoring non-triangle mesh %d face %d\n", meshIx, faceIx);
152 | }
153 |
154 |
155 | for(unsigned int i = 0; i < currentFace->mNumIndices; i++)
156 | {
157 | int vertexIndex = currentFace->mIndices[i];
158 |
159 | QVector3D vert(currentMesh->mVertices[vertexIndex].x, currentMesh->mVertices[vertexIndex].y, currentMesh->mVertices[vertexIndex].z);
160 | newModelMesh.m_triangleVertices.append(vert);
161 |
162 | if(newModelMesh.m_hasNormals)
163 | {
164 | QVector3D norm(currentMesh->mNormals[vertexIndex].x, currentMesh->mNormals[vertexIndex].y, currentMesh->mNormals[vertexIndex].z);
165 | newModelMesh.m_triangleNormals.append(norm);
166 | }
167 |
168 | if(newModelMesh.m_hasTexcoords)
169 | {
170 | QVector2D tex(currentMesh->mTextureCoords[0][vertexIndex].x, 1 - currentMesh->mTextureCoords[0][vertexIndex].y);
171 | newModelMesh.m_triangleTexcoords.append(tex);
172 | }
173 |
174 | }
175 | }
176 |
177 | newModelNode.m_meshes.append(newModelMesh);
178 | }
179 |
180 | m_nodes.append(newModelNode);
181 | }
182 | }
183 |
184 |
185 | int Model::Load(QString fileName)
186 | {
187 | if(m_scene)
188 | {
189 | // Clear extracted node data
190 | m_nodes.resize(0);
191 |
192 | aiReleaseImport(m_scene);
193 | m_scene = NULL;
194 | }
195 |
196 | // Load model
197 | m_scene = aiImportFile(fileName.toUtf8().constData(), aiProcessPreset_TargetRealtime_Quality);
198 |
199 | if (!m_scene)
200 | {
201 | LOG(LOG_OBJLOADER, Logger::Error, "Couldn't load obj model file %s", fileName.toUtf8().constData());
202 | return -1;
203 | }
204 |
205 | // Extract from ai mesh/faces into arrays
206 | aiNodesToVertexArrays();
207 |
208 | // Get the offset to center the model about the origin when drawing later
209 | get_bounding_box(&m_sceneMin,&m_sceneMax);
210 | m_sceneCenter.x = (m_sceneMin.x + m_sceneMax.x) / 2.0f;
211 | m_sceneCenter.y = (m_sceneMin.y + m_sceneMax.y) / 2.0f;
212 | m_sceneCenter.z = (m_sceneMin.z + m_sceneMax.z) / 2.0f;
213 |
214 | // Sensible default
215 | m_scaleFactor = 1.0;
216 |
217 | return 0;
218 | }
219 |
220 | void Model::SetScale(qreal boundarySize)
221 | {
222 | if (!m_scene)
223 | {
224 | LOG(LOG_OBJLOADER, Logger::Warning, "Model file not loaded yet");
225 | return;
226 | }
227 |
228 | float longestSide = m_sceneMax.x-m_sceneMin.x;
229 | longestSide = qMax(m_sceneMax.y - m_sceneMin.y, longestSide);
230 | longestSide = qMax(m_sceneMax.z - m_sceneMin.z, longestSide);
231 |
232 | m_scaleFactor = boundarySize / (qreal)longestSide;
233 | }
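// For example: a loaded model whose bounding box is 4.0 units along its longest side,
// given boundarySize = 2.0, gets m_scaleFactor = 0.5, which Draw() then applies via
// modelViewMatrix.scale(m_scaleFactor) to fit the model inside the requested boundary.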
234 |
235 | void Model::Draw(QMatrix4x4 modelViewMatrix, QMatrix4x4 projectionMatrix, QGLShaderProgram *shaderProg, bool useModelTextures)
236 | {
237 | if (!m_scene)
238 | {
239 | LOG(LOG_OBJLOADER, Logger::Warning, "Model file not loaded yet");
240 | return;
241 | }
242 |
243 | // Center and scale the model
244 | modelViewMatrix.scale(m_scaleFactor);
245 | modelViewMatrix.translate(-m_sceneCenter.x, -m_sceneCenter.y, -m_sceneCenter.z);
246 |
247 | foreach(ModelNode node, m_nodes)
248 | {
249 | QMatrix4x4 nodeModelViewMatrix = modelViewMatrix * node.m_transformMatrix;
250 |
251 | // Load modelview projection matrix into shader. The projection matrix must
252 | // be multiplied by the modelview, not the other way round!
253 | shaderProg->setUniformValue("u_mvp_matrix", projectionMatrix * nodeModelViewMatrix);
254 | shaderProg->setUniformValue("u_mv_matrix", nodeModelViewMatrix);
255 |
256 | foreach(ModelMesh mesh, node.m_meshes)
257 | {
258 | if(useModelTextures)
259 | {
260 | // Set/enable texture id if desired ....
261 | }
262 |
263 | if(mesh.m_hasNormals)
264 | {
265 | shaderProg->enableAttributeArray("a_normal");
266 | shaderProg->setAttributeArray("a_normal", mesh.m_triangleNormals.constData());
267 | }
268 |
269 | if(mesh.m_hasTexcoords)
270 | {
271 | shaderProg->enableAttributeArray("a_texCoord");
272 | shaderProg->setAttributeArray("a_texCoord", mesh.m_triangleTexcoords.constData());
273 | }
274 |
275 | shaderProg->enableAttributeArray("a_vertex");
276 | shaderProg->setAttributeArray("a_vertex", mesh.m_triangleVertices.constData());
277 |
278 | glDrawArrays(GL_TRIANGLES, 0, mesh.m_triangleVertices.size());
279 | shaderProg->disableAttributeArray("a_vertex");
280 | shaderProg->disableAttributeArray("a_normal");
281 | shaderProg->disableAttributeArray("a_texCoord");
282 | }
283 | }
284 | }
285 |
286 |
287 | void Model::get_bounding_box_for_node (const struct aiNode* nd,
288 | struct aiVector3D* min,
289 | struct aiVector3D* max,
290 | struct aiMatrix4x4* trafo)
291 | {
292 | struct aiMatrix4x4 prev;
293 | unsigned int n = 0, t;
294 |
295 | prev = *trafo;
296 | aiMultiplyMatrix4(trafo,&nd->mTransformation);
297 |
298 | for (; n < nd->mNumMeshes; ++n) {
299 | const struct aiMesh* mesh = m_scene->mMeshes[nd->mMeshes[n]];
300 | for (t = 0; t < mesh->mNumVertices; ++t) {
301 |
302 | struct aiVector3D tmp = mesh->mVertices[t];
303 | aiTransformVecByMatrix4(&tmp,trafo);
304 |
305 | min->x = qMin(min->x,tmp.x);
306 | min->y = qMin(min->y,tmp.y);
307 | min->z = qMin(min->z,tmp.z);
308 |
309 | max->x = qMax(max->x,tmp.x);
310 | max->y = qMax(max->y,tmp.y);
311 | max->z = qMax(max->z,tmp.z);
312 | }
313 | }
314 |
315 | for (n = 0; n < nd->mNumChildren; ++n) {
316 | get_bounding_box_for_node(nd->mChildren[n],min,max,trafo);
317 | }
318 | *trafo = prev;
319 | }
320 |
321 | void Model::get_bounding_box (struct aiVector3D* min, struct aiVector3D* max)
322 | {
323 | struct aiMatrix4x4 trafo;
324 | aiIdentityMatrix4(&trafo);
325 |
326 | min->x = min->y = min->z = 1e10f;
327 | max->x = max->y = max->z = -1e10f;
328 | get_bounding_box_for_node(m_scene->mRootNode,min,max,&trafo);
329 | }
330 |
331 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/model.h:
--------------------------------------------------------------------------------
1 | #ifndef MODEL_H
2 | #define MODEL_H
3 |
4 | #include <QString>
5 | #include <QVector>
6 | #include <QMatrix4x4>
7 | #include <QGLShaderProgram>
8 |
9 | #include "assimp.h"
10 | #include "aiPostProcess.h"
11 | #include "aiScene.h"
12 | #include "DefaultLogger.h"
13 | #include "LogStream.h"
14 |
15 | class ModelMesh
16 | {
17 | public:
18 | QVector<QVector3D> m_triangleVertices;
19 | bool m_hasNormals;
20 | QVector<QVector3D> m_triangleNormals;
21 | bool m_hasTexcoords;
22 | QVector<QVector2D> m_triangleTexcoords;
23 |
24 | // Could add more QVectors here for points, lines, polys.
25 |
26 | // texture related members here ....
27 |
28 | signals:
29 |
30 | public slots:
31 |
32 | };
33 |
34 | class ModelNode
35 | {
36 | public:
37 | QVector<ModelMesh> m_meshes;
38 | QMatrix4x4 m_transformMatrix;
39 | //struct aiMatrix4x4 aim_transformMatrix;
40 | signals:
41 |
42 | public slots:
43 |
44 | };
45 |
46 | class Model
47 | {
48 | public:
49 | Model();
50 | ~Model();
51 |
52 | int Load(QString fileName);
53 | void SetScale(qreal boundarySize);
54 | void Draw(QMatrix4x4 modelViewMatrix, QMatrix4x4 projectionMatrix, QGLShaderProgram *shaderProg, bool useModelTextures);
55 |
56 | private:
57 | void aiNodesToVertexArrays();
58 | void get_bounding_box_for_node (const struct aiNode* nd,
59 | struct aiVector3D* min,
60 | struct aiVector3D* max,
61 | struct aiMatrix4x4* trafo);
62 | void get_bounding_box (struct aiVector3D* min, struct aiVector3D* max);
63 |
64 | const struct aiScene* m_scene;
65 | QVector<ModelNode> m_nodes;
66 | struct aiVector3D m_sceneCenter;
67 | struct aiVector3D m_sceneMin;
68 | struct aiVector3D m_sceneMax;
69 | qreal m_scaleFactor;
70 |
71 | };
72 |
73 | #endif // MODEL_H
74 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/pipeline.cpp:
--------------------------------------------------------------------------------
1 |
2 | #include "pipeline.h"
3 |
4 | Pipeline::Pipeline(int vidIx,
5 | const QString &videoLocation,
6 | const char *renderer_slot,
7 | QObject *parent)
8 | : QObject(parent),
9 | m_vidIx(vidIx),
10 | m_videoLocation(videoLocation),
11 | m_colFormat(ColFmt_Unknown),
12 | m_vidInfoValid(false),
13 | m_finished(false)
14 | {
15 | QObject::connect(this, SIGNAL(newFrameReady(int)), this->parent(), renderer_slot, Qt::QueuedConnection);
16 | }
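/* A usage sketch (hypothetical caller and slot name; the real wiring lives in the GL
   widget code): the renderer passes one of its own slots as renderer_slot, so the queued
   connection made above delivers newFrameReady(vidIx) on the renderer's (GUI) thread:

       // inside a QObject-derived renderer, e.g. the GL widget:
       Pipeline *pipeline = new GStreamerPipeline(vidIx, "video0.avi",
                                                  SLOT(newFrame(int)), this);
       pipeline->Configure();
       pipeline->Start();
*/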
17 |
18 | Pipeline::~Pipeline()
19 | {
20 | }
21 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/pipeline.h:
--------------------------------------------------------------------------------
1 | #ifndef PIPELINE_H
2 | #define PIPELINE_H
3 |
4 | #include <QObject>
5 | #include "asyncwaitingqueue.h"
6 |
7 | #define COLFMT_FOUR_CC(a,b,c,d) \
8 | ((unsigned long) ((a) | (b)<<8 | (c)<<16 | (d)<<24))
9 |
10 | typedef enum _ColFormat
11 | {
12 | // these relate to fourCC codes, but abstract video framework system from outside:
13 | ColFmt_I420 = COLFMT_FOUR_CC('I', '4', '2', '0'),
14 | ColFmt_IYUV = COLFMT_FOUR_CC('I', 'Y', 'U', 'V'),
15 | ColFmt_YV12 = COLFMT_FOUR_CC('Y', 'V', '1', '2'),
16 | ColFmt_YUYV = COLFMT_FOUR_CC('Y', 'U', 'Y', 'V'),
17 | ColFmt_YUY2 = COLFMT_FOUR_CC('Y', 'U', 'Y', '2'),
18 | ColFmt_V422 = COLFMT_FOUR_CC('V', '4', '2', '2'),
19 | ColFmt_YUNV = COLFMT_FOUR_CC('Y', 'U', 'N', 'V'),
20 | ColFmt_UYVY = COLFMT_FOUR_CC('U', 'Y', 'V', 'Y'),
21 | ColFmt_Y422 = COLFMT_FOUR_CC('Y', '4', '2', '2'),
22 | ColFmt_UYNV = COLFMT_FOUR_CC('U', 'Y', 'N', 'V'),
23 |
24 | // Also capture RGBs in the same enum
25 | ColFmt_RGB888 = COLFMT_FOUR_CC('R', 'G', 'B', '8'),
26 | ColFmt_BGR888,
27 | ColFmt_ARGB8888,
28 | ColFmt_BGRA8888,
29 |
30 | ColFmt_Unknown
31 | } ColFormat;
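/* Worked example of the packing above: COLFMT_FOUR_CC('I', '4', '2', '0') evaluates to
   0x49 | 0x34 << 8 | 0x32 << 16 | 0x30 << 24 = 0x30323449, i.e. the bytes spell "I420"
   in little-endian memory order. GStreamer's GST_MAKE_FOURCC packs its codes the same
   way, which is why discoverColFormat() in gstpipeline.cpp can translate buffer caps
   FourCCs straight onto this enum. */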
32 |
33 | class Pipeline : public QObject
34 | {
35 | Q_OBJECT
36 |
37 | public:
38 | Pipeline(int vidIx,
39 | const QString &videoLocation,
40 | const char *renderer_slot,
41 | QObject *parent);
42 | ~Pipeline();
43 |
44 | virtual void Configure() = 0;
45 | virtual void Start() = 0;
46 | void NotifyNewFrame() { emit newFrameReady(m_vidIx); }
47 |
48 | int getVidIx() { return m_vidIx; }
49 | int getWidth() { return m_width; }
50 | int getHeight() { return m_height; }
51 | ColFormat getColourFormat() { return m_colFormat; }
52 | virtual unsigned char *bufToVidDataStart(void *buf) = 0;
53 |
54 | bool isFinished() { return this->m_finished; }
55 |
56 | AsyncQueue<void*> m_incomingBufQueue;
57 | AsyncQueue<void*> m_outgoingBufQueue;
58 |
59 | Q_SIGNALS:
60 | void newFrameReady(int vidIx);
61 | void finished(int vidIx);
62 |
63 | public slots:
64 | virtual void Stop() = 0;
65 |
66 | protected:
67 | int m_vidIx;
68 | const QString m_videoLocation;
69 | int m_width;
70 | int m_height;
71 | ColFormat m_colFormat;
72 | bool m_vidInfoValid;
73 | bool m_finished;
74 | };
75 |
76 | #if defined OMAP3530
77 |
78 | // Don't include derived classes if this include is from the base class
79 | // header file due to the MOC. In that case, base class is not defined
80 | // yet but inclusion guard is defined.
81 | #ifndef GSTPIPELINE_H
82 | #include "gstpipeline.h"
83 | #ifndef TIGSTPIPELINE_H
84 | #include "tigstpipeline.h"
85 | #endif
86 | #endif
87 |
88 | #elif defined UNIX
89 | #include "gstpipeline.h"
90 | #endif
91 |
92 | #endif // PIPELINE_H
93 |
94 |
95 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/qt_gl_gst.pro:
--------------------------------------------------------------------------------
1 | #-------------------------------------------------
2 | #
3 | # Project created by QtCreator 2012-03-03T06:40:34
4 | #
5 | #-------------------------------------------------
6 |
7 | QT += core gui opengl
8 | CONFIG += console
9 |
10 |
11 | TARGET = qt_gl_gst
12 | TEMPLATE = app
13 |
14 | DEFINES += UNIX VIDI420_SHADERS_NEEDED RECTTEX_EXT_NEEDED GLU_NEEDED
15 |
16 | SOURCES += \
17 | main.cpp \
18 | glwidget.cpp \
19 | model.cpp \
20 | gstpipeline.cpp \
21 | pipeline.cpp \
22 | shaderlists.cpp \
23 | mainwindow.cpp \
24 | yuvdebugwindow.cpp \
25 | controlsform.cpp \
26 | applogger.cpp
27 |
28 | HEADERS += \
29 | glwidget.h \
30 | asyncwaitingqueue.h \
31 | model.h \
32 | gstpipeline.h \
33 | pipeline.h \
34 | shaderlists.h \
35 | mainwindow.h \
36 | yuvdebugwindow.h \
37 | controlsform.h \
38 | applogger.h
39 |
40 | FORMS += \
41 | controlsform.ui
42 |
43 | # OpenGL support libraries:
44 | LIBS += -lGLU \
45 | -lGL \
46 | -lGLEW
47 |
48 | # Gstreamer:
49 | CONFIG += link_pkgconfig
50 | PKGCONFIG += gstreamer-0.10
51 |
52 | # Model loading using Assimp:
53 | PKGCONFIG += assimp
54 |
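# A typical build using this project file (assuming pkg-config can locate the
# gstreamer-0.10 and assimp development packages requested above):
#
#   qmake qt_gl_gst.pro
#   make
#   ./run_with_3_vids.sh    # the video paths inside the script will need adjusting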
--------------------------------------------------------------------------------
/src/qt_gl_gst/qt_gl_gst_omap3530.pro:
--------------------------------------------------------------------------------
1 | #-------------------------------------------------
2 | #
3 | # Project created by QtCreator 2012-03-03T06:40:34
4 | #
5 | #-------------------------------------------------
6 |
7 | QT += core gui opengl
8 | CONFIG += console
9 |
10 |
11 | TARGET = qt_gl_gst
12 | TEMPLATE = app
13 |
14 | DEFINES += UNIX OMAP3530 VIDUYVY_SHADERS_NEEDED IMGTEX_EXT_NEEDED \
15 | HIDE_GL_WHEN_MODAL_OPEN TEXCOORDS_ALREADY_NORMALISED
16 |
17 | SOURCES += main.cpp \
18 | mainwindow.cpp \
19 | glwidget.cpp \
20 | pipeline.cpp \
21 | gstpipeline.cpp \
22 | tigstpipeline.cpp \
23 | shaderlists.cpp \
24 | model.cpp \
25 | yuvdebugwindow.cpp \
26 | controlsform.cpp \
27 | glpowervrwidget.cpp \
28 | applogger.cpp
29 |
30 | HEADERS += mainwindow.h \
31 | glwidget.h \
32 | pipeline.h \
33 | gstpipeline.h \
34 | tigstpipeline.h \
35 | asyncwaitingqueue.h \
36 | shaderlists.h \
37 | model.h \
38 | yuvdebugwindow.h \
39 | controlsform.h \
40 | glpowervrwidget.h \
41 | applogger.h
42 |
43 | FORMS += \
44 | controlsform.ui
45 |
46 | # Gstreamer:
47 | CONFIG += link_pkgconfig
48 | PKGCONFIG += gstreamer-0.10
49 |
50 | # Model loading using Assimp:
51 | PKGCONFIG += assimp
52 |
53 | # TI CMem lib:
54 | INCLUDEPATH += $$(SDK_PATH)/../linuxutils_2_25_05_11/packages/ti/sdo/linuxutils/cmem/include/
55 | LIBS += -L$$(SDK_PATH)/../linuxutils_2_25_05_11/packages/ti/sdo/linuxutils/cmem/lib -l:cmem.a470MV
56 |
57 |
58 | # Run on remote Linux device parameters:
59 | target.path += /opt/qt_gl_gst-omap3530evm
60 | INSTALLS += target
61 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/run_with_1_vid_omap3.sh:
--------------------------------------------------------------------------------
1 | #export LD_LIBRARY_PATH=/opt/qt-embedded-4.6/lib
2 | #export QWS_MOUSE_PROTO=Tslib
3 |
4 | ./qt_gl_gst -qws -display powervr /usr/share/ti/data/videos/davincieffect_vga30.mp4
5 |
6 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/run_with_3_vids.sh:
--------------------------------------------------------------------------------
1 | ./qt_gl_gst /home/elangley/Videos/nokia.avi /home/elangley/Videos/HD-underwater.mpg /home/elangley/Videos/espnhd_CC.clean.m2t
2 |
3 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/run_with_5_vids.sh:
--------------------------------------------------------------------------------
1 | ./qt_gl_gst /home/elangley/Videos/nokia.avi /home/elangley/Videos/HD-underwater.mpg /home/elangley/Videos/Office_Space.avi /home/elangley/Videos/stream.ts /home/elangley/Videos/espnhd_CC.clean.m2t
2 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/shaderlists.cpp:
--------------------------------------------------------------------------------
1 | #include "shaderlists.h"
2 |
3 | // Arrays containing lists of shaders which can be linked and used together:
4 |
5 | GLShaderModule BrickGLESShaderList[NUM_SHADERS_BRICKGLES] =
6 | {
7 | { "shaders/brick.vert", QGLShader::Vertex },
8 | { "shaders/brick.frag", QGLShader::Fragment }
9 | };
10 |
11 | #ifdef VIDI420_SHADERS_NEEDED
12 | /* I420 */
13 | GLShaderModule VidI420NoEffectNormalisedShaderList[NUM_SHADERS_VIDI420_NOEFFECT_NORMALISED] =
14 | {
15 | { "shaders/yuv2rgbI420-normalisedtexcoords"VIDCONV_FRAG_SHADER_SUFFIX".frag", QGLShader::Fragment },
16 | { "shaders/noeffect.vert", QGLShader::Vertex },
17 | { "shaders/noeffect.frag", QGLShader::Fragment }
18 | };
19 |
20 | GLShaderModule VidI420LitNormalisedShaderList[NUM_SHADERS_VIDI420_LIT_NORMALISED] =
21 | {
22 | { "shaders/yuv2rgbI420-normalisedtexcoords"VIDCONV_FRAG_SHADER_SUFFIX".frag", QGLShader::Fragment },
23 | { "shaders/vidlighting.vert", QGLShader::Vertex },
24 | { "shaders/vidlighting.frag", QGLShader::Fragment }
25 | };
26 |
27 | GLShaderModule VidI420NoEffectShaderList[NUM_SHADERS_VIDI420_NOEFFECT] =
28 | {
29 | { "shaders/yuv2rgbI420"VIDCONV_FRAG_SHADER_SUFFIX".frag", QGLShader::Fragment },
30 | { "shaders/noeffect.vert", QGLShader::Vertex },
31 | { "shaders/noeffect.frag", QGLShader::Fragment }
32 | };
33 |
34 | GLShaderModule VidI420LitShaderList[NUM_SHADERS_VIDI420_LIT] =
35 | {
36 | { "shaders/yuv2rgbI420"VIDCONV_FRAG_SHADER_SUFFIX".frag", QGLShader::Fragment },
37 | { "shaders/vidlighting.vert", QGLShader::Vertex },
38 | { "shaders/vidlighting.frag", QGLShader::Fragment }
39 | };
40 |
41 | GLShaderModule VidI420ColourHilightShaderList[NUM_SHADERS_VIDI420_COLOURHILIGHT] =
42 | {
43 | { "shaders/yuv2rgbI420"VIDCONV_FRAG_SHADER_SUFFIX".frag", QGLShader::Fragment },
44 | { "shaders/noeffect.vert", QGLShader::Vertex },
45 | { "shaders/colourhilight.frag", QGLShader::Fragment }
46 | };
47 |
48 | GLShaderModule VidI420ColourHilightSwapShaderList[NUM_SHADERS_VIDI420_COLOURHILIGHTSWAP] =
49 | {
50 | { "shaders/yuv2rgbI420"VIDCONV_FRAG_SHADER_SUFFIX".frag", QGLShader::Fragment },
51 | { "shaders/noeffect.vert", QGLShader::Vertex },
52 | { "shaders/colourhilightswap.frag", QGLShader::Fragment }
53 | };
54 |
55 | GLShaderModule VidI420AlphaMaskShaderList[NUM_SHADERS_VIDI420_ALPHAMASK] =
56 | {
57 | { "shaders/yuv2rgbI420"VIDCONV_FRAG_SHADER_SUFFIX".frag", QGLShader::Fragment },
58 | { "shaders/alphamask"VIDCONV_FRAG_SHADER_SUFFIX".frag", QGLShader::Fragment },
59 | { "shaders/alphamask.vert", QGLShader::Vertex }
60 | };
61 | #endif
62 |
63 | #ifdef VIDUYVY_SHADERS_NEEDED
64 | /* UYVY */
65 | GLShaderModule VidUYVYNoEffectNormalisedShaderList[NUM_SHADERS_VIDUYVY_NOEFFECT_NORMALISED] =
66 | {
67 | { "shaders/yuv2rgbUYVY-normalisedtexcoords"VIDCONV_FRAG_SHADER_SUFFIX".frag", QGLShader::Fragment },
68 | { "shaders/noeffect.vert", QGLShader::Vertex },
69 | { "shaders/noeffect.frag", QGLShader::Fragment }
70 | };
71 |
72 | GLShaderModule VidUYVYLitNormalisedShaderList[NUM_SHADERS_VIDUYVY_LIT_NORMALISED] =
73 | {
74 | { "shaders/yuv2rgbUYVY-normalisedtexcoords"VIDCONV_FRAG_SHADER_SUFFIX".frag", QGLShader::Fragment },
75 | { "shaders/vidlighting.vert", QGLShader::Vertex },
76 | { "shaders/vidlighting.frag", QGLShader::Fragment }
77 | };
78 |
79 | GLShaderModule VidUYVYNoEffectShaderList[NUM_SHADERS_VIDUYVY_NOEFFECT] =
80 | {
81 | { "shaders/yuv2rgbUYVY"VIDCONV_FRAG_SHADER_SUFFIX".frag", QGLShader::Fragment },
82 | { "shaders/noeffect.vert", QGLShader::Vertex },
83 | { "shaders/noeffect.frag", QGLShader::Fragment }
84 | };
85 |
86 | GLShaderModule VidUYVYLitShaderList[NUM_SHADERS_VIDUYVY_LIT] =
87 | {
88 | { "shaders/yuv2rgbUYVY"VIDCONV_FRAG_SHADER_SUFFIX".frag", QGLShader::Fragment },
89 | { "shaders/vidlighting.vert", QGLShader::Vertex },
90 | { "shaders/vidlighting.frag", QGLShader::Fragment }
91 | };
92 |
93 | GLShaderModule VidUYVYColourHilightShaderList[NUM_SHADERS_VIDUYVY_COLOURHILIGHT] =
94 | {
95 | { "shaders/yuv2rgbUYVY"VIDCONV_FRAG_SHADER_SUFFIX".frag", QGLShader::Fragment },
96 | { "shaders/noeffect.vert", QGLShader::Vertex },
97 | { "shaders/colourhilight.frag", QGLShader::Fragment }
98 | };
99 |
100 | GLShaderModule VidUYVYColourHilightSwapShaderList[NUM_SHADERS_VIDUYVY_COLOURHILIGHTSWAP] =
101 | {
102 | { "shaders/yuv2rgbUYVY"VIDCONV_FRAG_SHADER_SUFFIX".frag", QGLShader::Fragment },
103 | { "shaders/noeffect.vert", QGLShader::Vertex },
104 | { "shaders/colourhilightswap.frag", QGLShader::Fragment }
105 | };
106 |
107 | GLShaderModule VidUYVYAlphaMaskShaderList[NUM_SHADERS_VIDUYVY_ALPHAMASK] =
108 | {
109 | { "shaders/yuv2rgbUYVY"VIDCONV_FRAG_SHADER_SUFFIX".frag", QGLShader::Fragment },
110 | { "shaders/alphamask"VIDCONV_FRAG_SHADER_SUFFIX".frag", QGLShader::Fragment },
111 | { "shaders/alphamask.vert", QGLShader::Vertex }
112 | };
113 | #endif
114 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/shaderlists.h:
--------------------------------------------------------------------------------
1 | #ifndef SHADERLISTS_H
2 | #define SHADERLISTS_H
3 |
4 | #include "glwidget.h"
5 |
6 | #define NUM_SHADERS_BRICKGLES 2
7 | extern GLShaderModule BrickGLESShaderList[NUM_SHADERS_BRICKGLES];
8 |
9 | #ifdef VIDI420_SHADERS_NEEDED
10 | /* I420 */
11 | #define NUM_SHADERS_VIDI420_NOEFFECT_NORMALISED 3
12 | extern GLShaderModule VidI420NoEffectNormalisedShaderList[NUM_SHADERS_VIDI420_NOEFFECT_NORMALISED];
13 |
14 | #define NUM_SHADERS_VIDI420_LIT_NORMALISED 3
15 | extern GLShaderModule VidI420LitNormalisedShaderList[NUM_SHADERS_VIDI420_LIT_NORMALISED];
16 |
17 | #define NUM_SHADERS_VIDI420_NOEFFECT 3
18 | extern GLShaderModule VidI420NoEffectShaderList[NUM_SHADERS_VIDI420_NOEFFECT];
19 |
20 | #define NUM_SHADERS_VIDI420_LIT 3
21 | extern GLShaderModule VidI420LitShaderList[NUM_SHADERS_VIDI420_LIT];
22 |
23 | #define NUM_SHADERS_VIDI420_COLOURHILIGHT 3
24 | extern GLShaderModule VidI420ColourHilightShaderList[NUM_SHADERS_VIDI420_COLOURHILIGHT];
25 |
26 | #define NUM_SHADERS_VIDI420_COLOURHILIGHTSWAP 3
27 | extern GLShaderModule VidI420ColourHilightSwapShaderList[NUM_SHADERS_VIDI420_COLOURHILIGHTSWAP];
28 |
29 | #define NUM_SHADERS_VIDI420_ALPHAMASK 3
30 | extern GLShaderModule VidI420AlphaMaskShaderList[NUM_SHADERS_VIDI420_ALPHAMASK];
31 | #endif
32 |
33 | #ifdef VIDUYVY_SHADERS_NEEDED
34 | /* UYVY */
35 | #define NUM_SHADERS_VIDUYVY_NOEFFECT_NORMALISED 3
36 | extern GLShaderModule VidUYVYNoEffectNormalisedShaderList[NUM_SHADERS_VIDUYVY_NOEFFECT_NORMALISED];
37 |
38 | #define NUM_SHADERS_VIDUYVY_LIT_NORMALISED 3
39 | extern GLShaderModule VidUYVYLitNormalisedShaderList[NUM_SHADERS_VIDUYVY_LIT_NORMALISED];
40 |
41 | #define NUM_SHADERS_VIDUYVY_NOEFFECT 3
42 | extern GLShaderModule VidUYVYNoEffectShaderList[NUM_SHADERS_VIDUYVY_NOEFFECT];
43 |
44 | #define NUM_SHADERS_VIDUYVY_LIT 3
45 | extern GLShaderModule VidUYVYLitShaderList[NUM_SHADERS_VIDUYVY_LIT];
46 |
47 | #define NUM_SHADERS_VIDUYVY_COLOURHILIGHT 3
48 | extern GLShaderModule VidUYVYColourHilightShaderList[NUM_SHADERS_VIDUYVY_COLOURHILIGHT];
49 |
50 | #define NUM_SHADERS_VIDUYVY_COLOURHILIGHTSWAP 3
51 | extern GLShaderModule VidUYVYColourHilightSwapShaderList[NUM_SHADERS_VIDUYVY_COLOURHILIGHTSWAP];
52 |
53 | #define NUM_SHADERS_VIDUYVY_ALPHAMASK 3
54 | extern GLShaderModule VidUYVYAlphaMaskShaderList[NUM_SHADERS_VIDUYVY_ALPHAMASK];
55 | #endif
56 |
57 |
58 | #endif // SHADERLISTS_H
59 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/shaders/alphamask-imgstream.frag:
--------------------------------------------------------------------------------
1 | // GLSL shader which alpha blends output of video conversion using another texture as
2 | // an alpha mask, using an average of rgb
3 | // This shader must be linked with another containing the yuv2rgb function
4 | // to handle the video data first
5 |
6 |
7 | uniform highp sampler2D u_alphaTexture;
8 |
9 | varying highp vec3 v_alphaTexCoord;
10 |
11 | mediump vec4 yuv2rgb(void);
12 |
13 | void main(void)
14 | {
15 | highp vec4 alphaColour;
16 | highp float alphaAverage;
17 | mediump vec4 rgbColour = yuv2rgb();
18 |
19 | alphaColour = texture2D(u_alphaTexture, v_alphaTexCoord.xy);
20 | alphaAverage = alphaColour.r + alphaColour.g + alphaColour.b;
21 | alphaAverage /= 3.0;
22 |
23 | gl_FragColor = vec4(rgbColour.rgb, alphaAverage);
24 | }
25 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/shaders/alphamask-recttex.frag:
--------------------------------------------------------------------------------
1 | // GLSL shader which alpha blends output of video conversion using another texture as
2 | // an alpha mask, using an average of rgb
3 | // This shader must be linked with another containing the yuv2rgb function
4 | // to handle the video data first
5 |
6 | uniform sampler2DRect u_alphaTexture;
7 |
8 | varying vec3 v_alphaTexCoord;
9 |
10 | vec4 yuv2rgb(void);
11 |
12 | void main(void)
13 | {
14 | vec4 alphaColour;
15 | float alphaAverage;
16 | vec4 rgbColour = yuv2rgb();
17 |
18 | alphaColour = texture2DRect(u_alphaTexture, v_alphaTexCoord.xy);
19 | alphaAverage = alphaColour.r + alphaColour.g + alphaColour.b;
20 | alphaAverage /= 3.0;
21 |
22 | gl_FragColor = vec4(rgbColour.rgb, alphaAverage);
23 | }
24 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/shaders/alphamask.frag:
--------------------------------------------------------------------------------
1 | // GLSL shader which alpha blends output of video conversion using another texture as
2 | // an alpha mask, using an average of rgb
3 | // This shader must be linked with another containing the yuv2rgb function
4 | // to handle the video data first
5 |
6 |
7 | uniform highp sampler2D u_alphaTexture;
8 |
9 | varying highp vec3 v_alphaTexCoord;
10 |
11 | mediump vec4 yuv2rgb(void);
12 |
13 | void main(void)
14 | {
15 | highp vec4 alphaColour;
16 | highp float alphaAverage;
17 | mediump vec4 rgbColour = yuv2rgb();
18 |
19 | alphaColour = texture2D(u_alphaTexture, v_alphaTexCoord.xy);
20 | alphaAverage = alphaColour.r + alphaColour.g + alphaColour.b;
21 | alphaAverage /= 3.0;
22 |
23 | gl_FragColor = vec4(rgbColour.rgb, alphaAverage);
24 | }
25 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/shaders/alphamask.vert:
--------------------------------------------------------------------------------
1 | // GLES shader for passing interpolated texture co-ordinates
2 | // to the video fragment shader
3 |
4 |
5 | uniform highp mat4 u_mvp_matrix;
6 | uniform highp mat4 u_mv_matrix;
7 |
8 | attribute highp vec4 a_vertex;
9 | attribute highp vec3 a_alphaTexCoord;
10 | attribute highp vec4 a_texCoord;
11 |
12 | varying highp vec3 v_alphaTexCoord;
13 | varying highp vec4 v_texCoord;
14 |
15 | void main(void)
16 | {
17 | gl_Position = (u_mvp_matrix * a_vertex);
18 | v_alphaTexCoord = a_alphaTexCoord;
19 | v_texCoord = a_texCoord;
20 | }
21 |
22 |
23 |
24 |
25 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/shaders/brick.frag:
--------------------------------------------------------------------------------
1 | //
2 | // Fragment shader for procedural bricks
3 | //
4 | // Authors: Dave Baldwin, Steve Koren, Randi Rost
5 | // based on a shader by Darwyn Peachey
6 | //
7 | // Copyright (c) 2002-2004 3Dlabs Inc. Ltd.
8 | //
9 | // See 3Dlabs-License.txt for license information
10 | //
11 |
12 | uniform highp vec3 BrickColor, MortarColor;
13 | uniform highp vec3 BrickSize;
14 | uniform highp vec3 BrickPct;
15 |
16 | varying mediump vec3 v_MCposition;
17 | varying highp float v_LightIntensity;
18 |
19 | void main(void)
20 | {
21 | highp vec3 color;
22 | mediump vec3 position, useBrick;
23 |
24 | position = v_MCposition / BrickSize;
25 |
26 | if (fract(position.y * 0.5) > 0.5)
27 | position.x += 0.5;
28 |
29 | if (fract(position.y * 0.5) > 0.5)
30 | position.z += 0.5;
31 |
32 | position = fract(position);
33 |
34 | useBrick = step(position, BrickPct);
35 |
36 | color = mix(MortarColor, BrickColor, useBrick.x * useBrick.y * useBrick.z);
37 | color *= v_LightIntensity;
38 | gl_FragColor = vec4 (color, 1.0);
39 | // gl_FragColor = vec4 (1.0, 1.0, 0.0, 1.0);
40 | }
41 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/shaders/brick.vert:
--------------------------------------------------------------------------------
1 | //
2 | // Vertex shader for procedural bricks
3 | //
4 | // Authors: Ed Langley
5 | // based on a shader by Dave Baldwin, Steve Koren,
6 | // Randi Rost
7 | // which was based on a shader by Darwyn Peachey
8 | //
9 | // Copyright (c) 2002-2004 3Dlabs Inc. Ltd.
10 | //
11 | // See 3Dlabs-License.txt for license information
12 | //
13 |
14 | uniform vec3 LightPosition;
15 |
16 | const mediump float SpecularContribution = 0.3;
17 | const mediump float DiffuseContribution = 1.0 - SpecularContribution;
18 |
19 | uniform highp mat4 u_mvp_matrix;
20 | uniform highp mat4 u_mv_matrix;
21 |
22 | attribute highp vec4 a_vertex;
23 | attribute highp vec3 a_normal;
24 | attribute highp vec4 a_texCoord; // unused right now
25 |
26 | varying mediump float v_LightIntensity;
27 | varying mediump vec3 v_MCposition;
28 |
29 | void main(void)
30 | {
31 | // vec3 ecPosition = vec3 (gl_ModelViewMatrix * vertex);
32 | highp vec3 ecPosition = vec3 (u_mv_matrix * a_vertex);
33 |
34 | // vec3 tnorm = normalize(gl_NormalMatrix * normal);
35 | highp vec3 tnorm = normalize(u_mv_matrix * vec4(a_normal, 0.0)).xyz;
36 |
37 | highp vec3 lightVec = normalize(LightPosition - ecPosition);
38 | // Using the reflection vector:
39 | // vec3 reflectVec = reflect(-lightVec, tnorm);
40 | highp vec3 reflectVec = (2.0 * tnorm * dot(lightVec, tnorm)) - lightVec;
41 |
42 | // Using the half vector:
43 | //vec3 halfVec = normalize(-ecPosition - LightPosition);
44 |
45 | highp vec3 viewVec = normalize(-ecPosition);
46 | highp float diffuse = max(dot(lightVec, tnorm), 0.0);
47 | highp float spec = 0.0;
48 |
49 | if (diffuse > 0.0)
50 | {
51 | spec = max(dot(reflectVec, viewVec), 0.0);
52 | //spec = max(dot(halfVec, tnorm), 0.0);
53 |
54 | spec = pow(spec, 46.0);
55 | }
56 |
57 | v_LightIntensity = DiffuseContribution * diffuse +
58 | SpecularContribution * spec;
59 |
60 | v_MCposition = a_vertex.xyz;
61 | // gl_Position = ftransform();
62 | // gl_Position = (gl_ModelViewProjectionMatrix * vertex);
63 | gl_Position = (u_mvp_matrix * a_vertex);
64 | }
65 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/shaders/colourhilight.frag:
--------------------------------------------------------------------------------
1 | // GLSL shader which makes output texture monochrome except colours
2 | // in a certain range for each component
3 | // This shader must be linked with another containing yuv2rgb function
4 | // to handle the video data first
5 |
6 |
7 | uniform mediump vec4 u_colrToDisplayMin, u_colrToDisplayMax;
8 |
9 | mediump vec4 yuv2rgb(void);
10 |
11 | void main(void)
12 | {
13 | mediump vec4 rgbColour = yuv2rgb();
14 | mediump float monoComponent;
15 |
16 | if((rgbColour.r > u_colrToDisplayMin.r) && (rgbColour.r < u_colrToDisplayMax.r) &&
17 | (rgbColour.g > u_colrToDisplayMin.g) && (rgbColour.g < u_colrToDisplayMax.g) &&
18 | (rgbColour.b > u_colrToDisplayMin.b) && (rgbColour.b < u_colrToDisplayMax.b))
19 | {
20 | gl_FragColor = rgbColour;
21 |
22 | }
23 | else
24 | {
25 | // monochrome:
26 | monoComponent = rgbColour.r + rgbColour.g + rgbColour.b;
27 | monoComponent /= 3.0;
28 |
29 | gl_FragColor = vec4(monoComponent, monoComponent, monoComponent, 1.0);
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/shaders/colourhilightswap.frag:
--------------------------------------------------------------------------------
1 | // GLSL shader which makes output texture monochrome except colours
2 | // in a certain range for each component
3 | // This shader must be linked with another containing yuv2rgb function
4 | // to handle the video data first
5 |
6 |
7 | uniform mediump vec4 u_colrToDisplayMin, u_colrToDisplayMax;
8 | uniform mediump vec4 u_componentSwapR, u_componentSwapG, u_componentSwapB;
9 |
10 | mediump vec4 yuv2rgb(void);
11 |
12 | void main(void)
13 | {
14 | mediump vec4 rgbColour = yuv2rgb();
15 | mediump vec4 swapColourSum;
16 | mediump vec4 swappedColour;
17 | mediump float monoComponent;
18 |
19 | if((rgbColour.r > u_colrToDisplayMin.r) && (rgbColour.r < u_colrToDisplayMax.r) &&
20 | (rgbColour.g > u_colrToDisplayMin.g) && (rgbColour.g < u_colrToDisplayMax.g) &&
21 | (rgbColour.b > u_colrToDisplayMin.b) && (rgbColour.b < u_colrToDisplayMax.b))
22 | {
23 | /*
24 | swappedColour.r = rgbColour.g;
25 | swappedColour.g = rgbColour.b;
26 | swappedColour.b = rgbColour.r;
27 | */
28 |
29 | swapColourSum = rgbColour * u_componentSwapR;
30 | //swappedColour.r = (swapColourSum.r + swapColourSum.g + swapColourSum.b)/3;
31 | swappedColour.r = clamp((swapColourSum.r + swapColourSum.g + swapColourSum.b), 0.0, 1.0);
32 |
33 | swapColourSum = rgbColour * u_componentSwapG;
34 | //swappedColour.g = (swapColourSum.r + swapColourSum.g + swapColourSum.b)/3;
35 | swappedColour.g = clamp((swapColourSum.r + swapColourSum.g + swapColourSum.b), 0.0, 1.0);
36 |
37 | swapColourSum = rgbColour * u_componentSwapB;
38 | //swappedColour.b = (swapColourSum.r + swapColourSum.g + swapColourSum.b)/3;
39 | swappedColour.b = clamp((swapColourSum.r + swapColourSum.g + swapColourSum.b), 0.0, 1.0);
40 |
41 | swappedColour.a = 1.0;
42 | gl_FragColor = swappedColour;
43 | }
44 | else
45 | {
46 | // monochrome:
47 | monoComponent = rgbColour.r + rgbColour.g + rgbColour.b;
48 | monoComponent /= 3.0;
49 |
50 | gl_FragColor = vec4(monoComponent, monoComponent, monoComponent, 1.0);
51 | }
52 | }
53 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/shaders/noeffect.frag:
--------------------------------------------------------------------------------
1 | // GLES shader which displays YUV420P video data correctly as a texture
2 | // This shader must be linked with another containing yuv2rgb function
3 | // to handle the video data first
4 |
5 | //#extension GL_ARB_texture_rectangle : enable
6 |
7 | mediump vec4 yuv2rgb(void);
8 |
9 | void main(void)
10 | {
11 | gl_FragColor = yuv2rgb();
12 | //gl_FragColor = vec4 (1.0, 1.0, 0.0, 1.0);
13 | }
14 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/shaders/noeffect.vert:
--------------------------------------------------------------------------------
1 | // GLES shader for passing interpolated texture co-ordinates
2 | // to the video fragment shader
3 |
4 |
5 | uniform highp mat4 u_mvp_matrix;
6 | uniform highp mat4 u_mv_matrix;
7 |
8 | attribute highp vec4 a_vertex;
9 | attribute highp vec4 a_texCoord;
10 |
11 | varying mediump vec4 v_texCoord;
12 |
13 | void main(void)
14 | {
15 | gl_Position = (u_mvp_matrix * a_vertex);
16 | v_texCoord = a_texCoord;
17 | }
18 |
19 |
20 |
21 |
22 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/shaders/vidlighting.frag:
--------------------------------------------------------------------------------
1 |
2 |
3 | mediump vec4 yuv2rgb(void);
4 |
5 | varying highp float v_LightIntensity;
6 |
7 | void main(void)
8 | {
9 | highp vec4 colour;
10 |
11 | colour = yuv2rgb();
12 | colour *= v_LightIntensity;
13 | // gl_FragColor = colour;
14 | gl_FragColor = vec4(colour.rgb, 1.0);
15 | }
16 |
17 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/shaders/vidlighting.vert:
--------------------------------------------------------------------------------
1 | uniform vec3 u_lightPosition;
2 |
3 | const mediump float SpecularContribution = 0.3;
4 | const mediump float DiffuseContribution = 1.0 - SpecularContribution;
5 |
6 |
7 |
8 | uniform highp mat4 u_mvp_matrix;
9 | uniform highp mat4 u_mv_matrix;
10 |
11 | attribute highp vec4 a_vertex;
12 | attribute highp vec3 a_normal;
13 | attribute highp vec4 a_texCoord;
14 |
15 | varying mediump float v_LightIntensity;
16 | varying highp vec4 v_texCoord;
17 |
18 | void main(void)
19 | {
20 | highp vec3 ecPosition = vec3 (u_mv_matrix * a_vertex);
21 |
22 | highp vec3 tnorm = normalize(u_mv_matrix * vec4(a_normal, 0.0)).xyz;
23 |
24 | highp vec3 lightVec = normalize(u_lightPosition - ecPosition);
25 | highp vec3 reflectVec = (2.0 * tnorm * dot(lightVec, tnorm)) - lightVec;
26 |
27 | highp vec3 viewVec = normalize(-ecPosition);
28 | highp float diffuse = max(dot(lightVec, tnorm), 0.0);
29 | highp float spec = 0.0;
30 |
31 | if (diffuse > 0.0)
32 | {
33 | spec = max(dot(reflectVec, viewVec), 0.0);
34 | //spec = max(dot(halfVec, tnorm), 0.0);
35 |
36 | spec = pow(spec, 6.0);
37 | }
38 |
39 | v_LightIntensity = DiffuseContribution * diffuse +
40 | SpecularContribution * spec;
41 |
42 | gl_Position = (u_mvp_matrix * a_vertex);
43 | v_texCoord = a_texCoord;
44 | }
45 |
46 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/shaders/yuv2rgbI420-normalisedtexcoords-recttex.frag:
--------------------------------------------------------------------------------
1 | // Perform YUV to RGB conversion on I420 format planar YUV data
2 | // Using formula:
3 | // R = 1.164(Y - 16) + 1.596(V - 128)
4 | // G = 1.164(Y - 16) - 0.813(V - 128) - 0.391(U - 128)
5 | // B = 1.164(Y - 16) + 2.018(U - 128)
6 |
7 | #extension GL_ARB_texture_rectangle : enable
8 |
9 | uniform lowp sampler2DRect u_vidTexture;
10 | uniform lowp float u_yHeight, u_yWidth;
11 |
12 | varying highp vec4 v_texCoord;
13 |
14 | // YUV offset (reciprocals of 255 based offsets above)
15 | const mediump vec3 offset = vec3(-0.0625, -0.5, -0.5);
16 | // RGB coefficients
17 | const mediump vec3 rCoeff = vec3(1.164, 0.000, 1.596);
18 | const mediump vec3 gCoeff = vec3(1.164, -0.391, -0.813);
19 | const mediump vec3 bCoeff = vec3(1.164, 2.018, 0.000);
20 |
21 | vec4 yuv2rgb()
22 | {
23 | mediump vec3 yuv, rgb;
24 | mediump vec4 texCoord;
25 |
26 | //texCoord = gl_TexCoord[texUnit];
27 | texCoord.x = v_texCoord.x * u_yWidth;
28 | texCoord.y = v_texCoord.y * u_yHeight;
29 |
30 | // lookup Y
31 | yuv.r = texture2DRect(u_vidTexture, texCoord.xy).r;
32 | // lookup U
33 | // co-ordinate conversion algorithm for i420:
34 | // x /= 2.0; if modulo2(y) then x += width/2.0;
35 | texCoord.x /= 2.0;
36 | if((texCoord.y - floor(texCoord.y)) == 0.0)
37 | {
38 | texCoord.x += (u_yWidth/2.0);
39 | }
40 | texCoord.y = u_yHeight+(texCoord.y/4.0);
41 | yuv.g = texture2DRect(u_vidTexture, texCoord.xy).r;
42 | // lookup V
43 | texCoord.y += u_yHeight/4.0;
44 | yuv.b = texture2DRect(u_vidTexture, texCoord.xy).r;
45 |
46 | // Convert
47 | yuv += offset;
48 | rgb.r = dot(yuv, rCoeff);
49 | rgb.g = dot(yuv, gCoeff);
50 | rgb.b = dot(yuv, bCoeff);
51 |
52 | return vec4(rgb, 1.0);
53 | }
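// Concrete example of the plane layout the sampling above assumes (an assumption about
// how the CPU side packs the three I420 planes into one tall luminance texture): for a
// 640x480 frame (u_yWidth = 640.0, u_yHeight = 480.0) the Y plane occupies texture rows
// 0-479, the U plane is folded into rows 480-599 (two half-width chroma rows per texture
// row), and the V plane follows in rows 600-719, which is what the u_yHeight + y/4.0 and
// + u_yHeight/4.0 offsets above index into.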
54 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/shaders/yuv2rgbI420-normalisedtexcoords.frag:
--------------------------------------------------------------------------------
1 | // Perform YUV to RGB conversion on I420 format planar YUV data
2 | // Using formula:
3 | // R = 1.164(Y - 16) + 1.596(V - 128)
4 | // G = 1.164(Y - 16) - 0.813(V - 128) - 0.391(U - 128)
5 | // B = 1.164(Y - 16) + 2.018(U - 128)
6 |
7 |
8 | uniform lowp sampler2D u_vidTexture;
9 | uniform lowp float u_yHeight, u_yWidth;
10 |
11 | varying highp vec4 v_texCoord;
12 |
13 | // YUV offset (reciprocals of 255 based offsets above)
14 | const mediump vec3 offset = vec3(-0.0625, -0.5, -0.5);
15 | // RGB coefficients
16 | const mediump vec3 rCoeff = vec3(1.164, 0.000, 1.596);
17 | const mediump vec3 gCoeff = vec3(1.164, -0.391, -0.813);
18 | const mediump vec3 bCoeff = vec3(1.164, 2.018, 0.000);
19 |
20 | mediump vec4 yuv2rgb()
21 | {
22 | mediump vec3 yuv, rgb;
23 | mediump vec4 texCoord;
24 |
25 | //texCoord = gl_TexCoord[texUnit];
26 | texCoord.x = v_texCoord.x * u_yWidth;
27 | texCoord.y = v_texCoord.y * u_yHeight;
28 |
29 | // lookup Y
30 | yuv.r = texture2D(u_vidTexture, texCoord.xy).r;
31 | // lookup U
32 | // co-ordinate conversion algorithm for i420:
33 | // x /= 2.0; if modulo2(y) then x += width/2.0;
34 | texCoord.x /= 2.0;
35 | if((texCoord.y - floor(texCoord.y)) == 0.0)
36 | {
37 | texCoord.x += (u_yWidth/2.0);
38 | }
39 | texCoord.y = u_yHeight+(texCoord.y/4.0);
40 | yuv.g = texture2D(u_vidTexture, texCoord.xy).r;
41 | // lookup V
42 | texCoord.y += u_yHeight/4.0;
43 | yuv.b = texture2D(u_vidTexture, texCoord.xy).r;
44 |
45 | // Convert
46 | yuv += offset;
47 | rgb.r = dot(yuv, rCoeff);
48 | rgb.g = dot(yuv, gCoeff);
49 | rgb.b = dot(yuv, bCoeff);
50 |
51 | return vec4(rgb, 1.0);
52 | }
53 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/shaders/yuv2rgbI420-recttex.frag:
--------------------------------------------------------------------------------
1 | // Perform YUV to RGB conversion on I420 format planar YUV data
2 | // Using formula:
3 | // R = 1.164(Y - 16) + 1.596(V - 128)
4 | // G = 1.164(Y - 16) - 0.813(V - 128) - 0.391(U - 128)
5 | // B = 1.164(Y - 16) + 2.018(U - 128)
6 |
7 | #extension GL_ARB_texture_rectangle : enable
8 |
9 | uniform lowp sampler2DRect u_vidTexture;
10 | uniform lowp float u_yHeight, u_yWidth;
11 |
12 | varying highp vec4 v_texCoord;
13 |
14 | // YUV offset (reciprocals of 255 based offsets above)
15 | const mediump vec3 offset = vec3(-0.0625, -0.5, -0.5);
16 | // RGB coefficients
17 | const mediump vec3 rCoeff = vec3(1.164, 0.000, 1.596);
18 | const mediump vec3 gCoeff = vec3(1.164, -0.391, -0.813);
19 | const mediump vec3 bCoeff = vec3(1.164, 2.018, 0.000);
20 |
21 | vec4 yuv2rgb()
22 | {
23 | mediump vec3 yuv, rgb;
24 | mediump vec4 texCoord;
25 |
26 | //texCoord = gl_TexCoord[texUnit];
27 | texCoord = v_texCoord;
28 |
29 | // lookup Y
30 | yuv.r = texture2DRect(u_vidTexture, texCoord.xy).r;
31 | // lookup U
32 | // co-ordinate conversion algorithm for i420:
33 | // x /= 2.0; if modulo2(y) then x += width/2.0;
34 | texCoord.x /= 2.0;
35 | if((texCoord.y - floor(texCoord.y)) == 0.0)
36 | {
37 | texCoord.x += (u_yWidth/2.0);
38 | }
39 | texCoord.y = u_yHeight+(texCoord.y/4.0);
40 | yuv.g = texture2DRect(u_vidTexture, texCoord.xy).r;
41 | // lookup V
42 | texCoord.y += u_yHeight/4.0;
43 | yuv.b = texture2DRect(u_vidTexture, texCoord.xy).r;
44 |
45 | // Convert
46 | yuv += offset;
47 | rgb.r = dot(yuv, rCoeff);
48 | rgb.g = dot(yuv, gCoeff);
49 | rgb.b = dot(yuv, bCoeff);
50 |
51 | return vec4(rgb, 1.0);
52 | }
53 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/shaders/yuv2rgbI420.frag:
--------------------------------------------------------------------------------
1 | // Perform YUV to RGB conversion on I420 format planar YUV data
2 | // Using formula:
3 | // R = 1.164(Y - 16) + 1.596(V - 128)
4 | // G = 1.164(Y - 16) - 0.813(V - 128) - 0.391(U - 128)
5 | // B = 1.164(Y - 16) + 2.018(U - 128)
6 |
7 |
8 | uniform lowp sampler2D u_vidTexture;
9 | uniform lowp float u_yHeight, u_yWidth;
10 |
11 | varying highp vec4 v_texCoord;
12 |
13 | // YUV offset (reciprocals of 255 based offsets above)
14 | const mediump vec3 offset = vec3(-0.0625, -0.5, -0.5);
15 | // RGB coefficients
16 | const mediump vec3 rCoeff = vec3(1.164, 0.000, 1.596);
17 | const mediump vec3 gCoeff = vec3(1.164, -0.391, -0.813);
18 | const mediump vec3 bCoeff = vec3(1.164, 2.018, 0.000);
19 |
20 | mediump vec4 yuv2rgb()
21 | {
22 | mediump vec3 yuv, rgb;
23 | mediump vec4 texCoord;
24 |
25 | //texCoord = gl_TexCoord[texUnit];
26 | texCoord = v_texCoord;
27 |
28 | // lookup Y
29 | yuv.r = texture2D(u_vidTexture, texCoord.xy).r;
30 | // lookup U
31 | // co-ordinate conversion algorithm for i420:
32 | // x /= 2.0; if modulo2(y) then x += width/2.0;
33 | texCoord.x /= 2.0;
34 | if((texCoord.y - floor(texCoord.y)) == 0.0)
35 | {
36 | texCoord.x += (u_yWidth/2.0);
37 | }
38 | texCoord.y = u_yHeight+(texCoord.y/4.0);
39 | yuv.g = texture2D(u_vidTexture, texCoord.xy).r;
40 | // lookup V
41 | texCoord.y += u_yHeight/4.0;
42 | yuv.b = texture2D(u_vidTexture, texCoord.xy).r;
43 |
44 | // Convert
45 | yuv += offset;
46 | rgb.r = dot(yuv, rCoeff);
47 | rgb.g = dot(yuv, gCoeff);
48 | rgb.b = dot(yuv, bCoeff);
49 |
50 | return vec4(rgb, 1.0);
51 | }
52 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/shaders/yuv2rgbUYVY-imgstream.frag:
--------------------------------------------------------------------------------
1 | // Perform YUV to RGB conversion on UYVY format interleaved YUV data
2 | // Using the built in IMG streaming texture extension for PowerVR.
3 |
4 | #ifdef GL_IMG_texture_stream2
5 | #extension GL_IMG_texture_stream2 : enable
6 | #endif
7 |
8 | varying highp vec4 v_texCoord;
9 | uniform samplerStreamIMG u_vidTexture;
10 |
11 | mediump vec4 yuv2rgb()
12 | {
13 | mediump vec4 rgb;
14 | mediump vec2 v2texcoord = v_texCoord.xy;
15 |
16 | rgb = textureStreamIMG(u_vidTexture, v2texcoord);
17 |
18 | return rgb;
19 | }
20 |
21 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/shaders/yuv2rgbUYVY-normalisedtexcoords-imgstream.frag:
--------------------------------------------------------------------------------
1 | // Perform YUV to RGB conversion on UYVY format interleaved YUV data
2 | // Using the built in IMG streaming texture extension for PowerVR.
3 |
4 | #ifdef GL_IMG_texture_stream2
5 | #extension GL_IMG_texture_stream2 : enable
6 | #endif
7 |
8 | varying highp vec4 v_texCoord;
9 | uniform samplerStreamIMG u_vidTexture;
10 |
11 | uniform lowp float u_yHeight, u_yWidth;
12 |
13 | mediump vec4 yuv2rgb()
14 | {
15 | mediump vec4 rgb;
16 | mediump vec2 v2texcoord = v_texCoord.xy;
17 |
18 | v2texcoord.x = v2texcoord.x * u_yWidth;
19 | v2texcoord.y = v2texcoord.y * u_yHeight;
20 |
21 | rgb = textureStreamIMG(u_vidTexture, v2texcoord);
22 |
23 | return rgb;
24 | }
25 |
26 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/shaders/yuv2rgbUYVY-normalisedtexcoords.frag:
--------------------------------------------------------------------------------
1 | // Perform YUV to RGB conversion on UYVY format interleaved YUV data
2 | // Using formula:
3 | // R = 1.164(Y - 16) + 1.596(V - 128)
4 | // G = 1.164(Y - 16) - 0.813(V - 128) - 0.391(U - 128)
5 | // B = 1.164(Y - 16) + 2.018(U - 128)
6 |
7 |
8 | uniform lowp sampler2D u_vidTexture;
9 | uniform lowp float u_yHeight, u_yWidth;
10 |
11 | varying highp vec4 v_texCoord;
12 |
13 | // YUV offset (reciprocals of 255 based offsets above)
14 | const mediump vec3 offset = vec3(-0.0625, -0.5, -0.5);
15 | // RGB coefficients
16 | const mediump vec3 rCoeff = vec3(1.164, 0.000, 1.596);
17 | const mediump vec3 gCoeff = vec3(1.164, -0.391, -0.813);
18 | const mediump vec3 bCoeff = vec3(1.164, 2.018, 0.000);
19 |
20 |
21 | mediump vec4 yuv2rgb()
22 | {
23 | mediump vec3 yuv, rgb;
24 | mediump vec4 texCoord;
25 |
26 | // For now just show something:
27 | texCoord.x = v_texCoord.x * u_yWidth;
28 | texCoord.y = v_texCoord.y * u_yHeight;
29 | yuv.r = texture2D(u_vidTexture, texCoord.xy).r;
30 | return vec4(vec3(yuv.r), 1.0);
31 |
32 |
33 | // lookup Y
34 | texCoord.x = v_texCoord.x * u_yWidth;
35 | texCoord.y = v_texCoord.y * u_yHeight;
36 | texCoord.x = texCoord.x / 2.0;
37 | if((texCoord.x - floor(texCoord.x)) == 0.0)
38 | {
39 | texCoord.x = (v_texCoord.x * 2.0) + 1.0;
40 | }
41 | else
42 | {
43 | texCoord.x = ((v_texCoord.x + 1.0) * 2.0) - 1.0;
44 | }
45 | yuv.r = texture2D(u_vidTexture, texCoord.xy).r;
46 |
47 | // lookup U
48 | texCoord.x = v_texCoord.x * u_yWidth;
49 | texCoord.y = v_texCoord.y * u_yHeight;
50 | texCoord.x = floor(texCoord.x / 2.0);
51 | texCoord.x *= 4.0;
52 | yuv.g = texture2D(u_vidTexture, texCoord.xy).r;
53 |
54 | // lookup V
55 | texCoord.x = v_texCoord.x * u_yWidth;
56 | texCoord.y = v_texCoord.y * u_yHeight;
57 | texCoord.x = floor(texCoord.x / 2.0);
58 | texCoord.x *= 4.0;
59 | texCoord.x += 2.0;
60 |     yuv.b = texture2D(u_vidTexture, texCoord.xy).r; // V goes in .b (was overwriting the U sample in .g)
61 |
62 | // Convert
63 | yuv += offset;
64 | rgb.r = dot(yuv, rCoeff);
65 | rgb.g = dot(yuv, gCoeff);
66 | rgb.b = dot(yuv, bCoeff);
67 |
68 | return vec4(rgb, 1.0);
69 | }
70 |
71 |
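The disabled lookups above are trying to pull Y, U and V out of a UYVY-interleaved texture, where every pair of horizontal pixels shares one four-byte macropixel laid out as U0 Y0 V0 Y1. A CPU-side sketch of that unpacking may make the intended index math clearer (reference only, not project code):

```cpp
#include <cstdint>

// UYVY packs two horizontal pixels into four bytes: U0 Y0 V0 Y1.
// Both pixels of the pair share the same U and V samples.
struct Yuv { uint8_t y, u, v; };

static Yuv sampleUYVY(const uint8_t *row, int x)
{
    const uint8_t *pair = row + (x / 2) * 4;   // start of the 4-byte macropixel
    Yuv s;
    s.u = pair[0];
    s.y = (x & 1) ? pair[3] : pair[1];         // Y0 for even x, Y1 for odd x
    s.v = pair[2];
    return s;
}
```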
--------------------------------------------------------------------------------
/src/qt_gl_gst/shaders/yuv2rgbUYVY.frag:
--------------------------------------------------------------------------------
1 | // Perform YUV to RGB conversion on UYVY format interleaved YUV data
2 | // Using formula:
3 | // R = 1.164(Y - 16) + 1.596(V - 128)
4 | // G = 1.164(Y - 16) - 0.813(V - 128) - 0.391(U - 128)
5 | // B = 1.164(Y - 16) + 2.018(U - 128)
6 |
7 |
8 | uniform lowp sampler2D u_vidTexture;
9 | uniform lowp float u_yHeight, u_yWidth;
10 |
11 | varying highp vec4 v_texCoord;
12 |
13 | // YUV offsets (the 16/128/128 offsets above, scaled to the 0..1 texture range)
14 | const mediump vec3 offset = vec3(-0.0625, -0.5, -0.5);
15 | // RGB coefficients
16 | const mediump vec3 rCoeff = vec3(1.164, 0.000, 1.596);
17 | const mediump vec3 gCoeff = vec3(1.164, -0.391, -0.813);
18 | const mediump vec3 bCoeff = vec3(1.164, 2.018, 0.000);
19 |
20 |
21 | mediump vec4 yuv2rgb()
22 | {
23 | mediump vec3 yuv, rgb;
24 | mediump vec4 texCoord;
25 |
26 | // For now just show something:
27 | texCoord = v_texCoord;
28 | yuv.r = texture2D(u_vidTexture, texCoord.xy).r;
29 | rgb.r = yuv.r;
30 | rgb.g = yuv.r;
31 | rgb.b = yuv.r;
32 | return vec4(rgb, 1.0);
33 |
34 |
35 | // lookup Y
36 | texCoord = v_texCoord;
37 | texCoord.x = texCoord.x / 2.0;
38 | if((texCoord.x - floor(texCoord.x)) == 0.0)
39 | {
40 | texCoord.x = (v_texCoord.x * 2.0) + 1.0;
41 | }
42 | else
43 | {
44 | texCoord.x = ((v_texCoord.x + 1.0) * 2.0) - 1.0;
45 | }
46 | yuv.r = texture2D(u_vidTexture, texCoord.xy).r;
47 |
48 |     // stop now and we should have a black-and-white image based on luminance:
49 | rgb.r = yuv.r;
50 | rgb.g = yuv.r;
51 | rgb.b = yuv.r;
52 | return vec4(rgb, 1.0);
53 |
54 |
55 |
56 | // lookup U
57 | texCoord = v_texCoord;
58 | texCoord.x = floor(texCoord.x / 2.0);
59 | texCoord.x *= 4.0;
60 | yuv.g = texture2D(u_vidTexture, texCoord.xy).r;
61 |
62 | // lookup V
63 | texCoord = v_texCoord;
64 | texCoord.x = floor(texCoord.x / 2.0);
65 | texCoord.x *= 4.0;
66 | texCoord.x += 2.0;
67 |     yuv.b = texture2D(u_vidTexture, texCoord.xy).r; // V goes in .b (was overwriting the U sample in .g)
68 |
69 | // Convert
70 | yuv += offset;
71 | rgb.r = dot(yuv, rCoeff);
72 | rgb.g = dot(yuv, gCoeff);
73 | rgb.b = dot(yuv, bCoeff);
74 |
75 | return vec4(rgb, 1.0);
76 | }
77 |
78 |
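The constants in these shaders are the usual BT.601 limited-range conversion, just rescaled from 0..255 to the 0..1 values that texture2D returns. As a sanity check, the same formula from the header comment applied to 8-bit samples looks like this (standalone reference sketch):

```cpp
#include <algorithm>
#include <cstdint>

// BT.601 limited-range YCbCr -> RGB, matching the shader coefficients
// (1.164, 1.596, -0.813, -0.391, 2.018) but working on 0..255 values.
static void yuvToRgb(uint8_t y, uint8_t u, uint8_t v,
                     uint8_t &r, uint8_t &g, uint8_t &b)
{
    const double yd = 1.164 * (y - 16);
    const double rd = yd + 1.596 * (v - 128);
    const double gd = yd - 0.813 * (v - 128) - 0.391 * (u - 128);
    const double bd = yd + 2.018 * (u - 128);

    r = static_cast<uint8_t>(std::min(255.0, std::max(0.0, rd)));
    g = static_cast<uint8_t>(std::min(255.0, std::max(0.0, gd)));
    b = static_cast<uint8_t>(std::min(255.0, std::max(0.0, bd)));
}
```

The shaders' offset vector plays the role of the -16/-128/-128 terms here.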
--------------------------------------------------------------------------------
/src/qt_gl_gst/tigstpipeline.cpp:
--------------------------------------------------------------------------------
1 |
2 | #include "applogger.h"
3 | #include "tigstpipeline.h"
4 |
5 | TIGStreamerPipeline::TIGStreamerPipeline(int vidIx,
6 | const QString &videoLocation,
7 | const char *renderer_slot,
8 | QObject *parent)
9 | : GStreamerPipeline(vidIx, videoLocation, renderer_slot, parent),
10 | m_qtdemux(NULL),
11 | m_tividdecode(NULL),
12 | m_tiaudiodecode(NULL),
13 | m_videoqueue(NULL)
14 | {
15 | LOG(LOG_VIDPIPELINE, Logger::Debug1, "constructor entered");
16 | }
17 |
18 | TIGStreamerPipeline::~TIGStreamerPipeline()
19 | {
20 | }
21 |
22 | void TIGStreamerPipeline::Configure()
23 | {
24 | LOG(LOG_VIDPIPELINE, Logger::Debug1, "Configure entered");
25 |
26 | gst_init (NULL, NULL);
27 |
28 | /* Create the elements */
29 | this->m_pipeline = gst_pipeline_new (NULL);
30 | if(this->m_videoLocation.isEmpty())
31 | {
32 | LOG(LOG_VIDPIPELINE, Logger::Info, "No video file specified. Using video test source.");
33 | this->m_source = gst_element_factory_make ("videotestsrc", "testsrc");
34 | }
35 | else
36 | {
37 | this->m_source = gst_element_factory_make ("filesrc", "filesrc");
38 | g_object_set (G_OBJECT (this->m_source), "location", m_videoLocation.toUtf8().constData(), NULL);
39 | }
40 |
41 | // gst-launch -v filesrc location=sample.mp4 ! qtdemux name=demux demux.audio_00 !
42 | // queue max-size-buffers=8000 max-size-time=0 max-size-bytes=0 ! TIAuddec1 !
43 | // alsasink demux.video_00 ! queue ! TIViddec2 ! TIDmaiVideoSink videoStd=VGA videoOutput=LCD
44 |
45 |
46 | this->m_qtdemux = gst_element_factory_make ("qtdemux", "demux");
47 | this->m_tividdecode = gst_element_factory_make ("TIViddec2", "tividdecode");
48 | this->m_tiaudiodecode = gst_element_factory_make ("TIAuddec1", "tiaudiodecode");
49 | this->m_audioqueue = gst_element_factory_make ("queue", "audioqueue");
50 | this->m_videoqueue = gst_element_factory_make ("queue", "videoqueue");
51 | this->m_videosink = gst_element_factory_make ("fakesink", "videosink");
52 | //this->m_audiosink = gst_element_factory_make ("alsasink", "audiosink");
53 |
54 | g_object_set(G_OBJECT(this->m_audioqueue),
55 | "max-size-buffers", 8000,
56 |                  "max-size-time", G_GUINT64_CONSTANT(0), // guint64 property: must not be passed as a plain int through varargs
57 | "max-size-bytes", 0,
58 | NULL);
59 |
60 | if (this->m_pipeline == NULL || this->m_source == NULL || this->m_qtdemux == NULL ||
61 | this->m_tividdecode == NULL || this->m_tiaudiodecode == NULL ||
62 | this->m_audioqueue == NULL || this->m_videoqueue == NULL ||
63 | this->m_videosink == NULL)// || this->m_audiosink == NULL)
64 | {
65 | LOG(LOG_VIDPIPELINE, Logger::Error, "One of the GStreamer decoding elements is missing");
66 | }
67 |
68 | /* Setup the pipeline */
69 | gst_bin_add_many (GST_BIN(this->m_pipeline), this->m_source, this->m_qtdemux, this->m_tividdecode,
70 | this->m_tiaudiodecode, this->m_audioqueue, this->m_videoqueue, this->m_videosink,
71 | //this->m_audiosink,
72 | NULL);
73 |
74 |     // Like all GStreamer elements, qtdemux inherits from GstElement, which provides
75 |     // the pad-added signal:
76 | g_signal_connect (this->m_qtdemux, "pad-added", G_CALLBACK (on_new_pad), this);
77 |
78 | /* Link the elements */
79 | gst_element_link (this->m_source, this->m_qtdemux);
80 | gst_element_link (this->m_audioqueue, this->m_tiaudiodecode);
81 | //gst_element_link (this->m_tiaudiodecode, this->m_audiosink);
82 | gst_element_link (this->m_videoqueue, this->m_tividdecode);
83 | #if 0
84 | /* Use caps filter to get I420 from video decoder */
85 | GstCaps *caps;
86 | caps = gst_caps_new_simple ("video/x-raw-yuv",
87 | "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('I', '4', '2', '0'),
88 | NULL);
89 | if(!gst_element_link_filtered (this->m_tividdecode, this->m_videosink, caps))
90 | {
91 | LOG(LOG_VIDPIPELINE, Logger::Error, "Failed to link viddecode and videosink");
92 | }
93 | gst_caps_unref (caps);
94 | #else
95 | gst_element_link (this->m_tividdecode, this->m_videosink);
96 | #endif
97 | m_bus = gst_pipeline_get_bus(GST_PIPELINE(m_pipeline));
98 | gst_bus_add_watch(m_bus, (GstBusFunc) bus_call, this);
99 | gst_object_unref(m_bus);
100 |
101 | gst_element_set_state (this->m_pipeline, GST_STATE_PAUSED);
102 |
103 | }
104 |
105 | void TIGStreamerPipeline::on_new_pad(GstElement *element,
106 | GstPad *pad,
107 | TIGStreamerPipeline* p)
108 | {
109 | GstPad *sinkpad;
110 | GstCaps *caps;
111 | GstStructure *str;
112 |
113 | Q_UNUSED(element);
114 |
115 | caps = gst_pad_get_caps (pad);
116 | str = gst_caps_get_structure (caps, 0);
117 |
118 | // DEBUG:
119 | const gchar *checkName = gst_structure_get_name (str);
120 | LOG(LOG_VIDPIPELINE, Logger::Debug1, "New pad on qtdemux, pad caps structure name: %s", checkName);
121 |
122 | if (g_strrstr (gst_structure_get_name (str), "video"))
123 | {
124 | LOG(LOG_VIDPIPELINE, Logger::Debug1, "Pad is for video");
125 |
126 | sinkpad = gst_element_get_pad (p->m_videoqueue, "sink");
127 |
128 | g_object_set (G_OBJECT (p->m_videosink),
129 | "sync", TRUE,
130 | "signal-handoffs", TRUE,
131 | NULL);
132 |
133 | g_signal_connect (p->m_videosink,
134 | "handoff",
135 | G_CALLBACK(on_gst_buffer),
136 | p);
137 |
138 | }
139 | else
140 | {
141 | LOG(LOG_VIDPIPELINE, Logger::Debug1, "Pad is for audio, ignoring for now");
142 | gst_caps_unref (caps);
143 | return;
144 |
145 | sinkpad = gst_element_get_pad (p->m_audioqueue, "sink");
146 | }
147 |
148 | gst_caps_unref (caps);
149 |
150 | gst_pad_link (pad, sinkpad);
151 | gst_object_unref (sinkpad);
152 | }
153 |
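Configure() assumes the TI DMAI elements (TIViddec2, TIAuddec1) are installed; if they are not, gst_element_factory_make() quietly returns NULL and only the error log above fires. A small sketch of probing for them before constructing the pipeline (the helper name is illustrative, not part of this code):

```cpp
#include <gst/gst.h>

// Returns true if an element factory with the given name is registered,
// e.g. haveGstElement("TIViddec2") before building a TIGStreamerPipeline.
static bool haveGstElement(const char *name)
{
    GstElementFactory *factory = gst_element_factory_find(name);
    if (!factory)
        return false;
    gst_object_unref(factory);
    return true;
}
```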
--------------------------------------------------------------------------------
/src/qt_gl_gst/tigstpipeline.h:
--------------------------------------------------------------------------------
1 | #ifndef TIGSTPIPELINE_H
2 | #define TIGSTPIPELINE_H
3 |
4 | // Re-include base class header here to keep the MOC happy:
5 | #include "gstpipeline.h"
6 |
7 | class TIGStreamerPipeline : public GStreamerPipeline
8 | {
9 | Q_OBJECT
10 |
11 | public:
12 | TIGStreamerPipeline(int vidIx,
13 | const QString &videoLocation,
14 | const char *renderer_slot,
15 | QObject *parent);
16 | ~TIGStreamerPipeline();
17 |
18 | void Configure();
19 |
20 | // bit lazy just making these public for gst callbacks, but it'll do for now
21 | GstElement *m_qtdemux;
22 | GstElement *m_tividdecode;
23 | GstElement *m_tiaudiodecode;
24 | GstElement *m_videoqueue;
25 |
26 | protected:
27 | static void on_new_pad(GstElement *element, GstPad *pad, TIGStreamerPipeline* p);
28 | };
29 |
30 | #endif // TIGSTPIPELINE_H
31 |
--------------------------------------------------------------------------------
/src/qt_gl_gst/yuvdebugwindow.cpp:
--------------------------------------------------------------------------------
1 | #include "yuvdebugwindow.h"
2 |
3 | YuvDebugWindow::YuvDebugWindow(QWidget *parent) :
4 | QDialog(parent)
5 | {
6 | m_imageLabel = new QLabel(this);
7 | m_imageLabel->setText("Video will be shown here");
8 |
9 | QVBoxLayout *layout = new QVBoxLayout;
10 | layout->addWidget(m_imageLabel);
11 | setLayout(layout);
12 | }
13 |
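The debug window only exposes a public QLabel; frames are pushed into it from elsewhere in the application. A minimal sketch of feeding it a luma plane as a greyscale QImage, using a hypothetical helper that is not part of this project:

```cpp
#include <QColor>
#include <QImage>
#include <QPixmap>
#include <QVector>

#include "yuvdebugwindow.h"

// Hypothetical helper: show the luma plane of a frame in the debug window.
// The QImage wraps the caller's buffer without copying; the QPixmap conversion
// makes the copy that the label keeps.
static void showLumaPlane(YuvDebugWindow *win, const uchar *yPlane,
                          int width, int height, int stride)
{
    QImage img(yPlane, width, height, stride, QImage::Format_Indexed8);

    // Greyscale colour table so index == brightness.
    QVector<QRgb> grey(256);
    for (int i = 0; i < 256; ++i)
        grey[i] = qRgb(i, i, i);
    img.setColorTable(grey);

    win->m_imageLabel->setPixmap(QPixmap::fromImage(img));
}
```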
--------------------------------------------------------------------------------
/src/qt_gl_gst/yuvdebugwindow.h:
--------------------------------------------------------------------------------
1 | #ifndef YUVDEBUGWINDOW_H
2 | #define YUVDEBUGWINDOW_H
3 |
4 | #include <QDialog>
5 | #include <QLabel>
6 | #include <QVBoxLayout>
7 | #include <QWidget>
8 |
9 | class YuvDebugWindow : public QDialog
10 | {
11 | Q_OBJECT
12 | public:
13 | explicit YuvDebugWindow(QWidget *parent = 0);
14 | QLabel *m_imageLabel;
15 |
16 | signals:
17 |
18 | public slots:
19 |
20 | };
21 |
22 | #endif // YUVDEBUGWINDOW_H
23 |
--------------------------------------------------------------------------------