├── .gitignore
├── LICENSE
├── Makefile
├── README.mdwn
├── rpi-camera-dump-yuv.c
├── rpi-camera-encode.c
├── rpi-camera-playback.c
└── rpi-encode-yuv.c
/.gitignore:
--------------------------------------------------------------------------------
1 | rpi-camera-dump-yuv
2 | rpi-camera-encode
3 | rpi-camera-playback
4 | rpi-encode-yuv
5 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | http://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
177 | END OF TERMS AND CONDITIONS
178 |
179 | APPENDIX: How to apply the Apache License to your work.
180 |
181 | To apply the Apache License to your work, attach the following
182 | boilerplate notice, with the fields enclosed by brackets "[]"
183 | replaced with your own identifying information. (Don't include
184 | the brackets!) The text should be enclosed in the appropriate
185 | comment syntax for the file format. We also recommend that a
186 | file or class name and description of purpose be included on the
187 | same "printed page" as the copyright notice for easier
188 | identification within third-party archives.
189 |
190 | Copyright [yyyy] [name of copyright owner]
191 |
192 | Licensed under the Apache License, Version 2.0 (the "License");
193 | you may not use this file except in compliance with the License.
194 | You may obtain a copy of the License at
195 |
196 | http://www.apache.org/licenses/LICENSE-2.0
197 |
198 | Unless required by applicable law or agreed to in writing, software
199 | distributed under the License is distributed on an "AS IS" BASIS,
200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201 | See the License for the specific language governing permissions and
202 | limitations under the License.
203 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
# Simple makefile for rpi-openmax-demos.

PROGRAMS = rpi-camera-encode rpi-camera-dump-yuv rpi-encode-yuv rpi-camera-playback
CC = gcc
# Note: -ftree-vectorize and -pipe were previously listed twice (once in the
# define block and once with the code-gen flags); listing them once is enough.
CFLAGS = -DSTANDALONE -D__STDC_CONSTANT_MACROS -D__STDC_LIMIT_MACROS -DTARGET_POSIX -D_LINUX -DPIC -D_REENTRANT -D_LARGEFILE64_SOURCE -D_FILE_OFFSET_BITS=64 -U_FORTIFY_SOURCE -DHAVE_LIBOPENMAX=2 -DOMX -DOMX_SKIP64BIT -DUSE_EXTERNAL_OMX -DHAVE_LIBBCM_HOST -DUSE_EXTERNAL_LIBBCM_HOST -DUSE_VCHIQ_ARM \
	-I/opt/vc/include -I/opt/vc/include/interface/vcos/pthreads -I/opt/vc/include/interface/vmcs_host/linux \
	-fPIC -ftree-vectorize -pipe -Wall -Werror -O2 -g
LDFLAGS = -L/opt/vc/lib -lopenmaxil

all: $(PROGRAMS)

clean:
	rm -f $(PROGRAMS)

.PHONY: all clean
--------------------------------------------------------------------------------
/README.mdwn:
--------------------------------------------------------------------------------
1 | # OpenMAX IL demos for Raspberry Pi
2 |
3 | I wanted to develop a program that utilizes
4 | [the RaspiCam camera board for the Raspberry Pi](http://www.raspberrypi.org/archives/tag/camera).
5 | [The Raspberry Pi Foundation](http://www.raspberrypi.org/about) provides
6 | [RaspiVid command-line application](https://github.com/raspberrypi/userland/blob/master/host_applications/linux/apps/raspicam/RaspiVid.c)
7 | that can record video using the RaspiCam. However, this wasn't flexible enough
8 | for my needs so I decided to learn how one can utilize the RaspiCam
9 | programmatically from within one's own application code.
10 |
11 | Initially I had no idea what would be needed. I did my research and found out
12 | that you could use OpenMAX IL API to drive the RaspiCam and VideoCore hardware
13 | video encoder on the RaspberryPi to get H.264 encoded high-definition video out
14 | of the system in real time.
15 |
16 | So I spent countless hours reading the OpenMAX IL API specification and some
17 | existing code using OpenMAX IL API (see [References] section below). However,
18 | it was all very difficult to understand due to the complexity of the spec and
19 | layering of the sample code. I wanted to go to the basics and write trivial,
20 | unencapsulated demo code in order to learn how things fundamentally worked.
21 |
22 | And here is the result. Because of the lack of simple sample code I decided to
23 | document, package and share my work. Maybe it helps other people in a similar
24 | situation.
25 |
26 | ## Downloading
27 |
28 | OpenMAX IL demos for Raspberry Pi home page is at
29 | and it can be
30 | downloaded by cloning the public Git repository at
31 | `git://scm.solitudo.net/rpi-openmax-demos.git`.
32 | Gitweb interface is available at
33 | .
34 | The software is also available at [GitHub](https://github.com) page
35 | .
36 |
37 | ## Installation
38 |
39 | This code has been developed and tested on [Raspbian](http://www.raspbian.org)
40 | operating system running on Raspberry Pi Model B Rev. 1.0. You need the
41 | standard GCC compiler toolchain, GNU Make and Raspberry Pi system libraries
42 | development files installed, i.e. the following packages must be installed.
43 |
44 | gcc make libraspberrypi-dev
45 |
46 | The binaries can be then compiled by running `make` command in the cloned Git
47 | repository base directory. No special installation is required, you can just
48 | run the self-contained binaries directly from the source directory.
49 |
50 | ## Code structure
51 |
52 | This is not elegant or efficient code. It's aiming to be as simple as possible
53 | and each demo program is fully self-contained. That means hundreds of lines of
54 | duplicated helper and boiler-plate code in each demo program source code file.
55 | The relevant OpenMAX IL code is all sequentially placed inside a single main
56 | routine in each demo program in order to make it simple to follow what is
57 | happening. Error handling is dead simple - if something goes wrong, report the
58 | error and exit immediately. Other anti-patterns, such as busy waiting instead
59 | of proper signaling based control of the flow of execution, are also employed in
60 | the name of simplicity. Try not to be distracted by these flaws. This code is
61 | not for production usage but to show how things work in a simple way.
62 |
63 | The program flow in each demo program goes as described here.
64 |
65 | 1. Comment header with usage instructions
66 | 1. Hard-coded configuration parameters
67 | 1. General helper routines
68 | 1. Signal and event handler routines
69 | 1. Main routine implementing the program logic
70 | 1. Initialization
71 | 1. Component creation
72 | 1. Component configuration
73 | 1. Enable component execution
74 | 1. Tunneling of the subsequent components in the pipeline
75 | 1. Enabling of relevant component ports
76 | 1. Changing of the component states
77 | 1. Buffer allocation
78 | 1. Main program loop
79 | 1. Clean up and resource de-allocation
80 | 1. Flushing of the buffers
81 | 1. Disabling of relevant component ports
82 | 1. De-allocation of the buffers
83 | 1. Changing of the component states
84 | 1. De-allocation of the component handles
85 | 1. Program exit
86 |
87 | ## Usage
88 |
89 | This section describes each program included in this demo bundle. Common to
90 | each program is that any data is written to `stdout` and read from `stdin`.
91 | Quite verbose status messages are printed to `stderr`. There are no command-line
92 | switches. All configuration is hard-coded and can be found at the top of the
93 | each `.c` source code file. Execution of each program can be stopped by sending
94 | `INT`, `TERM` or `QUIT` signal to the process e.g. by pressing `Ctrl-C` when
95 | the program is running.
96 |
97 | ### rpi-camera-encode
98 |
99 | `rpi-camera-encode` records video using the RaspiCam module and encodes the
100 | stream using the VideoCore hardware encoder using H.264 codec. The raw H.264
101 | stream is emitted to `stdout`. In order to properly display the encoded video,
102 | it must be wrapped inside a container format, e.g.
103 | [Matroska](http://matroska.org/technical/specs/).
104 |
105 | The following example uses `mkvmerge` tool from the
106 | [MKVToolNix](http://www.bunkus.org/videotools/mkvtoolnix/) software package to
107 | create a Matroska video file from the recorded H.264 file and then play it using
108 | [omxplayer](https://github.com/huceke/omxplayer) (although omxplayer happens to
109 | deal also with the raw H.264 stream, but generally other players, such as
110 | [avplay](http://libav.org/avplay.html), don't).
111 |
112 | $ ./rpi-camera-encode >test.h264
113 | # Press Ctrl-C to interrupt the recording...
114 | $ mkvmerge -o test.mkv test.h264
115 | $ omxplayer test.mkv
116 |
117 | `rpi-camera-encode` uses `camera`, `video_encode` and `null_sink` components.
118 | `camera` video output port is tunneled to `video_encode` input port and
119 | `camera` preview output port is tunneled to `null_sink` input port. H.264
120 | encoded video is read from the buffer of `video_encode` output port and dumped
121 | to `stdout`.
122 |
123 | ### rpi-camera-playback
124 |
125 | `rpi-camera-playback` records video using the RaspiCam module and displays it
126 | on the Raspberry Pi frame buffer display device, i.e. it should be run on the
127 | Raspbian console.
128 |
129 | $ ./rpi-camera-playback
130 |
131 | `rpi-camera-playback` uses `camera`, `video_render` and `null_sink` components.
132 | `camera` video output port is tunneled to `video_render` input port and
133 | `camera` preview output port is tunneled to `null_sink` input port.
134 | `video_render` component uses a display region to show the video on local
135 | display.
136 |
137 | ### rpi-camera-dump-yuv
138 |
139 | `rpi-camera-dump-yuv` records video using the RaspiCam module and dumps the raw
140 | YUV planar 4:2:0 ([I420](http://www.fourcc.org/yuv.php#IYUV)) data to `stdout`.
141 |
142 | $ ./rpi-camera-dump-yuv >test.yuv
143 |
144 | `rpi-camera-dump-yuv` uses `camera` and `null_sink` components. Uncompressed
145 | YUV planar 4:2:0 ([I420](http://www.fourcc.org/yuv.php#IYUV)) frame data is
146 | read from the buffer of `camera` video output port and dumped to stdout and
147 | `camera` preview output port is tunneled to `null_sink`.
148 |
149 | However, the camera is sending a frame divided into multiple buffers. Each
150 | buffer contains a slice of the Y, U, and V planes. This means that the plane
151 | data is fragmented if printed out just as is. Search for the definition of
152 | `OMX_COLOR_FormatYUV420PackedPlanar` in the OpenMAX IL specification for more
153 | details. Thus in order to produce valid I420 data to output file, you first
154 | have to save the received buffers until the whole frame has been delivered
155 | unpacking the plane slices in the process. Then the whole frame can be written
156 | to output file.
157 |
158 | ### rpi-encode-yuv
159 |
160 | `rpi-encode-yuv` reads YUV planar 4:2:0 ([I420](http://www.fourcc.org/yuv.php#IYUV))
161 | frame data from `stdin`, encodes the stream using the VideoCore hardware
162 | encoder using H.264 codec and emits the H.264 stream to `stdout`.
163 |
164 | $ ./rpi-encode-yuv test.h264
165 |
166 | `rpi-encode-yuv` uses the `video_encode` component. Uncompressed YUV 4:2:0
167 | ([I420](http://www.fourcc.org/yuv.php#IYUV)) frame data is read from `stdin`
168 | and passed to the buffer of input port of `video_encode`. H.264 encoded video
169 | is read from the buffer of `video_encode` output port and dumped to `stdout`.
170 |
171 | But similarly as described above in the [rpi-camera-dump-yuv] section, also
172 | `video_encode` component requires its buffers to be formatted in
173 | `OMX_COLOR_FormatYUV420PackedPlanar`. Thus we need to pack the I420 data to the
174 | desired format while reading from input file and writing to `video_encode`
175 | input buffer. Luckily no buffering is required here, you can just read the data
176 | for each of the Y, U, and V planes directly to the `video_encode` input buffer
177 | with proper alignment between the planes in the buffer.
178 |
179 | ## Bugs
180 |
181 | There's probably many bugs in component configuration and freeing of resources
182 | and also buffer management (things done in incorrect order etc.). Feel free to
183 | submit patches in order to clean things up.
184 |
185 | ## References
186 |
187 | The following resources on the Web were studied and found to be useful when
188 | this code was developed.
189 |
190 | 1. [OpenMAX™ Integration Layer Application Programming Interface Version 1.1.2](http://www.khronos.org/registry/omxil/specs/OpenMAX_IL_1_1_2_Specification.pdf). [The Khronos Group Inc.](http://www.khronos.org), 2008. URL .
191 | 1. [The OpenMAX Integration Layer standard](http://elinux.org/images/e/e0/The_OpenMAX_Integration_Layer_standard.pdf). Giulio Urlini ([Advanced System Technology](http://www.st.com)). URL .
192 | 1. [VMCS-X OpenMAX IL Components](https://github.com/raspberrypi/firmware/tree/master/documentation/ilcomponents). [The Raspberry Pi Foundation](http://www.raspberrypi.org/about). URL .
193 | 1. [RaspiCam Documentation](http://www.raspberrypi.org/wp-content/uploads/2013/07/RaspiCam-Documentation.pdf). [The Raspberry Pi Foundation](http://www.raspberrypi.org/about). URL
194 | 1. [Source code for the RaspiVid application](https://github.com/raspberrypi/userland/tree/master/host_applications/linux/apps/raspicam). [The Raspberry Pi Foundation](http://www.raspberrypi.org/about). URL .
195 | 1. [`hello_pi` sample application collection](https://github.com/raspberrypi/userland/tree/master/host_applications/linux/apps/hello_pi). [The Raspberry Pi Foundation](http://www.raspberrypi.org/about). URL .
196 | 1. [Source code for `pidvbip` tvheadend client for the Raspberry Pi](https://github.com/linuxstb/pidvbip). Dave Chapman, [Raspberry Pi client for tvheadend](http://pidvbip.org). URL .
197 |
198 | # Copyright and licensing
199 |
200 | Copyright © 2013 Tuomas Jormola
201 |
202 | Licensed under the Apache License, Version 2.0 (the "License");
203 | you may not use this file except in compliance with the License.
204 | You may obtain a copy of the License at
205 |
206 |     http://www.apache.org/licenses/LICENSE-2.0
207 |
208 | Unless required by applicable law or agreed to in writing, software
209 | distributed under the License is distributed on an "AS IS" BASIS,
210 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
211 | See the License for the specific language governing permissions and
212 | limitations under the License.
213 |
--------------------------------------------------------------------------------
/rpi-camera-dump-yuv.c:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright © 2013 Tuomas Jormola
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 |  *     http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | *
16 | * Short intro about this program:
17 | *
18 | * `rpi-camera-dump-yuv` records video using the RaspiCam module and dumps the raw
19 | * YUV frame data to `stdout`.
20 | *
21 | * $ ./rpi-camera-dump-yuv >test.yuv
22 | *
23 | * `rpi-camera-dump-yuv` uses `camera` and `null_sink` components. Uncompressed
24 | * raw YUV frame data is read from the buffer of `camera` video output port and
25 | * dumped to stdout and `camera` preview output port is tunneled to `null_sink`
26 | * input port.
27 | *
28 | * Please see README.mdwn for more detailed description of this
29 | * OpenMAX IL demos for Raspberry Pi bundle.
30 | *
31 | */
32 |
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <signal.h>
#include <unistd.h>
#include <stdarg.h>

#include <bcm_host.h>

#include <interface/vcos/vcos_semaphore.h>
#include <interface/vmcs_host/vchost.h>

#include <IL/OMX_Core.h>
#include <IL/OMX_Component.h>
#include <IL/OMX_Video.h>
#include <IL/OMX_Broadcom.h>
49 |
// Hard coded parameters.
// The arithmetic values are parenthesized so the macros expand safely inside
// larger expressions (e.g. `x % VIDEO_WIDTH` would otherwise parse as
// `x % 1920 / 4`).
#define VIDEO_WIDTH                     (1920 / 4)
#define VIDEO_HEIGHT                    (1080 / 4)
#define VIDEO_FRAMERATE                 25
#define CAM_DEVICE_NUMBER               0
#define CAM_SHARPNESS                   0                       // -100 .. 100
#define CAM_CONTRAST                    0                       // -100 .. 100
#define CAM_BRIGHTNESS                  50                      // 0 .. 100
#define CAM_SATURATION                  0                       // -100 .. 100
// NOTE(review): name carries a typo ("COMPENSTAION") but is kept as-is in
// case other code in this file references it.
#define CAM_EXPOSURE_VALUE_COMPENSTAION 0
#define CAM_EXPOSURE_ISO_SENSITIVITY    100
#define CAM_EXPOSURE_AUTO_SENSITIVITY   OMX_FALSE
#define CAM_FRAME_STABILISATION         OMX_TRUE
#define CAM_WHITE_BALANCE_CONTROL       OMX_WhiteBalControlAuto // OMX_WHITEBALCONTROLTYPE
#define CAM_IMAGE_FILTER                OMX_ImageFilterNoise    // OMX_IMAGEFILTERTYPE
#define CAM_FLIP_HORIZONTAL             OMX_FALSE
#define CAM_FLIP_VERTICAL               OMX_FALSE

// Initialize an OMX IL API struct: zero it, then fill in the nSize and
// nVersion fields that every OMX_*TYPE struct carries.
// Wrapped in do { ... } while(0) so the multi-statement macro behaves as a
// single statement (safe inside an unbraced if/else branch).
#define OMX_INIT_STRUCTURE(a) \
    do { \
        memset(&(a), 0, sizeof(a)); \
        (a).nSize = sizeof(a); \
        (a).nVersion.nVersion = OMX_VERSION; \
        (a).nVersion.s.nVersionMajor = OMX_VERSION_MAJOR; \
        (a).nVersion.s.nVersionMinor = OMX_VERSION_MINOR; \
        (a).nVersion.s.nRevision = OMX_VERSION_REVISION; \
        (a).nVersion.s.nStep = OMX_VERSION_STEP; \
    } while(0)
77 |
// Global flag set asynchronously by the signal handler and polled by the
// capture loop. Declared volatile so the compiler re-reads it on every loop
// iteration instead of caching it in a register (ideally this would be
// sig_atomic_t, but a volatile int is sufficient on this platform).
static volatile int want_quit = 0;
80 |
// Our application context passed around
// the main routine and callback handlers.
typedef struct {
    OMX_HANDLETYPE camera;                      // handle of the OMX `camera` component
    OMX_BUFFERHEADERTYPE *camera_ppBuffer_in;   // buffer header for a camera input port -- exact port not visible here; confirm in main
    OMX_BUFFERHEADERTYPE *camera_ppBuffer_out;  // buffer header for the camera video output port (YUV frame data is read from here)
    int camera_ready;                           // non-zero once the camera has signaled readiness -- presumably set by an event handler; confirm
    int camera_output_buffer_available;         // non-zero when the output buffer has been filled -- presumably set by the fill-buffer-done callback; confirm
    OMX_HANDLETYPE null_sink;                   // handle of the OMX `null_sink` component (preview port is tunneled here)
    int flushed;                                // non-zero once a port flush has completed -- used during teardown; confirm against main
    FILE *fd_out;                               // output stream for frame data (stdout, per the file header comment)
    VCOS_SEMAPHORE_T handler_lock;              // semaphore serializing access to this context between callbacks and main
} appctx;
94 |
// Layout description of an I420 (YUV planar 4:2:0) frame as produced by
// get_i420_frame_info() below.
typedef struct {
    int width;                // visible frame width in pixels
    int height;               // visible frame height in pixels
    size_t size;              // total byte size of one packed I420 frame (all three planes)
    int buf_stride;           // stride reported by the OMX port definition (0 if unknown)
    int buf_slice_height;     // slice height reported by the OMX port definition (negative if unknown)
    int buf_extra_padding;    // padding rows needed to round height up to a slice boundary; -1 when slice height is unknown
    int p_offset[3];          // byte offset of the Y, U, V planes from the start of the frame
    int p_stride[3];          // byte stride of the Y, U, V planes
} i420_frame_info;
106 |
107 | // Stolen from video-info.c of gstreamer-plugins-base
108 | #define ROUND_UP_2(num) (((num)+1)&~1)
109 | #define ROUND_UP_4(num) (((num)+3)&~3)
110 | static void get_i420_frame_info(int width, int height, int buf_stride, int buf_slice_height, i420_frame_info *info) {
111 | info->p_stride[0] = ROUND_UP_4(width);
112 | info->p_stride[1] = ROUND_UP_4(ROUND_UP_2(width) / 2);
113 | info->p_stride[2] = info->p_stride[1];
114 | info->p_offset[0] = 0;
115 | info->p_offset[1] = info->p_stride[0] * ROUND_UP_2(height);
116 | info->p_offset[2] = info->p_offset[1] + info->p_stride[1] * (ROUND_UP_2(height) / 2);
117 | info->size = info->p_offset[2] + info->p_stride[2] * (ROUND_UP_2(height) / 2);
118 | info->width = width;
119 | info->height = height;
120 | info->buf_stride = buf_stride;
121 | info->buf_slice_height = buf_slice_height;
122 | info->buf_extra_padding =
123 | buf_slice_height >= 0
124 | ? ((buf_slice_height && (height % buf_slice_height))
125 | ? (buf_slice_height - (height % buf_slice_height))
126 | : 0)
127 | : -1;
128 | }
129 |
// Ugly, stupid utility functions

// Format the message printf-style and print it to stderr, appending a
// trailing newline if the message does not already end with one.
static void say(const char* message, ...) {
    va_list args;
    char str[1024];
    memset(str, 0, sizeof(str));
    va_start(args, message);
    // Reserve one byte below the buffer size so there is always room to
    // append the newline after the NUL terminator's slot.
    vsnprintf(str, sizeof(str) - 1, message, args);
    va_end(args);
    size_t str_len = strnlen(str, sizeof(str));
    // Guard against an empty formatted string: the old code indexed
    // str[str_len - 1] unconditionally, which underflows when str_len == 0.
    if(str_len > 0 && str[str_len - 1] != '\n') {
        str[str_len] = '\n';
    }
    // Print through "%s" instead of using str as the format string: any '%'
    // produced by the formatted arguments would otherwise be re-interpreted
    // as a conversion specifier (classic format-string bug).
    fprintf(stderr, "%s", str);
}
144 |
// Format the message printf-style, print it to stderr via say() and
// terminate the process with exit status 1. Never returns.
static void die(const char* message, ...) {
    va_list args;
    char str[1024];
    memset(str, 0, sizeof(str));
    va_start(args, message);
    vsnprintf(str, sizeof(str), message, args);
    va_end(args);
    // Forward through "%s": the old call say(str) used the already-formatted
    // text as a format string, so '%' sequences contributed by the variadic
    // arguments would have been expanded a second time (undefined behavior).
    say("%s", str);
    exit(1);
}
155 |
156 | static void omx_die(OMX_ERRORTYPE error, const char* message, ...) {
157 | va_list args;
158 | char str[1024];
159 | char *e;
160 | memset(str, 0, sizeof(str));
161 | va_start(args, message);
162 | vsnprintf(str, sizeof(str), message, args);
163 | va_end(args);
164 | switch(error) {
165 | case OMX_ErrorNone: e = "no error"; break;
166 | case OMX_ErrorBadParameter: e = "bad parameter"; break;
167 | case OMX_ErrorIncorrectStateOperation: e = "invalid state while trying to perform command"; break;
168 | case OMX_ErrorIncorrectStateTransition: e = "unallowed state transition"; break;
169 | case OMX_ErrorInsufficientResources: e = "insufficient resource"; break;
170 | case OMX_ErrorBadPortIndex: e = "bad port index, i.e. incorrect port"; break;
171 | case OMX_ErrorHardware: e = "hardware error"; break;
172 | /* That's all I've encountered during hacking so let's not bother with the rest... */
173 | default: e = "(no description)";
174 | }
175 | die("OMX error: %s: 0x%08x %s", str, error, e);
176 | }
177 |
178 | static void dump_frame_info(const char *message, const i420_frame_info *info) {
179 | say("%s frame info:\n"
180 | "\tWidth:\t\t\t%d\n"
181 | "\tHeight:\t\t\t%d\n"
182 | "\tSize:\t\t\t%d\n"
183 | "\tBuffer stride:\t\t%d\n"
184 | "\tBuffer slice height:\t%d\n"
185 | "\tBuffer extra padding:\t%d\n"
186 | "\tPlane strides:\t\tY:%d U:%d V:%d\n"
187 | "\tPlane offsets:\t\tY:%d U:%d V:%d\n",
188 | message,
189 | info->width, info->height, info->size, info->buf_stride, info->buf_slice_height, info->buf_extra_padding,
190 | info->p_stride[0], info->p_stride[1], info->p_stride[2],
191 | info->p_offset[0], info->p_offset[1], info->p_offset[2]);
192 | }
193 |
194 | static void dump_event(OMX_HANDLETYPE hComponent, OMX_EVENTTYPE eEvent, OMX_U32 nData1, OMX_U32 nData2) {
195 | char *e;
196 | switch(eEvent) {
197 | case OMX_EventCmdComplete: e = "command complete"; break;
198 | case OMX_EventError: e = "error"; break;
199 | case OMX_EventParamOrConfigChanged: e = "parameter or configuration changed"; break;
200 | case OMX_EventPortSettingsChanged: e = "port settings changed"; break;
201 | /* That's all I've encountered during hacking so let's not bother with the rest... */
202 | default:
203 | e = "(no description)";
204 | }
205 | say("Received event 0x%08x %s, hComponent:0x%08x, nData1:0x%08x, nData2:0x%08x",
206 | eEvent, e, hComponent, nData1, nData2);
207 | }
208 |
209 | static const char* dump_compression_format(OMX_VIDEO_CODINGTYPE c) {
210 | char *f;
211 | switch(c) {
212 | case OMX_VIDEO_CodingUnused: return "not used";
213 | case OMX_VIDEO_CodingAutoDetect: return "autodetect";
214 | case OMX_VIDEO_CodingMPEG2: return "MPEG2";
215 | case OMX_VIDEO_CodingH263: return "H.263";
216 | case OMX_VIDEO_CodingMPEG4: return "MPEG4";
217 | case OMX_VIDEO_CodingWMV: return "Windows Media Video";
218 | case OMX_VIDEO_CodingRV: return "RealVideo";
219 | case OMX_VIDEO_CodingAVC: return "H.264/AVC";
220 | case OMX_VIDEO_CodingMJPEG: return "Motion JPEG";
221 | case OMX_VIDEO_CodingVP6: return "VP6";
222 | case OMX_VIDEO_CodingVP7: return "VP7";
223 | case OMX_VIDEO_CodingVP8: return "VP8";
224 | case OMX_VIDEO_CodingYUV: return "Raw YUV video";
225 | case OMX_VIDEO_CodingSorenson: return "Sorenson";
226 | case OMX_VIDEO_CodingTheora: return "OGG Theora";
227 | case OMX_VIDEO_CodingMVC: return "H.264/MVC";
228 |
229 | default:
230 | f = calloc(23, sizeof(char));
231 | if(f == NULL) {
232 | die("Failed to allocate memory");
233 | }
234 | snprintf(f, 23 * sizeof(char) - 1, "format type 0x%08x", c);
235 | return f;
236 | }
237 | }
238 | static const char* dump_color_format(OMX_COLOR_FORMATTYPE c) {
239 | char *f;
240 | switch(c) {
241 | case OMX_COLOR_FormatUnused: return "OMX_COLOR_FormatUnused: not used";
242 | case OMX_COLOR_FormatMonochrome: return "OMX_COLOR_FormatMonochrome";
243 | case OMX_COLOR_Format8bitRGB332: return "OMX_COLOR_Format8bitRGB332";
244 | case OMX_COLOR_Format12bitRGB444: return "OMX_COLOR_Format12bitRGB444";
245 | case OMX_COLOR_Format16bitARGB4444: return "OMX_COLOR_Format16bitARGB4444";
246 | case OMX_COLOR_Format16bitARGB1555: return "OMX_COLOR_Format16bitARGB1555";
247 | case OMX_COLOR_Format16bitRGB565: return "OMX_COLOR_Format16bitRGB565";
248 | case OMX_COLOR_Format16bitBGR565: return "OMX_COLOR_Format16bitBGR565";
249 | case OMX_COLOR_Format18bitRGB666: return "OMX_COLOR_Format18bitRGB666";
250 | case OMX_COLOR_Format18bitARGB1665: return "OMX_COLOR_Format18bitARGB1665";
251 | case OMX_COLOR_Format19bitARGB1666: return "OMX_COLOR_Format19bitARGB1666";
252 | case OMX_COLOR_Format24bitRGB888: return "OMX_COLOR_Format24bitRGB888";
253 | case OMX_COLOR_Format24bitBGR888: return "OMX_COLOR_Format24bitBGR888";
254 | case OMX_COLOR_Format24bitARGB1887: return "OMX_COLOR_Format24bitARGB1887";
255 | case OMX_COLOR_Format25bitARGB1888: return "OMX_COLOR_Format25bitARGB1888";
256 | case OMX_COLOR_Format32bitBGRA8888: return "OMX_COLOR_Format32bitBGRA8888";
257 | case OMX_COLOR_Format32bitARGB8888: return "OMX_COLOR_Format32bitARGB8888";
258 | case OMX_COLOR_FormatYUV411Planar: return "OMX_COLOR_FormatYUV411Planar";
259 | case OMX_COLOR_FormatYUV411PackedPlanar: return "OMX_COLOR_FormatYUV411PackedPlanar: Planes fragmented when a frame is split in multiple buffers";
260 | case OMX_COLOR_FormatYUV420Planar: return "OMX_COLOR_FormatYUV420Planar: Planar YUV, 4:2:0 (I420)";
261 | case OMX_COLOR_FormatYUV420PackedPlanar: return "OMX_COLOR_FormatYUV420PackedPlanar: Planar YUV, 4:2:0 (I420), planes fragmented when a frame is split in multiple buffers";
262 | case OMX_COLOR_FormatYUV420SemiPlanar: return "OMX_COLOR_FormatYUV420SemiPlanar, Planar YUV, 4:2:0 (NV12), U and V planes interleaved with first U value";
263 | case OMX_COLOR_FormatYUV422Planar: return "OMX_COLOR_FormatYUV422Planar";
264 | case OMX_COLOR_FormatYUV422PackedPlanar: return "OMX_COLOR_FormatYUV422PackedPlanar: Planes fragmented when a frame is split in multiple buffers";
265 | case OMX_COLOR_FormatYUV422SemiPlanar: return "OMX_COLOR_FormatYUV422SemiPlanar";
266 | case OMX_COLOR_FormatYCbYCr: return "OMX_COLOR_FormatYCbYCr";
267 | case OMX_COLOR_FormatYCrYCb: return "OMX_COLOR_FormatYCrYCb";
268 | case OMX_COLOR_FormatCbYCrY: return "OMX_COLOR_FormatCbYCrY";
269 | case OMX_COLOR_FormatCrYCbY: return "OMX_COLOR_FormatCrYCbY";
270 | case OMX_COLOR_FormatYUV444Interleaved: return "OMX_COLOR_FormatYUV444Interleaved";
271 | case OMX_COLOR_FormatRawBayer8bit: return "OMX_COLOR_FormatRawBayer8bit";
272 | case OMX_COLOR_FormatRawBayer10bit: return "OMX_COLOR_FormatRawBayer10bit";
273 | case OMX_COLOR_FormatRawBayer8bitcompressed: return "OMX_COLOR_FormatRawBayer8bitcompressed";
274 | case OMX_COLOR_FormatL2: return "OMX_COLOR_FormatL2";
275 | case OMX_COLOR_FormatL4: return "OMX_COLOR_FormatL4";
276 | case OMX_COLOR_FormatL8: return "OMX_COLOR_FormatL8";
277 | case OMX_COLOR_FormatL16: return "OMX_COLOR_FormatL16";
278 | case OMX_COLOR_FormatL24: return "OMX_COLOR_FormatL24";
279 | case OMX_COLOR_FormatL32: return "OMX_COLOR_FormatL32";
280 | case OMX_COLOR_FormatYUV420PackedSemiPlanar: return "OMX_COLOR_FormatYUV420PackedSemiPlanar: Planar YUV, 4:2:0 (NV12), planes fragmented when a frame is split in multiple buffers, U and V planes interleaved with first U value";
281 | case OMX_COLOR_FormatYUV422PackedSemiPlanar: return "OMX_COLOR_FormatYUV422PackedSemiPlanar: Planes fragmented when a frame is split in multiple buffers";
282 | case OMX_COLOR_Format18BitBGR666: return "OMX_COLOR_Format18BitBGR666";
283 | case OMX_COLOR_Format24BitARGB6666: return "OMX_COLOR_Format24BitARGB6666";
284 | case OMX_COLOR_Format24BitABGR6666: return "OMX_COLOR_Format24BitABGR6666";
285 | case OMX_COLOR_Format32bitABGR8888: return "OMX_COLOR_Format32bitABGR8888";
286 | case OMX_COLOR_Format8bitPalette: return "OMX_COLOR_Format8bitPalette";
287 | case OMX_COLOR_FormatYUVUV128: return "OMX_COLOR_FormatYUVUV128";
288 | case OMX_COLOR_FormatRawBayer12bit: return "OMX_COLOR_FormatRawBayer12bit";
289 | case OMX_COLOR_FormatBRCMEGL: return "OMX_COLOR_FormatBRCMEGL";
290 | case OMX_COLOR_FormatBRCMOpaque: return "OMX_COLOR_FormatBRCMOpaque";
291 | case OMX_COLOR_FormatYVU420PackedPlanar: return "OMX_COLOR_FormatYVU420PackedPlanar";
292 | case OMX_COLOR_FormatYVU420PackedSemiPlanar: return "OMX_COLOR_FormatYVU420PackedSemiPlanar";
293 | default:
294 | f = calloc(23, sizeof(char));
295 | if(f == NULL) {
296 | die("Failed to allocate memory");
297 | }
298 | snprintf(f, 23 * sizeof(char) - 1, "format type 0x%08x", c);
299 | return f;
300 | }
301 | }
302 |
// Log the generic attributes of a port definition, then the
// domain-specific (video or image) format details.
static void dump_portdef(OMX_PARAM_PORTDEFINITIONTYPE* portdef) {
    say("Port %d is %s, %s, buffers wants:%d needs:%d, size:%d, pop:%d, aligned:%d",
        portdef->nPortIndex,
        (portdef->eDir == OMX_DirInput ? "input" : "output"),
        (portdef->bEnabled == OMX_TRUE ? "enabled" : "disabled"),
        portdef->nBufferCountActual,
        portdef->nBufferCountMin,
        portdef->nBufferSize,
        portdef->bPopulated,
        portdef->nBufferAlignment);

    // portdef->format is a union; only the member selected by eDomain
    // below holds valid data.
    OMX_VIDEO_PORTDEFINITIONTYPE *viddef = &portdef->format.video;
    OMX_IMAGE_PORTDEFINITIONTYPE *imgdef = &portdef->format.image;
    switch(portdef->eDomain) {
        case OMX_PortDomainVideo:
            say("Video type:\n"
                "\tWidth:\t\t%d\n"
                "\tHeight:\t\t%d\n"
                "\tStride:\t\t%d\n"
                "\tSliceHeight:\t%d\n"
                "\tBitrate:\t%d\n"
                "\tFramerate:\t%.02f\n"
                "\tError hiding:\t%s\n"
                "\tCodec:\t\t%s\n"
                "\tColor:\t\t%s\n",
                viddef->nFrameWidth,
                viddef->nFrameHeight,
                viddef->nStride,
                viddef->nSliceHeight,
                viddef->nBitrate,
                // xFramerate is Q16 fixed point: divide by 2^16 for fps
                ((float)viddef->xFramerate / (float)65536),
                (viddef->bFlagErrorConcealment == OMX_TRUE ? "yes" : "no"),
                dump_compression_format(viddef->eCompressionFormat),
                dump_color_format(viddef->eColorFormat));
            break;
        case OMX_PortDomainImage:
            say("Image type:\n"
                "\tWidth:\t\t%d\n"
                "\tHeight:\t\t%d\n"
                "\tStride:\t\t%d\n"
                "\tSliceHeight:\t%d\n"
                "\tError hiding:\t%s\n"
                "\tCodec:\t\t%s\n"
                "\tColor:\t\t%s\n",
                imgdef->nFrameWidth,
                imgdef->nFrameHeight,
                imgdef->nStride,
                imgdef->nSliceHeight,
                (imgdef->bFlagErrorConcealment == OMX_TRUE ? "yes" : "no"),
                dump_compression_format(imgdef->eCompressionFormat),
                dump_color_format(imgdef->eColorFormat));
            break;
        // Audio and "other" domains are never used by this tool
        default:
            break;
    }
}
359 |
360 | static void dump_port(OMX_HANDLETYPE hComponent, OMX_U32 nPortIndex, OMX_BOOL dumpformats) {
361 | OMX_ERRORTYPE r;
362 | OMX_PARAM_PORTDEFINITIONTYPE portdef;
363 | OMX_INIT_STRUCTURE(portdef);
364 | portdef.nPortIndex = nPortIndex;
365 | if((r = OMX_GetParameter(hComponent, OMX_IndexParamPortDefinition, &portdef)) != OMX_ErrorNone) {
366 | omx_die(r, "Failed to get port definition for port %d", nPortIndex);
367 | }
368 | dump_portdef(&portdef);
369 | if(dumpformats) {
370 | OMX_VIDEO_PARAM_PORTFORMATTYPE portformat;
371 | OMX_INIT_STRUCTURE(portformat);
372 | portformat.nPortIndex = nPortIndex;
373 | portformat.nIndex = 0;
374 | r = OMX_ErrorNone;
375 | say("Port %d supports these video formats:", nPortIndex);
376 | while(r == OMX_ErrorNone) {
377 | if((r = OMX_GetParameter(hComponent, OMX_IndexParamVideoPortFormat, &portformat)) == OMX_ErrorNone) {
378 | say("\t%s, compression: %s", dump_color_format(portformat.eColorFormat), dump_compression_format(portformat.eCompressionFormat));
379 | portformat.nIndex++;
380 | }
381 | }
382 | }
383 | }
384 |
385 | // Some busy loops to verify we're running in order
386 | static void block_until_state_changed(OMX_HANDLETYPE hComponent, OMX_STATETYPE wanted_eState) {
387 | OMX_STATETYPE eState;
388 | int i = 0;
389 | while(i++ == 0 || eState != wanted_eState) {
390 | OMX_GetState(hComponent, &eState);
391 | if(eState != wanted_eState) {
392 | usleep(10000);
393 | }
394 | }
395 | }
396 |
397 | static void block_until_port_changed(OMX_HANDLETYPE hComponent, OMX_U32 nPortIndex, OMX_BOOL bEnabled) {
398 | OMX_ERRORTYPE r;
399 | OMX_PARAM_PORTDEFINITIONTYPE portdef;
400 | OMX_INIT_STRUCTURE(portdef);
401 | portdef.nPortIndex = nPortIndex;
402 | OMX_U32 i = 0;
403 | while(i++ == 0 || portdef.bEnabled != bEnabled) {
404 | if((r = OMX_GetParameter(hComponent, OMX_IndexParamPortDefinition, &portdef)) != OMX_ErrorNone) {
405 | omx_die(r, "Failed to get port definition");
406 | }
407 | if(portdef.bEnabled != bEnabled) {
408 | usleep(10000);
409 | }
410 | }
411 | }
412 |
413 | static void block_until_flushed(appctx *ctx) {
414 | int quit;
415 | while(!quit) {
416 | vcos_semaphore_wait(&ctx->handler_lock);
417 | if(ctx->flushed) {
418 | ctx->flushed = 0;
419 | quit = 1;
420 | }
421 | vcos_semaphore_post(&ctx->handler_lock);
422 | if(!quit) {
423 | usleep(10000);
424 | }
425 | }
426 | }
427 |
428 | static void init_component_handle(
429 | const char *name,
430 | OMX_HANDLETYPE* hComponent,
431 | OMX_PTR pAppData,
432 | OMX_CALLBACKTYPE* callbacks) {
433 | OMX_ERRORTYPE r;
434 | char fullname[32];
435 |
436 | // Get handle
437 | memset(fullname, 0, sizeof(fullname));
438 | strcat(fullname, "OMX.broadcom.");
439 | strncat(fullname, name, strlen(fullname) - 1);
440 | say("Initializing component %s", fullname);
441 | if((r = OMX_GetHandle(hComponent, fullname, pAppData, callbacks)) != OMX_ErrorNone) {
442 | omx_die(r, "Failed to get handle for component %s", fullname);
443 | }
444 |
445 | // Disable ports
446 | OMX_INDEXTYPE types[] = {
447 | OMX_IndexParamAudioInit,
448 | OMX_IndexParamVideoInit,
449 | OMX_IndexParamImageInit,
450 | OMX_IndexParamOtherInit
451 | };
452 | OMX_PORT_PARAM_TYPE ports;
453 | OMX_INIT_STRUCTURE(ports);
454 | OMX_GetParameter(*hComponent, OMX_IndexParamVideoInit, &ports);
455 |
456 | int i;
457 | for(i = 0; i < 4; i++) {
458 | if(OMX_GetParameter(*hComponent, types[i], &ports) == OMX_ErrorNone) {
459 | OMX_U32 nPortIndex;
460 | for(nPortIndex = ports.nStartPortNumber; nPortIndex < ports.nStartPortNumber + ports.nPorts; nPortIndex++) {
461 | say("Disabling port %d of component %s", nPortIndex, fullname);
462 | if((r = OMX_SendCommand(*hComponent, OMX_CommandPortDisable, nPortIndex, NULL)) != OMX_ErrorNone) {
463 | omx_die(r, "Failed to disable port %d of component %s", nPortIndex, fullname);
464 | }
465 | block_until_port_changed(*hComponent, nPortIndex, OMX_FALSE);
466 | }
467 | }
468 | }
469 | }
470 |
// Global signal handler for trapping SIGINT, SIGTERM, and SIGQUIT
// Only sets a flag; the capture loop in main() polls it and exits at
// the next frame boundary, so no unsafe work happens in signal context.
// NOTE(review): want_quit is declared elsewhere in this file; for strict
// async-signal safety it should be a volatile sig_atomic_t — confirm.
static void signal_handler(int signal) {
    want_quit = 1;
}
475 |
// OMX calls this handler for all the events it emits
// Runs on an OMX/VCOS callback thread, so all shared appctx flags are
// updated under the handler_lock semaphore; the main loop reads them
// the same way.
static OMX_ERRORTYPE event_handler(
        OMX_HANDLETYPE hComponent,
        OMX_PTR pAppData,
        OMX_EVENTTYPE eEvent,
        OMX_U32 nData1,
        OMX_U32 nData2,
        OMX_PTR pEventData) {

    dump_event(hComponent, eEvent, nData1, nData2);

    appctx *ctx = (appctx *)pAppData;

    switch(eEvent) {
        case OMX_EventCmdComplete:
            // A previously issued command finished; nData1 says which one.
            vcos_semaphore_wait(&ctx->handler_lock);
            if(nData1 == OMX_CommandFlush) {
                // block_until_flushed() polls and clears this flag
                ctx->flushed = 1;
            }
            vcos_semaphore_post(&ctx->handler_lock);
            break;
        case OMX_EventParamOrConfigChanged:
            vcos_semaphore_wait(&ctx->handler_lock);
            if(nData2 == OMX_IndexParamCameraDeviceNumber) {
                // Camera device number was applied: the camera is ready;
                // main() busy-waits on this flag before proceeding.
                ctx->camera_ready = 1;
            }
            vcos_semaphore_post(&ctx->handler_lock);
            break;
        case OMX_EventError:
            // Treat any component error as fatal; nData1 is the OMX error code.
            omx_die(nData1, "error event received");
            break;
        default:
            break;
    }

    return OMX_ErrorNone;
}
513 |
514 | // Called by OMX when the camera component has filled
515 | // the output buffer with captured video data
516 | static OMX_ERRORTYPE fill_output_buffer_done_handler(
517 | OMX_HANDLETYPE hComponent,
518 | OMX_PTR pAppData,
519 | OMX_BUFFERHEADERTYPE* pBuffer) {
520 | appctx *ctx = ((appctx*)pAppData);
521 | vcos_semaphore_wait(&ctx->handler_lock);
522 | // The main loop can now flush the buffer to output file
523 | ctx->camera_output_buffer_available = 1;
524 | vcos_semaphore_post(&ctx->handler_lock);
525 | return OMX_ErrorNone;
526 | }
527 |
528 | int main(int argc, char **argv) {
529 | bcm_host_init();
530 |
531 | OMX_ERRORTYPE r;
532 |
533 | if((r = OMX_Init()) != OMX_ErrorNone) {
534 | omx_die(r, "OMX initalization failed");
535 | }
536 |
537 | // Init context
538 | appctx ctx;
539 | memset(&ctx, 0, sizeof(ctx));
540 | if(vcos_semaphore_create(&ctx.handler_lock, "handler_lock", 1) != VCOS_SUCCESS) {
541 | die("Failed to create handler lock semaphore");
542 | }
543 |
544 | // Init component handles
545 | OMX_CALLBACKTYPE callbacks;
546 | memset(&ctx, 0, sizeof(callbacks));
547 | callbacks.EventHandler = event_handler;
548 | callbacks.FillBufferDone = fill_output_buffer_done_handler;
549 |
550 | init_component_handle("camera", &ctx.camera , &ctx, &callbacks);
551 | init_component_handle("null_sink", &ctx.null_sink, &ctx, &callbacks);
552 |
553 | say("Configuring camera...");
554 |
555 | say("Default port definition for camera input port 73");
556 | dump_port(ctx.camera, 73, OMX_TRUE);
557 | say("Default port definition for camera preview output port 70");
558 | dump_port(ctx.camera, 70, OMX_TRUE);
559 | say("Default port definition for camera video output port 71");
560 | dump_port(ctx.camera, 71, OMX_TRUE);
561 |
562 | // Request a callback to be made when OMX_IndexParamCameraDeviceNumber is
563 | // changed signaling that the camera device is ready for use.
564 | OMX_CONFIG_REQUESTCALLBACKTYPE cbtype;
565 | OMX_INIT_STRUCTURE(cbtype);
566 | cbtype.nPortIndex = OMX_ALL;
567 | cbtype.nIndex = OMX_IndexParamCameraDeviceNumber;
568 | cbtype.bEnable = OMX_TRUE;
569 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigRequestCallback, &cbtype)) != OMX_ErrorNone) {
570 | omx_die(r, "Failed to request camera device number parameter change callback for camera");
571 | }
572 | // Set device number, this triggers the callback configured just above
573 | OMX_PARAM_U32TYPE device;
574 | OMX_INIT_STRUCTURE(device);
575 | device.nPortIndex = OMX_ALL;
576 | device.nU32 = CAM_DEVICE_NUMBER;
577 | if((r = OMX_SetParameter(ctx.camera, OMX_IndexParamCameraDeviceNumber, &device)) != OMX_ErrorNone) {
578 | omx_die(r, "Failed to set camera parameter device number");
579 | }
580 | // Configure video format emitted by camera preview output port
581 | OMX_PARAM_PORTDEFINITIONTYPE camera_portdef;
582 | OMX_INIT_STRUCTURE(camera_portdef);
583 | camera_portdef.nPortIndex = 70;
584 | if((r = OMX_GetParameter(ctx.camera, OMX_IndexParamPortDefinition, &camera_portdef)) != OMX_ErrorNone) {
585 | omx_die(r, "Failed to get port definition for camera preview output port 70");
586 | }
587 | camera_portdef.format.video.nFrameWidth = VIDEO_WIDTH;
588 | camera_portdef.format.video.nFrameHeight = VIDEO_HEIGHT;
589 | camera_portdef.format.video.xFramerate = VIDEO_FRAMERATE << 16;
590 | // Stolen from gstomxvideodec.c of gst-omx
591 | camera_portdef.format.video.nStride = (camera_portdef.format.video.nFrameWidth + camera_portdef.nBufferAlignment - 1) & (~(camera_portdef.nBufferAlignment - 1));
592 | camera_portdef.format.video.eColorFormat = OMX_COLOR_FormatYUV420PackedPlanar;
593 | if((r = OMX_SetParameter(ctx.camera, OMX_IndexParamPortDefinition, &camera_portdef)) != OMX_ErrorNone) {
594 | omx_die(r, "Failed to set port definition for camera preview output port 70");
595 | }
596 | // Configure video format emitted by camera video output port
597 | // Use configuration from camera preview output as basis for
598 | // camera video output configuration
599 | OMX_INIT_STRUCTURE(camera_portdef);
600 | camera_portdef.nPortIndex = 70;
601 | if((r = OMX_GetParameter(ctx.camera, OMX_IndexParamPortDefinition, &camera_portdef)) != OMX_ErrorNone) {
602 | omx_die(r, "Failed to get port definition for camera preview output port 70");
603 | }
604 | camera_portdef.nPortIndex = 71;
605 | if((r = OMX_SetParameter(ctx.camera, OMX_IndexParamPortDefinition, &camera_portdef)) != OMX_ErrorNone) {
606 | omx_die(r, "Failed to set port definition for camera video output port 71");
607 | }
608 | // Configure frame rate
609 | OMX_CONFIG_FRAMERATETYPE framerate;
610 | OMX_INIT_STRUCTURE(framerate);
611 | framerate.nPortIndex = 70;
612 | framerate.xEncodeFramerate = camera_portdef.format.video.xFramerate;
613 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigVideoFramerate, &framerate)) != OMX_ErrorNone) {
614 | omx_die(r, "Failed to set framerate configuration for camera preview output port 70");
615 | }
616 | framerate.nPortIndex = 71;
617 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigVideoFramerate, &framerate)) != OMX_ErrorNone) {
618 | omx_die(r, "Failed to set framerate configuration for camera video output port 71");
619 | }
620 | // Configure sharpness
621 | OMX_CONFIG_SHARPNESSTYPE sharpness;
622 | OMX_INIT_STRUCTURE(sharpness);
623 | sharpness.nPortIndex = OMX_ALL;
624 | sharpness.nSharpness = CAM_SHARPNESS;
625 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigCommonSharpness, &sharpness)) != OMX_ErrorNone) {
626 | omx_die(r, "Failed to set camera sharpness configuration");
627 | }
628 | // Configure contrast
629 | OMX_CONFIG_CONTRASTTYPE contrast;
630 | OMX_INIT_STRUCTURE(contrast);
631 | contrast.nPortIndex = OMX_ALL;
632 | contrast.nContrast = CAM_CONTRAST;
633 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigCommonContrast, &contrast)) != OMX_ErrorNone) {
634 | omx_die(r, "Failed to set camera contrast configuration");
635 | }
636 | // Configure saturation
637 | OMX_CONFIG_SATURATIONTYPE saturation;
638 | OMX_INIT_STRUCTURE(saturation);
639 | saturation.nPortIndex = OMX_ALL;
640 | saturation.nSaturation = CAM_SATURATION;
641 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigCommonSaturation, &saturation)) != OMX_ErrorNone) {
642 | omx_die(r, "Failed to set camera saturation configuration");
643 | }
644 | // Configure brightness
645 | OMX_CONFIG_BRIGHTNESSTYPE brightness;
646 | OMX_INIT_STRUCTURE(brightness);
647 | brightness.nPortIndex = OMX_ALL;
648 | brightness.nBrightness = CAM_BRIGHTNESS;
649 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigCommonBrightness, &brightness)) != OMX_ErrorNone) {
650 | omx_die(r, "Failed to set camera brightness configuration");
651 | }
652 | // Configure exposure value
653 | OMX_CONFIG_EXPOSUREVALUETYPE exposure_value;
654 | OMX_INIT_STRUCTURE(exposure_value);
655 | exposure_value.nPortIndex = OMX_ALL;
656 | exposure_value.xEVCompensation = CAM_EXPOSURE_VALUE_COMPENSTAION;
657 | exposure_value.bAutoSensitivity = CAM_EXPOSURE_AUTO_SENSITIVITY;
658 | exposure_value.nSensitivity = CAM_EXPOSURE_ISO_SENSITIVITY;
659 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigCommonExposureValue, &exposure_value)) != OMX_ErrorNone) {
660 | omx_die(r, "Failed to set camera exposure value configuration");
661 | }
662 | // Configure frame frame stabilisation
663 | OMX_CONFIG_FRAMESTABTYPE frame_stabilisation_control;
664 | OMX_INIT_STRUCTURE(frame_stabilisation_control);
665 | frame_stabilisation_control.nPortIndex = OMX_ALL;
666 | frame_stabilisation_control.bStab = CAM_FRAME_STABILISATION;
667 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigCommonFrameStabilisation, &frame_stabilisation_control)) != OMX_ErrorNone) {
668 | omx_die(r, "Failed to set camera frame frame stabilisation control configuration");
669 | }
670 | // Configure frame white balance control
671 | OMX_CONFIG_WHITEBALCONTROLTYPE white_balance_control;
672 | OMX_INIT_STRUCTURE(white_balance_control);
673 | white_balance_control.nPortIndex = OMX_ALL;
674 | white_balance_control.eWhiteBalControl = CAM_WHITE_BALANCE_CONTROL;
675 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigCommonWhiteBalance, &white_balance_control)) != OMX_ErrorNone) {
676 | omx_die(r, "Failed to set camera frame white balance control configuration");
677 | }
678 | // Configure image filter
679 | OMX_CONFIG_IMAGEFILTERTYPE image_filter;
680 | OMX_INIT_STRUCTURE(image_filter);
681 | image_filter.nPortIndex = OMX_ALL;
682 | image_filter.eImageFilter = CAM_IMAGE_FILTER;
683 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigCommonImageFilter, &image_filter)) != OMX_ErrorNone) {
684 | omx_die(r, "Failed to set camera image filter configuration");
685 | }
686 | // Configure mirror
687 | OMX_MIRRORTYPE eMirror = OMX_MirrorNone;
688 | if(CAM_FLIP_HORIZONTAL && !CAM_FLIP_VERTICAL) {
689 | eMirror = OMX_MirrorHorizontal;
690 | } else if(!CAM_FLIP_HORIZONTAL && CAM_FLIP_VERTICAL) {
691 | eMirror = OMX_MirrorVertical;
692 | } else if(CAM_FLIP_HORIZONTAL && CAM_FLIP_VERTICAL) {
693 | eMirror = OMX_MirrorBoth;
694 | }
695 | OMX_CONFIG_MIRRORTYPE mirror;
696 | OMX_INIT_STRUCTURE(mirror);
697 | mirror.nPortIndex = 71;
698 | mirror.eMirror = eMirror;
699 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigCommonMirror, &mirror)) != OMX_ErrorNone) {
700 | omx_die(r, "Failed to set mirror configuration for camera video output port 71");
701 | }
702 |
703 | // Ensure camera is ready
704 | while(!ctx.camera_ready) {
705 | usleep(10000);
706 | }
707 |
708 | say("Configuring null sink...");
709 |
710 | say("Default port definition for null sink input port 240");
711 | dump_port(ctx.null_sink, 240, OMX_TRUE);
712 |
713 | // Null sink input port definition is done automatically upon tunneling
714 |
715 | // Tunnel camera preview output port and null sink input port
716 | say("Setting up tunnel from camera preview output port 70 to null sink input port 240...");
717 | if((r = OMX_SetupTunnel(ctx.camera, 70, ctx.null_sink, 240)) != OMX_ErrorNone) {
718 | omx_die(r, "Failed to setup tunnel between camera preview output port 70 and null sink input port 240");
719 | }
720 |
721 | // Switch components to idle state
722 | say("Switching state of the camera component to idle...");
723 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandStateSet, OMX_StateIdle, NULL)) != OMX_ErrorNone) {
724 | omx_die(r, "Failed to switch state of the camera component to idle");
725 | }
726 | block_until_state_changed(ctx.camera, OMX_StateIdle);
727 | say("Switching state of the null sink component to idle...");
728 | if((r = OMX_SendCommand(ctx.null_sink, OMX_CommandStateSet, OMX_StateIdle, NULL)) != OMX_ErrorNone) {
729 | omx_die(r, "Failed to switch state of the null sink component to idle");
730 | }
731 | block_until_state_changed(ctx.null_sink, OMX_StateIdle);
732 |
733 | // Enable ports
734 | say("Enabling ports...");
735 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandPortEnable, 73, NULL)) != OMX_ErrorNone) {
736 | omx_die(r, "Failed to enable camera input port 73");
737 | }
738 | block_until_port_changed(ctx.camera, 73, OMX_TRUE);
739 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandPortEnable, 70, NULL)) != OMX_ErrorNone) {
740 | omx_die(r, "Failed to enable camera preview output port 70");
741 | }
742 | block_until_port_changed(ctx.camera, 70, OMX_TRUE);
743 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandPortEnable, 71, NULL)) != OMX_ErrorNone) {
744 | omx_die(r, "Failed to enable camera video output port 71");
745 | }
746 | block_until_port_changed(ctx.camera, 71, OMX_TRUE);
747 | if((r = OMX_SendCommand(ctx.null_sink, OMX_CommandPortEnable, 240, NULL)) != OMX_ErrorNone) {
748 | omx_die(r, "Failed to enable null sink input port 240");
749 | }
750 | block_until_port_changed(ctx.null_sink, 240, OMX_TRUE);
751 |
752 | // Allocate camera input and video output buffers,
753 | // buffers for tunneled ports are allocated internally by OMX
754 | say("Allocating buffers...");
755 | OMX_INIT_STRUCTURE(camera_portdef);
756 | camera_portdef.nPortIndex = 73;
757 | if((r = OMX_GetParameter(ctx.camera, OMX_IndexParamPortDefinition, &camera_portdef)) != OMX_ErrorNone) {
758 | omx_die(r, "Failed to get port definition for camera input port 73");
759 | }
760 | if((r = OMX_AllocateBuffer(ctx.camera, &ctx.camera_ppBuffer_in, 73, NULL, camera_portdef.nBufferSize)) != OMX_ErrorNone) {
761 | omx_die(r, "Failed to allocate buffer for camera input port 73");
762 | }
763 | camera_portdef.nPortIndex = 71;
764 | if((r = OMX_GetParameter(ctx.camera, OMX_IndexParamPortDefinition, &camera_portdef)) != OMX_ErrorNone) {
765 | omx_die(r, "Failed to get port definition for camera vіdeo output port 71");
766 | }
767 | if((r = OMX_AllocateBuffer(ctx.camera, &ctx.camera_ppBuffer_out, 71, NULL, camera_portdef.nBufferSize)) != OMX_ErrorNone) {
768 | omx_die(r, "Failed to allocate buffer for camera video output port 71");
769 | }
770 |
771 | // Just use stdout for output
772 | say("Opening input and output files...");
773 | ctx.fd_out = stdout;
774 |
775 | // Switch state of the components prior to starting
776 | // the video capture loop
777 | say("Switching state of the camera component to executing...");
778 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandStateSet, OMX_StateExecuting, NULL)) != OMX_ErrorNone) {
779 | omx_die(r, "Failed to switch state of the camera component to executing");
780 | }
781 | block_until_state_changed(ctx.camera, OMX_StateExecuting);
782 | say("Switching state of the null sink component to executing...");
783 | if((r = OMX_SendCommand(ctx.null_sink, OMX_CommandStateSet, OMX_StateExecuting, NULL)) != OMX_ErrorNone) {
784 | omx_die(r, "Failed to switch state of the null sink component to executing");
785 | }
786 | block_until_state_changed(ctx.null_sink, OMX_StateExecuting);
787 |
788 | // Start capturing video with the camera
789 | say("Switching on capture on camera video output port 71...");
790 | OMX_CONFIG_PORTBOOLEANTYPE capture;
791 | OMX_INIT_STRUCTURE(capture);
792 | capture.nPortIndex = 71;
793 | capture.bEnabled = OMX_TRUE;
794 | if((r = OMX_SetParameter(ctx.camera, OMX_IndexConfigPortCapturing, &capture)) != OMX_ErrorNone) {
795 | omx_die(r, "Failed to switch on capture on camera video output port 71");
796 | }
797 |
798 | say("Configured port definition for camera input port 73");
799 | dump_port(ctx.camera, 73, OMX_FALSE);
800 | say("Configured port definition for camera preview output port 70");
801 | dump_port(ctx.camera, 70, OMX_FALSE);
802 | say("Configured port definition for camera video output port 71");
803 | dump_port(ctx.camera, 71, OMX_FALSE);
804 | say("Configured port definition for null sink input port 240");
805 | dump_port(ctx.null_sink, 240, OMX_FALSE);
806 |
807 | i420_frame_info frame_info, buf_info;
808 | get_i420_frame_info(camera_portdef.format.image.nFrameWidth, camera_portdef.format.image.nFrameHeight, camera_portdef.format.image.nStride, camera_portdef.format.video.nSliceHeight, &frame_info);
809 | get_i420_frame_info(frame_info.buf_stride, frame_info.buf_slice_height, -1, -1, &buf_info);
810 | dump_frame_info("Destination frame", &frame_info);
811 | dump_frame_info("Source buffer", &buf_info);
812 |
813 | // Buffer representing an I420 frame where to unpack
814 | // the fragmented Y, U, and V plane spans from the OMX buffers
815 | char *frame = calloc(1, frame_info.size);
816 | if(frame == NULL) {
817 | die("Failed to allocate frame buffer");
818 | }
819 |
820 | // Some counters
821 | int frame_num = 1, buf_num = 0;
822 | size_t output_written, frame_bytes = 0, buf_size, buf_bytes_read = 0, buf_bytes_copied;
823 | int i;
824 | // I420 spec: U and V plane span size half of the size of the Y plane span size
825 | int max_spans_y = buf_info.height, max_spans_uv = max_spans_y / 2;
826 | int valid_spans_y, valid_spans_uv;
827 | // For unpack memory copy operation
828 | unsigned char *buf_start;
829 | int max_spans, valid_spans;
830 | int dst_offset, src_offset, span_size;
831 | // For controlling the loop
832 | int quit_detected = 0, quit_in_frame_boundry = 0, need_next_buffer_to_be_filled = 1;
833 |
834 | say("Enter capture loop, press Ctrl-C to quit...");
835 |
836 | signal(SIGINT, signal_handler);
837 | signal(SIGTERM, signal_handler);
838 | signal(SIGQUIT, signal_handler);
839 |
840 | while(1) {
841 | // fill_output_buffer_done_handler() has marked that there's
842 | // a buffer for us to flush
843 | if(ctx.camera_output_buffer_available) {
844 | // Print a message if the user wants to quit, but don't exit
845 | // the loop until we are certain that we have processed
846 | // a full frame till end of the frame. This way we should always
847 | // avoid corruption of the last encoded at the expense of
848 | // small delay in exiting.
849 | if(want_quit && !quit_detected) {
850 | say("Exit signal detected, waiting for next frame boundry before exiting...");
851 | quit_detected = 1;
852 | quit_in_frame_boundry = ctx.camera_ppBuffer_out->nFlags & OMX_BUFFERFLAG_ENDOFFRAME;
853 | }
854 | if(quit_detected &&
855 | (quit_in_frame_boundry ^
856 | (ctx.camera_ppBuffer_out->nFlags & OMX_BUFFERFLAG_ENDOFFRAME))) {
857 | say("Frame boundry reached, exiting loop...");
858 | break;
859 | }
860 | // Start of the OMX buffer data
861 | buf_start = ctx.camera_ppBuffer_out->pBuffer
862 | + ctx.camera_ppBuffer_out->nOffset;
863 | // Size of the OMX buffer data;
864 | buf_size = ctx.camera_ppBuffer_out->nFilledLen;
865 | buf_bytes_read += buf_size;
866 | buf_bytes_copied = 0;
867 | // Detect the possibly non-full buffer in the last buffer of a frame
868 | valid_spans_y = max_spans_y
869 | - ((ctx.camera_ppBuffer_out->nFlags & OMX_BUFFERFLAG_ENDOFFRAME)
870 | ? frame_info.buf_extra_padding
871 | : 0);
872 | // I420 spec: U and V plane span size half of the size of the Y plane span size
873 | valid_spans_uv = valid_spans_y / 2;
874 | // Unpack Y, U, and V plane spans from the buffer to the I420 frame
875 | for(i = 0; i < 3; i++) {
876 | // Number of maximum and valid spans for this plane
877 | max_spans = (i == 0 ? max_spans_y : max_spans_uv);
878 | valid_spans = (i == 0 ? valid_spans_y : valid_spans_uv);
879 | dst_offset =
880 | // Start of the plane span in the I420 frame
881 | frame_info.p_offset[i] +
882 | // Plane spans copied from the previous buffers
883 | (buf_num * frame_info.p_stride[i] * max_spans);
884 | src_offset =
885 | // Start of the plane span in the buffer
886 | buf_info.p_offset[i];
887 | span_size =
888 | // Plane span size multiplied by the available spans in the buffer
889 | frame_info.p_stride[i] * valid_spans;
890 | memcpy(
891 | // Destination starts from the beginning of the frame and move forward by offset
892 | frame + dst_offset,
893 | // Source starts from the beginning of the OMX component buffer and move forward by offset
894 | buf_start + src_offset,
895 | // The final plane span size, possible padding at the end of
896 | // the plane span section in the buffer isn't included
897 | // since the size is based on the final frame plane span size
898 | span_size);
899 | buf_bytes_copied += span_size;
900 | }
901 | frame_bytes += buf_bytes_copied;
902 | buf_num++;
903 | say("Read %d bytes from buffer %d of frame %d, copied %d bytes from %d Y spans and %d U/V spans available",
904 | buf_size, buf_num, frame_num, buf_bytes_copied, valid_spans_y, valid_spans_uv);
905 | if(ctx.camera_ppBuffer_out->nFlags & OMX_BUFFERFLAG_ENDOFFRAME) {
906 | // Dump the complete I420 frame
907 | say("Captured frame %d, %d packed bytes read, %d bytes unpacked, writing %d unpacked frame bytes",
908 | frame_num, buf_bytes_read, frame_bytes, frame_info.size);
909 | if(frame_bytes != frame_info.size) {
910 | die("Frame bytes read %d doesn't match the frame size %d",
911 | frame_bytes, frame_info.size);
912 | }
913 | output_written = fwrite(frame, 1, frame_info.size, ctx.fd_out);
914 | if(output_written != frame_info.size) {
915 | die("Failed to write to output file: Requested to write %d bytes, but only %d bytes written: %s",
916 | frame_info.size, output_written, strerror(errno));
917 | }
918 | frame_num++;
919 | buf_num = 0;
920 | buf_bytes_read = 0;
921 | frame_bytes = 0;
922 | memset(frame, 0, frame_info.size);
923 | }
924 | need_next_buffer_to_be_filled = 1;
925 | }
926 | // Buffer flushed, request a new buffer to be filled by the camera component
927 | if(need_next_buffer_to_be_filled) {
928 | need_next_buffer_to_be_filled = 0;
929 | ctx.camera_output_buffer_available = 0;
930 | if((r = OMX_FillThisBuffer(ctx.camera, ctx.camera_ppBuffer_out)) != OMX_ErrorNone) {
931 | omx_die(r, "Failed to request filling of the output buffer on camera video output port 71");
932 | }
933 | }
934 | // Would be better to use signaling here but hey this works too
935 | usleep(10);
936 | }
937 | say("Cleaning up...");
938 |
939 | // Restore signal handlers
940 | signal(SIGINT, SIG_DFL);
941 | signal(SIGTERM, SIG_DFL);
942 | signal(SIGQUIT, SIG_DFL);
943 |
944 | // Stop capturing video with the camera
945 | OMX_INIT_STRUCTURE(capture);
946 | capture.nPortIndex = 71;
947 | capture.bEnabled = OMX_FALSE;
948 | if((r = OMX_SetParameter(ctx.camera, OMX_IndexConfigPortCapturing, &capture)) != OMX_ErrorNone) {
949 | omx_die(r, "Failed to switch off capture on camera video output port 71");
950 | }
951 |
952 | // Return the last full buffer back to the camera component
953 | if((r = OMX_FillThisBuffer(ctx.camera, ctx.camera_ppBuffer_out)) != OMX_ErrorNone) {
954 | omx_die(r, "Failed to request filling of the output buffer on camera video output port 71");
955 | }
956 |
957 | // Flush the buffers on each component
958 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandFlush, 73, NULL)) != OMX_ErrorNone) {
959 | omx_die(r, "Failed to flush buffers of camera input port 73");
960 | }
961 | block_until_flushed(&ctx);
962 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandFlush, 70, NULL)) != OMX_ErrorNone) {
963 | omx_die(r, "Failed to flush buffers of camera preview output port 70");
964 | }
965 | block_until_flushed(&ctx);
966 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandFlush, 71, NULL)) != OMX_ErrorNone) {
967 | omx_die(r, "Failed to flush buffers of camera video output port 71");
968 | }
969 | block_until_flushed(&ctx);
970 | if((r = OMX_SendCommand(ctx.null_sink, OMX_CommandFlush, 240, NULL)) != OMX_ErrorNone) {
971 | omx_die(r, "Failed to flush buffers of null sink input port 240");
972 | }
973 | block_until_flushed(&ctx);
974 |
975 | // Disable all the ports
976 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandPortDisable, 73, NULL)) != OMX_ErrorNone) {
977 | omx_die(r, "Failed to disable camera input port 73");
978 | }
979 | block_until_port_changed(ctx.camera, 73, OMX_FALSE);
980 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandPortDisable, 70, NULL)) != OMX_ErrorNone) {
981 | omx_die(r, "Failed to disable camera preview output port 70");
982 | }
983 | block_until_port_changed(ctx.camera, 70, OMX_FALSE);
984 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandPortDisable, 71, NULL)) != OMX_ErrorNone) {
985 | omx_die(r, "Failed to disable camera video output port 71");
986 | }
987 | block_until_port_changed(ctx.camera, 71, OMX_FALSE);
988 | if((r = OMX_SendCommand(ctx.null_sink, OMX_CommandPortDisable, 240, NULL)) != OMX_ErrorNone) {
989 | omx_die(r, "Failed to disable null sink input port 240");
990 | }
991 | block_until_port_changed(ctx.null_sink, 240, OMX_FALSE);
992 |
993 | // Free all the buffers
994 | if((r = OMX_FreeBuffer(ctx.camera, 73, ctx.camera_ppBuffer_in)) != OMX_ErrorNone) {
995 | omx_die(r, "Failed to free buffer for camera input port 73");
996 | }
997 | if((r = OMX_FreeBuffer(ctx.camera, 71, ctx.camera_ppBuffer_out)) != OMX_ErrorNone) {
998 | omx_die(r, "Failed to free buffer for camera video output port 71");
999 | }
1000 |
1001 | // Transition all the components to idle and then to loaded states
1002 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandStateSet, OMX_StateIdle, NULL)) != OMX_ErrorNone) {
1003 | omx_die(r, "Failed to switch state of the camera component to idle");
1004 | }
1005 | block_until_state_changed(ctx.camera, OMX_StateIdle);
1006 | if((r = OMX_SendCommand(ctx.null_sink, OMX_CommandStateSet, OMX_StateIdle, NULL)) != OMX_ErrorNone) {
1007 | omx_die(r, "Failed to switch state of the null sink component to idle");
1008 | }
1009 | block_until_state_changed(ctx.null_sink, OMX_StateIdle);
1010 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandStateSet, OMX_StateLoaded, NULL)) != OMX_ErrorNone) {
1011 | omx_die(r, "Failed to switch state of the camera component to loaded");
1012 | }
1013 | block_until_state_changed(ctx.camera, OMX_StateLoaded);
1014 | if((r = OMX_SendCommand(ctx.null_sink, OMX_CommandStateSet, OMX_StateLoaded, NULL)) != OMX_ErrorNone) {
1015 | omx_die(r, "Failed to switch state of the null sink component to loaded");
1016 | }
1017 | block_until_state_changed(ctx.null_sink, OMX_StateLoaded);
1018 |
1019 | // Free the component handles
1020 | if((r = OMX_FreeHandle(ctx.camera)) != OMX_ErrorNone) {
1021 | omx_die(r, "Failed to free camera component handle");
1022 | }
1023 | if((r = OMX_FreeHandle(ctx.null_sink)) != OMX_ErrorNone) {
1024 | omx_die(r, "Failed to free null sink component handle");
1025 | }
1026 |
1027 | // Exit
1028 | fclose(ctx.fd_out);
1029 | free(frame);
1030 |
1031 | vcos_semaphore_delete(&ctx.handler_lock);
1032 | if((r = OMX_Deinit()) != OMX_ErrorNone) {
1033 | omx_die(r, "OMX de-initalization failed");
1034 | }
1035 |
1036 | say("Exit!");
1037 |
1038 | return 0;
1039 | }
1040 |
--------------------------------------------------------------------------------
/rpi-camera-encode.c:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright © 2013 Tuomas Jormola
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 |  *     http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | *
16 | * Short intro about this program:
17 | *
18 | * `rpi-camera-encode` records video using the RaspiCam module and encodes the
19 | * stream using the VideoCore hardware encoder using H.264 codec. The raw H.264
20 | * stream is emitted to `stdout`. In order to properly display the encoded video,
21 | * it must be wrapped inside a container format, e.g.
22 | * [Matroska](http://matroska.org/technical/specs/).
23 | *
24 |  * The following example uses `mkvmerge` tool from the
25 | * [MKVToolNix](http://www.bunkus.org/videotools/mkvtoolnix/) software package to
26 | * create a Matroska video file from the recorded H.264 file and then play it using
27 | * [omxplayer](https://github.com/huceke/omxplayer) (although omxplayer happens to
28 | * deal also with the raw H.264 stream, but generally other players, such
29 | * [avplay](http://libav.org/avplay.html), don't).
30 | *
31 | * $ ./rpi-camera-encode >test.h264
32 | * # Press Ctrl-C to interrupt the recording...
33 | * $ mkvmerge -o test.mkv test.h264
34 | * $ omxplayer test.mkv
35 | *
36 | * `rpi-camera-encode` uses `camera`, `video_encode` and `null_sink` components.
37 | * `camera` video output port is tunneled to `video_encode` input port and
38 | * `camera` preview output port is tunneled to `null_sink` input port. H.264
39 | * encoded video is read from the buffer of `video_encode` output port and dumped
40 | * to `stdout`.
41 | *
42 | * Please see README.mdwn for more detailed description of this
43 | * OpenMAX IL demos for Raspberry Pi bundle.
44 | *
45 | */
46 |
#include <errno.h>
#include <signal.h>
#include <stdarg.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>

#include <bcm_host.h>

#include <interface/vcos/vcos_semaphore.h>
#include <interface/vmcs_host/vchost.h>

#include <IL/OMX_Core.h>
#include <IL/OMX_Component.h>
#include <IL/OMX_Video.h>
#include <IL/OMX_Broadcom.h>
63 |
64 | // Hard coded parameters
65 | #define VIDEO_WIDTH 1920
66 | #define VIDEO_HEIGHT 1080
67 | #define VIDEO_FRAMERATE 25
68 | #define VIDEO_BITRATE 10000000
69 | #define CAM_DEVICE_NUMBER 0
70 | #define CAM_SHARPNESS 0 // -100 .. 100
71 | #define CAM_CONTRAST 0 // -100 .. 100
72 | #define CAM_BRIGHTNESS 50 // 0 .. 100
73 | #define CAM_SATURATION 0 // -100 .. 100
74 | #define CAM_EXPOSURE_VALUE_COMPENSTAION 0
75 | #define CAM_EXPOSURE_ISO_SENSITIVITY 100
76 | #define CAM_EXPOSURE_AUTO_SENSITIVITY OMX_FALSE
77 | #define CAM_FRAME_STABILISATION OMX_TRUE
78 | #define CAM_WHITE_BALANCE_CONTROL OMX_WhiteBalControlAuto // OMX_WHITEBALCONTROLTYPE
79 | #define CAM_IMAGE_FILTER OMX_ImageFilterNoise // OMX_IMAGEFILTERTYPE
80 | #define CAM_FLIP_HORIZONTAL OMX_FALSE
81 | #define CAM_FLIP_VERTICAL OMX_FALSE
82 |
// Initialize an OMX API struct: zero it, then fill in the mandatory
// nSize/nVersion header fields that every OMX structure carries.
// Wrapped in do { } while(0) so the macro expands to a single statement
// and is safe inside unbraced if/else bodies.
#define OMX_INIT_STRUCTURE(a) \
    do { \
        memset(&(a), 0, sizeof(a)); \
        (a).nSize = sizeof(a); \
        (a).nVersion.nVersion = OMX_VERSION; \
        (a).nVersion.s.nVersionMajor = OMX_VERSION_MAJOR; \
        (a).nVersion.s.nVersionMinor = OMX_VERSION_MINOR; \
        (a).nVersion.s.nRevision = OMX_VERSION_REVISION; \
        (a).nVersion.s.nStep = OMX_VERSION_STEP; \
    } while(0)
92 |
93 | // Global variable used by the signal handler and capture/encoding loop
94 | static int want_quit = 0;
95 |
96 | // Our application context passed around
97 | // the main routine and callback handlers
98 | typedef struct {
99 | OMX_HANDLETYPE camera;
100 | OMX_BUFFERHEADERTYPE *camera_ppBuffer_in;
101 | int camera_ready;
102 | OMX_HANDLETYPE encoder;
103 | OMX_BUFFERHEADERTYPE *encoder_ppBuffer_out;
104 | int encoder_output_buffer_available;
105 | OMX_HANDLETYPE null_sink;
106 | int flushed;
107 | FILE *fd_out;
108 | VCOS_SEMAPHORE_T handler_lock;
109 | } appctx;
110 |
111 | // Ugly, stupid utility functions
// Log a printf-style message to stderr, appending a newline when the
// formatted text doesn't already end with one.
static void say(const char* message, ...) {
    va_list args;
    char str[1024];
    memset(str, 0, sizeof(str));
    va_start(args, message);
    vsnprintf(str, sizeof(str) - 1, message, args);
    va_end(args);
    size_t str_len = strnlen(str, sizeof(str));
    // Guard str_len > 0: with an empty message the original indexed
    // str[-1], which is undefined behavior.
    if(str_len > 0 && str[str_len - 1] != '\n') {
        str[str_len] = '\n';
    }
    // Use fputs instead of fprintf(stderr, str): passing formatted text as
    // a format string re-interprets any '%' it contains (format-string bug).
    fputs(str, stderr);
}
125 |
// Log a printf-style message and terminate the process with status 1.
static void die(const char* message, ...) {
    va_list args;
    char str[1024];
    memset(str, 0, sizeof(str));
    va_start(args, message);
    vsnprintf(str, sizeof(str), message, args);
    va_end(args);
    // Pass the already-formatted text through "%s": calling say(str)
    // re-interprets any '%' in the expanded message as format specifiers.
    say("%s", str);
    exit(1);
}
136 |
137 | static void omx_die(OMX_ERRORTYPE error, const char* message, ...) {
138 | va_list args;
139 | char str[1024];
140 | char *e;
141 | memset(str, 0, sizeof(str));
142 | va_start(args, message);
143 | vsnprintf(str, sizeof(str), message, args);
144 | va_end(args);
145 | switch(error) {
146 | case OMX_ErrorNone: e = "no error"; break;
147 | case OMX_ErrorBadParameter: e = "bad parameter"; break;
148 | case OMX_ErrorIncorrectStateOperation: e = "invalid state while trying to perform command"; break;
149 | case OMX_ErrorIncorrectStateTransition: e = "unallowed state transition"; break;
150 | case OMX_ErrorInsufficientResources: e = "insufficient resource"; break;
151 | case OMX_ErrorBadPortIndex: e = "bad port index, i.e. incorrect port"; break;
152 | case OMX_ErrorHardware: e = "hardware error"; break;
153 | /* That's all I've encountered during hacking so let's not bother with the rest... */
154 | default: e = "(no description)";
155 | }
156 | die("OMX error: %s: 0x%08x %s", str, error, e);
157 | }
158 |
159 | static void dump_event(OMX_HANDLETYPE hComponent, OMX_EVENTTYPE eEvent, OMX_U32 nData1, OMX_U32 nData2) {
160 | char *e;
161 | switch(eEvent) {
162 | case OMX_EventCmdComplete: e = "command complete"; break;
163 | case OMX_EventError: e = "error"; break;
164 | case OMX_EventParamOrConfigChanged: e = "parameter or configuration changed"; break;
165 | case OMX_EventPortSettingsChanged: e = "port settings changed"; break;
166 | /* That's all I've encountered during hacking so let's not bother with the rest... */
167 | default:
168 | e = "(no description)";
169 | }
170 | say("Received event 0x%08x %s, hComponent:0x%08x, nData1:0x%08x, nData2:0x%08x",
171 | eEvent, e, hComponent, nData1, nData2);
172 | }
173 |
174 | static const char* dump_compression_format(OMX_VIDEO_CODINGTYPE c) {
175 | char *f;
176 | switch(c) {
177 | case OMX_VIDEO_CodingUnused: return "not used";
178 | case OMX_VIDEO_CodingAutoDetect: return "autodetect";
179 | case OMX_VIDEO_CodingMPEG2: return "MPEG2";
180 | case OMX_VIDEO_CodingH263: return "H.263";
181 | case OMX_VIDEO_CodingMPEG4: return "MPEG4";
182 | case OMX_VIDEO_CodingWMV: return "Windows Media Video";
183 | case OMX_VIDEO_CodingRV: return "RealVideo";
184 | case OMX_VIDEO_CodingAVC: return "H.264/AVC";
185 | case OMX_VIDEO_CodingMJPEG: return "Motion JPEG";
186 | case OMX_VIDEO_CodingVP6: return "VP6";
187 | case OMX_VIDEO_CodingVP7: return "VP7";
188 | case OMX_VIDEO_CodingVP8: return "VP8";
189 | case OMX_VIDEO_CodingYUV: return "Raw YUV video";
190 | case OMX_VIDEO_CodingSorenson: return "Sorenson";
191 | case OMX_VIDEO_CodingTheora: return "OGG Theora";
192 | case OMX_VIDEO_CodingMVC: return "H.264/MVC";
193 |
194 | default:
195 | f = calloc(23, sizeof(char));
196 | if(f == NULL) {
197 | die("Failed to allocate memory");
198 | }
199 | snprintf(f, 23 * sizeof(char) - 1, "format type 0x%08x", c);
200 | return f;
201 | }
202 | }
203 | static const char* dump_color_format(OMX_COLOR_FORMATTYPE c) {
204 | char *f;
205 | switch(c) {
206 | case OMX_COLOR_FormatUnused: return "OMX_COLOR_FormatUnused: not used";
207 | case OMX_COLOR_FormatMonochrome: return "OMX_COLOR_FormatMonochrome";
208 | case OMX_COLOR_Format8bitRGB332: return "OMX_COLOR_Format8bitRGB332";
209 | case OMX_COLOR_Format12bitRGB444: return "OMX_COLOR_Format12bitRGB444";
210 | case OMX_COLOR_Format16bitARGB4444: return "OMX_COLOR_Format16bitARGB4444";
211 | case OMX_COLOR_Format16bitARGB1555: return "OMX_COLOR_Format16bitARGB1555";
212 | case OMX_COLOR_Format16bitRGB565: return "OMX_COLOR_Format16bitRGB565";
213 | case OMX_COLOR_Format16bitBGR565: return "OMX_COLOR_Format16bitBGR565";
214 | case OMX_COLOR_Format18bitRGB666: return "OMX_COLOR_Format18bitRGB666";
215 | case OMX_COLOR_Format18bitARGB1665: return "OMX_COLOR_Format18bitARGB1665";
216 | case OMX_COLOR_Format19bitARGB1666: return "OMX_COLOR_Format19bitARGB1666";
217 | case OMX_COLOR_Format24bitRGB888: return "OMX_COLOR_Format24bitRGB888";
218 | case OMX_COLOR_Format24bitBGR888: return "OMX_COLOR_Format24bitBGR888";
219 | case OMX_COLOR_Format24bitARGB1887: return "OMX_COLOR_Format24bitARGB1887";
220 | case OMX_COLOR_Format25bitARGB1888: return "OMX_COLOR_Format25bitARGB1888";
221 | case OMX_COLOR_Format32bitBGRA8888: return "OMX_COLOR_Format32bitBGRA8888";
222 | case OMX_COLOR_Format32bitARGB8888: return "OMX_COLOR_Format32bitARGB8888";
223 | case OMX_COLOR_FormatYUV411Planar: return "OMX_COLOR_FormatYUV411Planar";
224 | case OMX_COLOR_FormatYUV411PackedPlanar: return "OMX_COLOR_FormatYUV411PackedPlanar: Planes fragmented when a frame is split in multiple buffers";
225 | case OMX_COLOR_FormatYUV420Planar: return "OMX_COLOR_FormatYUV420Planar: Planar YUV, 4:2:0 (I420)";
226 | case OMX_COLOR_FormatYUV420PackedPlanar: return "OMX_COLOR_FormatYUV420PackedPlanar: Planar YUV, 4:2:0 (I420), planes fragmented when a frame is split in multiple buffers";
227 | case OMX_COLOR_FormatYUV420SemiPlanar: return "OMX_COLOR_FormatYUV420SemiPlanar, Planar YUV, 4:2:0 (NV12), U and V planes interleaved with first U value";
228 | case OMX_COLOR_FormatYUV422Planar: return "OMX_COLOR_FormatYUV422Planar";
229 | case OMX_COLOR_FormatYUV422PackedPlanar: return "OMX_COLOR_FormatYUV422PackedPlanar: Planes fragmented when a frame is split in multiple buffers";
230 | case OMX_COLOR_FormatYUV422SemiPlanar: return "OMX_COLOR_FormatYUV422SemiPlanar";
231 | case OMX_COLOR_FormatYCbYCr: return "OMX_COLOR_FormatYCbYCr";
232 | case OMX_COLOR_FormatYCrYCb: return "OMX_COLOR_FormatYCrYCb";
233 | case OMX_COLOR_FormatCbYCrY: return "OMX_COLOR_FormatCbYCrY";
234 | case OMX_COLOR_FormatCrYCbY: return "OMX_COLOR_FormatCrYCbY";
235 | case OMX_COLOR_FormatYUV444Interleaved: return "OMX_COLOR_FormatYUV444Interleaved";
236 | case OMX_COLOR_FormatRawBayer8bit: return "OMX_COLOR_FormatRawBayer8bit";
237 | case OMX_COLOR_FormatRawBayer10bit: return "OMX_COLOR_FormatRawBayer10bit";
238 | case OMX_COLOR_FormatRawBayer8bitcompressed: return "OMX_COLOR_FormatRawBayer8bitcompressed";
239 | case OMX_COLOR_FormatL2: return "OMX_COLOR_FormatL2";
240 | case OMX_COLOR_FormatL4: return "OMX_COLOR_FormatL4";
241 | case OMX_COLOR_FormatL8: return "OMX_COLOR_FormatL8";
242 | case OMX_COLOR_FormatL16: return "OMX_COLOR_FormatL16";
243 | case OMX_COLOR_FormatL24: return "OMX_COLOR_FormatL24";
244 | case OMX_COLOR_FormatL32: return "OMX_COLOR_FormatL32";
245 | case OMX_COLOR_FormatYUV420PackedSemiPlanar: return "OMX_COLOR_FormatYUV420PackedSemiPlanar: Planar YUV, 4:2:0 (NV12), planes fragmented when a frame is split in multiple buffers, U and V planes interleaved with first U value";
246 | case OMX_COLOR_FormatYUV422PackedSemiPlanar: return "OMX_COLOR_FormatYUV422PackedSemiPlanar: Planes fragmented when a frame is split in multiple buffers";
247 | case OMX_COLOR_Format18BitBGR666: return "OMX_COLOR_Format18BitBGR666";
248 | case OMX_COLOR_Format24BitARGB6666: return "OMX_COLOR_Format24BitARGB6666";
249 | case OMX_COLOR_Format24BitABGR6666: return "OMX_COLOR_Format24BitABGR6666";
250 | case OMX_COLOR_Format32bitABGR8888: return "OMX_COLOR_Format32bitABGR8888";
251 | case OMX_COLOR_Format8bitPalette: return "OMX_COLOR_Format8bitPalette";
252 | case OMX_COLOR_FormatYUVUV128: return "OMX_COLOR_FormatYUVUV128";
253 | case OMX_COLOR_FormatRawBayer12bit: return "OMX_COLOR_FormatRawBayer12bit";
254 | case OMX_COLOR_FormatBRCMEGL: return "OMX_COLOR_FormatBRCMEGL";
255 | case OMX_COLOR_FormatBRCMOpaque: return "OMX_COLOR_FormatBRCMOpaque";
256 | case OMX_COLOR_FormatYVU420PackedPlanar: return "OMX_COLOR_FormatYVU420PackedPlanar";
257 | case OMX_COLOR_FormatYVU420PackedSemiPlanar: return "OMX_COLOR_FormatYVU420PackedSemiPlanar";
258 | default:
259 | f = calloc(23, sizeof(char));
260 | if(f == NULL) {
261 | die("Failed to allocate memory");
262 | }
263 | snprintf(f, 23 * sizeof(char) - 1, "format type 0x%08x", c);
264 | return f;
265 | }
266 | }
267 |
// Pretty-print an OMX port definition: the common buffer bookkeeping first,
// then the format details of whichever domain (video or image) the port
// belongs to. Output goes to stderr via say().
static void dump_portdef(OMX_PARAM_PORTDEFINITIONTYPE* portdef) {
    say("Port %d is %s, %s, buffers wants:%d needs:%d, size:%d, pop:%d, aligned:%d",
        portdef->nPortIndex,
        (portdef->eDir == OMX_DirInput ? "input" : "output"),
        (portdef->bEnabled == OMX_TRUE ? "enabled" : "disabled"),
        portdef->nBufferCountActual,
        portdef->nBufferCountMin,
        portdef->nBufferSize,
        portdef->bPopulated,
        portdef->nBufferAlignment);

    // portdef->format is a union; only the member matching eDomain
    // (selected in the switch below) holds valid data.
    OMX_VIDEO_PORTDEFINITIONTYPE *viddef = &portdef->format.video;
    OMX_IMAGE_PORTDEFINITIONTYPE *imgdef = &portdef->format.image;
    switch(portdef->eDomain) {
        case OMX_PortDomainVideo:
            say("Video type:\n"
                "\tWidth:\t\t%d\n"
                "\tHeight:\t\t%d\n"
                "\tStride:\t\t%d\n"
                "\tSliceHeight:\t%d\n"
                "\tBitrate:\t%d\n"
                "\tFramerate:\t%.02f\n"
                "\tError hiding:\t%s\n"
                "\tCodec:\t\t%s\n"
                "\tColor:\t\t%s\n",
                viddef->nFrameWidth,
                viddef->nFrameHeight,
                viddef->nStride,
                viddef->nSliceHeight,
                viddef->nBitrate,
                // xFramerate is Q16 fixed point: divide by 2^16 for fps
                ((float)viddef->xFramerate / (float)65536),
                (viddef->bFlagErrorConcealment == OMX_TRUE ? "yes" : "no"),
                dump_compression_format(viddef->eCompressionFormat),
                dump_color_format(viddef->eColorFormat));
            break;
        case OMX_PortDomainImage:
            say("Image type:\n"
                "\tWidth:\t\t%d\n"
                "\tHeight:\t\t%d\n"
                "\tStride:\t\t%d\n"
                "\tSliceHeight:\t%d\n"
                "\tError hiding:\t%s\n"
                "\tCodec:\t\t%s\n"
                "\tColor:\t\t%s\n",
                imgdef->nFrameWidth,
                imgdef->nFrameHeight,
                imgdef->nStride,
                imgdef->nSliceHeight,
                (imgdef->bFlagErrorConcealment == OMX_TRUE ? "yes" : "no"),
                dump_compression_format(imgdef->eCompressionFormat),
                dump_color_format(imgdef->eColorFormat));
            break;
        // Other domains (audio, "other") carry no format details we print
        default:
            break;
    }
}
324 |
325 | static void dump_port(OMX_HANDLETYPE hComponent, OMX_U32 nPortIndex, OMX_BOOL dumpformats) {
326 | OMX_ERRORTYPE r;
327 | OMX_PARAM_PORTDEFINITIONTYPE portdef;
328 | OMX_INIT_STRUCTURE(portdef);
329 | portdef.nPortIndex = nPortIndex;
330 | if((r = OMX_GetParameter(hComponent, OMX_IndexParamPortDefinition, &portdef)) != OMX_ErrorNone) {
331 | omx_die(r, "Failed to get port definition for port %d", nPortIndex);
332 | }
333 | dump_portdef(&portdef);
334 | if(dumpformats) {
335 | OMX_VIDEO_PARAM_PORTFORMATTYPE portformat;
336 | OMX_INIT_STRUCTURE(portformat);
337 | portformat.nPortIndex = nPortIndex;
338 | portformat.nIndex = 0;
339 | r = OMX_ErrorNone;
340 | say("Port %d supports these video formats:", nPortIndex);
341 | while(r == OMX_ErrorNone) {
342 | if((r = OMX_GetParameter(hComponent, OMX_IndexParamVideoPortFormat, &portformat)) == OMX_ErrorNone) {
343 | say("\t%s, compression: %s", dump_color_format(portformat.eColorFormat), dump_compression_format(portformat.eCompressionFormat));
344 | portformat.nIndex++;
345 | }
346 | }
347 | }
348 | }
349 |
350 | // Some busy loops to verify we're running in order
351 | static void block_until_state_changed(OMX_HANDLETYPE hComponent, OMX_STATETYPE wanted_eState) {
352 | OMX_STATETYPE eState;
353 | int i = 0;
354 | while(i++ == 0 || eState != wanted_eState) {
355 | OMX_GetState(hComponent, &eState);
356 | if(eState != wanted_eState) {
357 | usleep(10000);
358 | }
359 | }
360 | }
361 |
362 | static void block_until_port_changed(OMX_HANDLETYPE hComponent, OMX_U32 nPortIndex, OMX_BOOL bEnabled) {
363 | OMX_ERRORTYPE r;
364 | OMX_PARAM_PORTDEFINITIONTYPE portdef;
365 | OMX_INIT_STRUCTURE(portdef);
366 | portdef.nPortIndex = nPortIndex;
367 | OMX_U32 i = 0;
368 | while(i++ == 0 || portdef.bEnabled != bEnabled) {
369 | if((r = OMX_GetParameter(hComponent, OMX_IndexParamPortDefinition, &portdef)) != OMX_ErrorNone) {
370 | omx_die(r, "Failed to get port definition");
371 | }
372 | if(portdef.bEnabled != bEnabled) {
373 | usleep(10000);
374 | }
375 | }
376 | }
377 |
378 | static void block_until_flushed(appctx *ctx) {
379 | int quit;
380 | while(!quit) {
381 | vcos_semaphore_wait(&ctx->handler_lock);
382 | if(ctx->flushed) {
383 | ctx->flushed = 0;
384 | quit = 1;
385 | }
386 | vcos_semaphore_post(&ctx->handler_lock);
387 | if(!quit) {
388 | usleep(10000);
389 | }
390 | }
391 | }
392 |
393 | static void init_component_handle(
394 | const char *name,
395 | OMX_HANDLETYPE* hComponent,
396 | OMX_PTR pAppData,
397 | OMX_CALLBACKTYPE* callbacks) {
398 | OMX_ERRORTYPE r;
399 | char fullname[32];
400 |
401 | // Get handle
402 | memset(fullname, 0, sizeof(fullname));
403 | strcat(fullname, "OMX.broadcom.");
404 | strncat(fullname, name, strlen(fullname) - 1);
405 | say("Initializing component %s", fullname);
406 | if((r = OMX_GetHandle(hComponent, fullname, pAppData, callbacks)) != OMX_ErrorNone) {
407 | omx_die(r, "Failed to get handle for component %s", fullname);
408 | }
409 |
410 | // Disable ports
411 | OMX_INDEXTYPE types[] = {
412 | OMX_IndexParamAudioInit,
413 | OMX_IndexParamVideoInit,
414 | OMX_IndexParamImageInit,
415 | OMX_IndexParamOtherInit
416 | };
417 | OMX_PORT_PARAM_TYPE ports;
418 | OMX_INIT_STRUCTURE(ports);
419 | OMX_GetParameter(*hComponent, OMX_IndexParamVideoInit, &ports);
420 |
421 | int i;
422 | for(i = 0; i < 4; i++) {
423 | if(OMX_GetParameter(*hComponent, types[i], &ports) == OMX_ErrorNone) {
424 | OMX_U32 nPortIndex;
425 | for(nPortIndex = ports.nStartPortNumber; nPortIndex < ports.nStartPortNumber + ports.nPorts; nPortIndex++) {
426 | say("Disabling port %d of component %s", nPortIndex, fullname);
427 | if((r = OMX_SendCommand(*hComponent, OMX_CommandPortDisable, nPortIndex, NULL)) != OMX_ErrorNone) {
428 | omx_die(r, "Failed to disable port %d of component %s", nPortIndex, fullname);
429 | }
430 | block_until_port_changed(*hComponent, nPortIndex, OMX_FALSE);
431 | }
432 | }
433 | }
434 | }
435 |
436 | // Global signal handler for trapping SIGINT, SIGTERM, and SIGQUIT
437 | static void signal_handler(int signal) {
438 | want_quit = 1;
439 | }
440 |
441 | // OMX calls this handler for all the events it emits
static OMX_ERRORTYPE event_handler(
        OMX_HANDLETYPE hComponent,
        OMX_PTR pAppData,
        OMX_EVENTTYPE eEvent,
        OMX_U32 nData1,
        OMX_U32 nData2,
        OMX_PTR pEventData) {

    // Log every event for debugging
    dump_event(hComponent, eEvent, nData1, nData2);

    appctx *ctx = (appctx *)pAppData;

    // This callback runs on an OMX thread while the main loop polls the
    // context flags, so each flag update is guarded by handler_lock.
    switch(eEvent) {
        case OMX_EventCmdComplete:
            // A component command finished; record flush completions so
            // block_until_flushed() in the main thread can proceed
            vcos_semaphore_wait(&ctx->handler_lock);
            if(nData1 == OMX_CommandFlush) {
                ctx->flushed = 1;
            }
            vcos_semaphore_post(&ctx->handler_lock);
            break;
        case OMX_EventParamOrConfigChanged:
            // The camera signals readiness by changing the device number
            // parameter (the callback requested in main); flag it
            vcos_semaphore_wait(&ctx->handler_lock);
            if(nData2 == OMX_IndexParamCameraDeviceNumber) {
                ctx->camera_ready = 1;
            }
            vcos_semaphore_post(&ctx->handler_lock);
            break;
        case OMX_EventError:
            // Any asynchronous OMX error is treated as fatal
            omx_die(nData1, "error event received");
            break;
        default:
            break;
    }

    return OMX_ErrorNone;
}
478 |
// Called by OMX (on an OMX thread) when the encoder component has filled
// the output buffer with H.264 encoded video data. Only sets a flag; the
// buffer itself is consumed by the main loop.
static OMX_ERRORTYPE fill_output_buffer_done_handler(
        OMX_HANDLETYPE hComponent,
        OMX_PTR pAppData,
        OMX_BUFFERHEADERTYPE* pBuffer) {
    appctx *ctx = ((appctx*)pAppData);
    // Guard the flag with handler_lock; the main loop polls it concurrently
    vcos_semaphore_wait(&ctx->handler_lock);
    // The main loop can now flush the buffer to output file
    ctx->encoder_output_buffer_available = 1;
    vcos_semaphore_post(&ctx->handler_lock);
    return OMX_ErrorNone;
}
492 |
493 | int main(int argc, char **argv) {
494 | bcm_host_init();
495 |
496 | OMX_ERRORTYPE r;
497 |
498 | if((r = OMX_Init()) != OMX_ErrorNone) {
499 | omx_die(r, "OMX initalization failed");
500 | }
501 |
502 | // Init context
503 | appctx ctx;
504 | memset(&ctx, 0, sizeof(ctx));
505 | if(vcos_semaphore_create(&ctx.handler_lock, "handler_lock", 1) != VCOS_SUCCESS) {
506 | die("Failed to create handler lock semaphore");
507 | }
508 |
509 | // Init component handles
510 | OMX_CALLBACKTYPE callbacks;
511 | memset(&ctx, 0, sizeof(callbacks));
512 | callbacks.EventHandler = event_handler;
513 | callbacks.FillBufferDone = fill_output_buffer_done_handler;
514 |
515 | init_component_handle("camera", &ctx.camera , &ctx, &callbacks);
516 | init_component_handle("video_encode", &ctx.encoder, &ctx, &callbacks);
517 | init_component_handle("null_sink", &ctx.null_sink, &ctx, &callbacks);
518 |
519 | say("Configuring camera...");
520 |
521 | say("Default port definition for camera input port 73");
522 | dump_port(ctx.camera, 73, OMX_TRUE);
523 | say("Default port definition for camera preview output port 70");
524 | dump_port(ctx.camera, 70, OMX_TRUE);
525 | say("Default port definition for camera video output port 71");
526 | dump_port(ctx.camera, 71, OMX_TRUE);
527 |
528 | // Request a callback to be made when OMX_IndexParamCameraDeviceNumber is
529 | // changed signaling that the camera device is ready for use.
530 | OMX_CONFIG_REQUESTCALLBACKTYPE cbtype;
531 | OMX_INIT_STRUCTURE(cbtype);
532 | cbtype.nPortIndex = OMX_ALL;
533 | cbtype.nIndex = OMX_IndexParamCameraDeviceNumber;
534 | cbtype.bEnable = OMX_TRUE;
535 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigRequestCallback, &cbtype)) != OMX_ErrorNone) {
536 | omx_die(r, "Failed to request camera device number parameter change callback for camera");
537 | }
538 | // Set device number, this triggers the callback configured just above
539 | OMX_PARAM_U32TYPE device;
540 | OMX_INIT_STRUCTURE(device);
541 | device.nPortIndex = OMX_ALL;
542 | device.nU32 = CAM_DEVICE_NUMBER;
543 | if((r = OMX_SetParameter(ctx.camera, OMX_IndexParamCameraDeviceNumber, &device)) != OMX_ErrorNone) {
544 | omx_die(r, "Failed to set camera parameter device number");
545 | }
546 | // Configure video format emitted by camera preview output port
547 | OMX_PARAM_PORTDEFINITIONTYPE camera_portdef;
548 | OMX_INIT_STRUCTURE(camera_portdef);
549 | camera_portdef.nPortIndex = 70;
550 | if((r = OMX_GetParameter(ctx.camera, OMX_IndexParamPortDefinition, &camera_portdef)) != OMX_ErrorNone) {
551 | omx_die(r, "Failed to get port definition for camera preview output port 70");
552 | }
553 | camera_portdef.format.video.nFrameWidth = VIDEO_WIDTH;
554 | camera_portdef.format.video.nFrameHeight = VIDEO_HEIGHT;
555 | camera_portdef.format.video.xFramerate = VIDEO_FRAMERATE << 16;
556 | // Stolen from gstomxvideodec.c of gst-omx
557 | camera_portdef.format.video.nStride = (camera_portdef.format.video.nFrameWidth + camera_portdef.nBufferAlignment - 1) & (~(camera_portdef.nBufferAlignment - 1));
558 | camera_portdef.format.video.eColorFormat = OMX_COLOR_FormatYUV420PackedPlanar;
559 | if((r = OMX_SetParameter(ctx.camera, OMX_IndexParamPortDefinition, &camera_portdef)) != OMX_ErrorNone) {
560 | omx_die(r, "Failed to set port definition for camera preview output port 70");
561 | }
562 | // Configure video format emitted by camera video output port
563 | // Use configuration from camera preview output as basis for
564 | // camera video output configuration
565 | OMX_INIT_STRUCTURE(camera_portdef);
566 | camera_portdef.nPortIndex = 70;
567 | if((r = OMX_GetParameter(ctx.camera, OMX_IndexParamPortDefinition, &camera_portdef)) != OMX_ErrorNone) {
568 | omx_die(r, "Failed to get port definition for camera preview output port 70");
569 | }
570 | camera_portdef.nPortIndex = 71;
571 | if((r = OMX_SetParameter(ctx.camera, OMX_IndexParamPortDefinition, &camera_portdef)) != OMX_ErrorNone) {
572 | omx_die(r, "Failed to set port definition for camera video output port 71");
573 | }
574 | // Configure frame rate
575 | OMX_CONFIG_FRAMERATETYPE framerate;
576 | OMX_INIT_STRUCTURE(framerate);
577 | framerate.nPortIndex = 70;
578 | framerate.xEncodeFramerate = camera_portdef.format.video.xFramerate;
579 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigVideoFramerate, &framerate)) != OMX_ErrorNone) {
580 | omx_die(r, "Failed to set framerate configuration for camera preview output port 70");
581 | }
582 | framerate.nPortIndex = 71;
583 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigVideoFramerate, &framerate)) != OMX_ErrorNone) {
584 | omx_die(r, "Failed to set framerate configuration for camera video output port 71");
585 | }
586 | // Configure sharpness
587 | OMX_CONFIG_SHARPNESSTYPE sharpness;
588 | OMX_INIT_STRUCTURE(sharpness);
589 | sharpness.nPortIndex = OMX_ALL;
590 | sharpness.nSharpness = CAM_SHARPNESS;
591 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigCommonSharpness, &sharpness)) != OMX_ErrorNone) {
592 | omx_die(r, "Failed to set camera sharpness configuration");
593 | }
594 | // Configure contrast
595 | OMX_CONFIG_CONTRASTTYPE contrast;
596 | OMX_INIT_STRUCTURE(contrast);
597 | contrast.nPortIndex = OMX_ALL;
598 | contrast.nContrast = CAM_CONTRAST;
599 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigCommonContrast, &contrast)) != OMX_ErrorNone) {
600 | omx_die(r, "Failed to set camera contrast configuration");
601 | }
602 | // Configure saturation
603 | OMX_CONFIG_SATURATIONTYPE saturation;
604 | OMX_INIT_STRUCTURE(saturation);
605 | saturation.nPortIndex = OMX_ALL;
606 | saturation.nSaturation = CAM_SATURATION;
607 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigCommonSaturation, &saturation)) != OMX_ErrorNone) {
608 | omx_die(r, "Failed to set camera saturation configuration");
609 | }
610 | // Configure brightness
611 | OMX_CONFIG_BRIGHTNESSTYPE brightness;
612 | OMX_INIT_STRUCTURE(brightness);
613 | brightness.nPortIndex = OMX_ALL;
614 | brightness.nBrightness = CAM_BRIGHTNESS;
615 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigCommonBrightness, &brightness)) != OMX_ErrorNone) {
616 | omx_die(r, "Failed to set camera brightness configuration");
617 | }
618 | // Configure exposure value
619 | OMX_CONFIG_EXPOSUREVALUETYPE exposure_value;
620 | OMX_INIT_STRUCTURE(exposure_value);
621 | exposure_value.nPortIndex = OMX_ALL;
622 | exposure_value.xEVCompensation = CAM_EXPOSURE_VALUE_COMPENSTAION;
623 | exposure_value.bAutoSensitivity = CAM_EXPOSURE_AUTO_SENSITIVITY;
624 | exposure_value.nSensitivity = CAM_EXPOSURE_ISO_SENSITIVITY;
625 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigCommonExposureValue, &exposure_value)) != OMX_ErrorNone) {
626 | omx_die(r, "Failed to set camera exposure value configuration");
627 | }
628 | // Configure frame frame stabilisation
629 | OMX_CONFIG_FRAMESTABTYPE frame_stabilisation_control;
630 | OMX_INIT_STRUCTURE(frame_stabilisation_control);
631 | frame_stabilisation_control.nPortIndex = OMX_ALL;
632 | frame_stabilisation_control.bStab = CAM_FRAME_STABILISATION;
633 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigCommonFrameStabilisation, &frame_stabilisation_control)) != OMX_ErrorNone) {
634 | omx_die(r, "Failed to set camera frame frame stabilisation control configuration");
635 | }
636 | // Configure frame white balance control
637 | OMX_CONFIG_WHITEBALCONTROLTYPE white_balance_control;
638 | OMX_INIT_STRUCTURE(white_balance_control);
639 | white_balance_control.nPortIndex = OMX_ALL;
640 | white_balance_control.eWhiteBalControl = CAM_WHITE_BALANCE_CONTROL;
641 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigCommonWhiteBalance, &white_balance_control)) != OMX_ErrorNone) {
642 | omx_die(r, "Failed to set camera frame white balance control configuration");
643 | }
644 | // Configure image filter
645 | OMX_CONFIG_IMAGEFILTERTYPE image_filter;
646 | OMX_INIT_STRUCTURE(image_filter);
647 | image_filter.nPortIndex = OMX_ALL;
648 | image_filter.eImageFilter = CAM_IMAGE_FILTER;
649 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigCommonImageFilter, &image_filter)) != OMX_ErrorNone) {
650 | omx_die(r, "Failed to set camera image filter configuration");
651 | }
652 | // Configure mirror
653 | OMX_MIRRORTYPE eMirror = OMX_MirrorNone;
654 | if(CAM_FLIP_HORIZONTAL && !CAM_FLIP_VERTICAL) {
655 | eMirror = OMX_MirrorHorizontal;
656 | } else if(!CAM_FLIP_HORIZONTAL && CAM_FLIP_VERTICAL) {
657 | eMirror = OMX_MirrorVertical;
658 | } else if(CAM_FLIP_HORIZONTAL && CAM_FLIP_VERTICAL) {
659 | eMirror = OMX_MirrorBoth;
660 | }
661 | OMX_CONFIG_MIRRORTYPE mirror;
662 | OMX_INIT_STRUCTURE(mirror);
663 | mirror.nPortIndex = 71;
664 | mirror.eMirror = eMirror;
665 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigCommonMirror, &mirror)) != OMX_ErrorNone) {
666 | omx_die(r, "Failed to set mirror configuration for camera video output port 71");
667 | }
668 |
669 | // Ensure camera is ready
670 | while(!ctx.camera_ready) {
671 | usleep(10000);
672 | }
673 |
674 | say("Configuring encoder...");
675 |
676 | say("Default port definition for encoder input port 200");
677 | dump_port(ctx.encoder, 200, OMX_TRUE);
678 | say("Default port definition for encoder output port 201");
679 | dump_port(ctx.encoder, 201, OMX_TRUE);
680 |
681 | // Encoder input port definition is done automatically upon tunneling
682 |
683 | // Configure video format emitted by encoder output port
684 | OMX_PARAM_PORTDEFINITIONTYPE encoder_portdef;
685 | OMX_INIT_STRUCTURE(encoder_portdef);
686 | encoder_portdef.nPortIndex = 201;
687 | if((r = OMX_GetParameter(ctx.encoder, OMX_IndexParamPortDefinition, &encoder_portdef)) != OMX_ErrorNone) {
688 | omx_die(r, "Failed to get port definition for encoder output port 201");
689 | }
690 | // Copy some of the encoder output port configuration
691 | // from camera output port
692 | encoder_portdef.format.video.nFrameWidth = camera_portdef.format.video.nFrameWidth;
693 | encoder_portdef.format.video.nFrameHeight = camera_portdef.format.video.nFrameHeight;
694 | encoder_portdef.format.video.xFramerate = camera_portdef.format.video.xFramerate;
695 | encoder_portdef.format.video.nStride = camera_portdef.format.video.nStride;
696 | // Which one is effective, this or the configuration just below?
697 | encoder_portdef.format.video.nBitrate = VIDEO_BITRATE;
698 | if((r = OMX_SetParameter(ctx.encoder, OMX_IndexParamPortDefinition, &encoder_portdef)) != OMX_ErrorNone) {
699 | omx_die(r, "Failed to set port definition for encoder output port 201");
700 | }
701 | // Configure bitrate
702 | OMX_VIDEO_PARAM_BITRATETYPE bitrate;
703 | OMX_INIT_STRUCTURE(bitrate);
704 | bitrate.eControlRate = OMX_Video_ControlRateVariable;
705 | bitrate.nTargetBitrate = encoder_portdef.format.video.nBitrate;
706 | bitrate.nPortIndex = 201;
707 | if((r = OMX_SetParameter(ctx.encoder, OMX_IndexParamVideoBitrate, &bitrate)) != OMX_ErrorNone) {
708 | omx_die(r, "Failed to set bitrate for encoder output port 201");
709 | }
710 | // Configure format
711 | OMX_VIDEO_PARAM_PORTFORMATTYPE format;
712 | OMX_INIT_STRUCTURE(format);
713 | format.nPortIndex = 201;
714 | format.eCompressionFormat = OMX_VIDEO_CodingAVC;
715 | if((r = OMX_SetParameter(ctx.encoder, OMX_IndexParamVideoPortFormat, &format)) != OMX_ErrorNone) {
716 | omx_die(r, "Failed to set video format for encoder output port 201");
717 | }
718 |
719 | say("Configuring null sink...");
720 |
721 | say("Default port definition for null sink input port 240");
722 | dump_port(ctx.null_sink, 240, OMX_TRUE);
723 |
724 | // Null sink input port definition is done automatically upon tunneling
725 |
726 | // Tunnel camera preview output port and null sink input port
727 | say("Setting up tunnel from camera preview output port 70 to null sink input port 240...");
728 | if((r = OMX_SetupTunnel(ctx.camera, 70, ctx.null_sink, 240)) != OMX_ErrorNone) {
729 | omx_die(r, "Failed to setup tunnel between camera preview output port 70 and null sink input port 240");
730 | }
731 |
732 | // Tunnel camera video output port and encoder input port
733 | say("Setting up tunnel from camera video output port 71 to encoder input port 200...");
734 | if((r = OMX_SetupTunnel(ctx.camera, 71, ctx.encoder, 200)) != OMX_ErrorNone) {
735 | omx_die(r, "Failed to setup tunnel between camera video output port 71 and encoder input port 200");
736 | }
737 |
738 | // Switch components to idle state
739 | say("Switching state of the camera component to idle...");
740 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandStateSet, OMX_StateIdle, NULL)) != OMX_ErrorNone) {
741 | omx_die(r, "Failed to switch state of the camera component to idle");
742 | }
743 | block_until_state_changed(ctx.camera, OMX_StateIdle);
744 | say("Switching state of the encoder component to idle...");
745 | if((r = OMX_SendCommand(ctx.encoder, OMX_CommandStateSet, OMX_StateIdle, NULL)) != OMX_ErrorNone) {
746 | omx_die(r, "Failed to switch state of the encoder component to idle");
747 | }
748 | block_until_state_changed(ctx.encoder, OMX_StateIdle);
749 | say("Switching state of the null sink component to idle...");
750 | if((r = OMX_SendCommand(ctx.null_sink, OMX_CommandStateSet, OMX_StateIdle, NULL)) != OMX_ErrorNone) {
751 | omx_die(r, "Failed to switch state of the null sink component to idle");
752 | }
753 | block_until_state_changed(ctx.null_sink, OMX_StateIdle);
754 |
755 | // Enable ports
756 | say("Enabling ports...");
757 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandPortEnable, 73, NULL)) != OMX_ErrorNone) {
758 | omx_die(r, "Failed to enable camera input port 73");
759 | }
760 | block_until_port_changed(ctx.camera, 73, OMX_TRUE);
761 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandPortEnable, 70, NULL)) != OMX_ErrorNone) {
762 | omx_die(r, "Failed to enable camera preview output port 70");
763 | }
764 | block_until_port_changed(ctx.camera, 70, OMX_TRUE);
765 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandPortEnable, 71, NULL)) != OMX_ErrorNone) {
766 | omx_die(r, "Failed to enable camera video output port 71");
767 | }
768 | block_until_port_changed(ctx.camera, 71, OMX_TRUE);
769 | if((r = OMX_SendCommand(ctx.encoder, OMX_CommandPortEnable, 200, NULL)) != OMX_ErrorNone) {
770 | omx_die(r, "Failed to enable encoder input port 200");
771 | }
772 | block_until_port_changed(ctx.encoder, 200, OMX_TRUE);
773 | if((r = OMX_SendCommand(ctx.encoder, OMX_CommandPortEnable, 201, NULL)) != OMX_ErrorNone) {
774 | omx_die(r, "Failed to enable encoder output port 201");
775 | }
776 | block_until_port_changed(ctx.encoder, 201, OMX_TRUE);
777 | if((r = OMX_SendCommand(ctx.null_sink, OMX_CommandPortEnable, 240, NULL)) != OMX_ErrorNone) {
778 | omx_die(r, "Failed to enable null sink input port 240");
779 | }
780 | block_until_port_changed(ctx.null_sink, 240, OMX_TRUE);
781 |
782 | // Allocate camera input buffer and encoder output buffer,
783 | // buffers for tunneled ports are allocated internally by OMX
784 | say("Allocating buffers...");
785 | OMX_INIT_STRUCTURE(camera_portdef);
786 | camera_portdef.nPortIndex = 73;
787 | if((r = OMX_GetParameter(ctx.camera, OMX_IndexParamPortDefinition, &camera_portdef)) != OMX_ErrorNone) {
788 | omx_die(r, "Failed to get port definition for camera input port 73");
789 | }
790 | if((r = OMX_AllocateBuffer(ctx.camera, &ctx.camera_ppBuffer_in, 73, NULL, camera_portdef.nBufferSize)) != OMX_ErrorNone) {
791 | omx_die(r, "Failed to allocate buffer for camera input port 73");
792 | }
793 | OMX_INIT_STRUCTURE(encoder_portdef);
794 | encoder_portdef.nPortIndex = 201;
795 | if((r = OMX_GetParameter(ctx.encoder, OMX_IndexParamPortDefinition, &encoder_portdef)) != OMX_ErrorNone) {
796 | omx_die(r, "Failed to get port definition for encoder output port 201");
797 | }
798 | if((r = OMX_AllocateBuffer(ctx.encoder, &ctx.encoder_ppBuffer_out, 201, NULL, encoder_portdef.nBufferSize)) != OMX_ErrorNone) {
799 | omx_die(r, "Failed to allocate buffer for encoder output port 201");
800 | }
801 |
802 | // Just use stdout for output
803 | say("Opening output file...");
804 | ctx.fd_out = stdout;
805 |
806 | // Switch state of the components prior to starting
807 | // the video capture and encoding loop
808 | say("Switching state of the camera component to executing...");
809 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandStateSet, OMX_StateExecuting, NULL)) != OMX_ErrorNone) {
810 | omx_die(r, "Failed to switch state of the camera component to executing");
811 | }
812 | block_until_state_changed(ctx.camera, OMX_StateExecuting);
813 | say("Switching state of the encoder component to executing...");
814 | if((r = OMX_SendCommand(ctx.encoder, OMX_CommandStateSet, OMX_StateExecuting, NULL)) != OMX_ErrorNone) {
815 | omx_die(r, "Failed to switch state of the encoder component to executing");
816 | }
817 | block_until_state_changed(ctx.encoder, OMX_StateExecuting);
818 | say("Switching state of the null sink component to executing...");
819 | if((r = OMX_SendCommand(ctx.null_sink, OMX_CommandStateSet, OMX_StateExecuting, NULL)) != OMX_ErrorNone) {
820 | omx_die(r, "Failed to switch state of the null sink component to executing");
821 | }
822 | block_until_state_changed(ctx.null_sink, OMX_StateExecuting);
823 |
824 | // Start capturing video with the camera
825 | say("Switching on capture on camera video output port 71...");
826 | OMX_CONFIG_PORTBOOLEANTYPE capture;
827 | OMX_INIT_STRUCTURE(capture);
828 | capture.nPortIndex = 71;
829 | capture.bEnabled = OMX_TRUE;
830 | if((r = OMX_SetParameter(ctx.camera, OMX_IndexConfigPortCapturing, &capture)) != OMX_ErrorNone) {
831 | omx_die(r, "Failed to switch on capture on camera video output port 71");
832 | }
833 |
834 | say("Configured port definition for camera input port 73");
835 | dump_port(ctx.camera, 73, OMX_FALSE);
836 | say("Configured port definition for camera preview output port 70");
837 | dump_port(ctx.camera, 70, OMX_FALSE);
838 | say("Configured port definition for camera video output port 71");
839 | dump_port(ctx.camera, 71, OMX_FALSE);
840 | say("Configured port definition for encoder input port 200");
841 | dump_port(ctx.encoder, 200, OMX_FALSE);
842 | say("Configured port definition for encoder output port 201");
843 | dump_port(ctx.encoder, 201, OMX_FALSE);
844 | say("Configured port definition for null sink input port 240");
845 | dump_port(ctx.null_sink, 240, OMX_FALSE);
846 |
847 | say("Enter capture and encode loop, press Ctrl-C to quit...");
848 |
849 | int quit_detected = 0, quit_in_keyframe = 0, need_next_buffer_to_be_filled = 1;
850 | size_t output_written;
851 |
852 | signal(SIGINT, signal_handler);
853 | signal(SIGTERM, signal_handler);
854 | signal(SIGQUIT, signal_handler);
855 |
856 | while(1) {
857 | // fill_output_buffer_done_handler() has marked that there's
858 | // a buffer for us to flush
859 | if(ctx.encoder_output_buffer_available) {
860 | // Print a message if the user wants to quit, but don't exit
861 | // the loop until we are certain that we have processed
862 | // a full frame till end of the frame, i.e. we're at the end
863 | // of the current key frame if processing one or until
864 | // the next key frame is detected. This way we should always
865 | // avoid corruption of the last encoded at the expense of
866 | // small delay in exiting.
867 | if(want_quit && !quit_detected) {
868 | say("Exit signal detected, waiting for next key frame boundry before exiting...");
869 | quit_detected = 1;
870 | quit_in_keyframe = ctx.encoder_ppBuffer_out->nFlags & OMX_BUFFERFLAG_SYNCFRAME;
871 | }
872 | if(quit_detected && (quit_in_keyframe ^ (ctx.encoder_ppBuffer_out->nFlags & OMX_BUFFERFLAG_SYNCFRAME))) {
873 | say("Key frame boundry reached, exiting loop...");
874 | break;
875 | }
876 | // Flush buffer to output file
877 | output_written = fwrite(ctx.encoder_ppBuffer_out->pBuffer + ctx.encoder_ppBuffer_out->nOffset, 1, ctx.encoder_ppBuffer_out->nFilledLen, ctx.fd_out);
878 | if(output_written != ctx.encoder_ppBuffer_out->nFilledLen) {
879 | die("Failed to write to output file: %s", strerror(errno));
880 | }
881 | say("Read from output buffer and wrote to output file %d/%d", ctx.encoder_ppBuffer_out->nFilledLen, ctx.encoder_ppBuffer_out->nAllocLen);
882 | need_next_buffer_to_be_filled = 1;
883 | }
884 | // Buffer flushed, request a new buffer to be filled by the encoder component
885 | if(need_next_buffer_to_be_filled) {
886 | need_next_buffer_to_be_filled = 0;
887 | ctx.encoder_output_buffer_available = 0;
888 | if((r = OMX_FillThisBuffer(ctx.encoder, ctx.encoder_ppBuffer_out)) != OMX_ErrorNone) {
889 | omx_die(r, "Failed to request filling of the output buffer on encoder output port 201");
890 | }
891 | }
892 | // Would be better to use signaling here but hey this works too
893 | usleep(1000);
894 | }
895 | say("Cleaning up...");
896 |
897 | // Restore signal handlers
898 | signal(SIGINT, SIG_DFL);
899 | signal(SIGTERM, SIG_DFL);
900 | signal(SIGQUIT, SIG_DFL);
901 |
902 | // Stop capturing video with the camera
903 | OMX_INIT_STRUCTURE(capture);
904 | capture.nPortIndex = 71;
905 | capture.bEnabled = OMX_FALSE;
906 | if((r = OMX_SetParameter(ctx.camera, OMX_IndexConfigPortCapturing, &capture)) != OMX_ErrorNone) {
907 | omx_die(r, "Failed to switch off capture on camera video output port 71");
908 | }
909 |
910 | // Return the last full buffer back to the encoder component
911 | ctx.encoder_ppBuffer_out->nFlags = OMX_BUFFERFLAG_EOS;
912 | if((r = OMX_FillThisBuffer(ctx.encoder, ctx.encoder_ppBuffer_out)) != OMX_ErrorNone) {
913 | omx_die(r, "Failed to request filling of the output buffer on encoder output port 201");
914 | }
915 |
916 | // Flush the buffers on each component
917 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandFlush, 73, NULL)) != OMX_ErrorNone) {
918 | omx_die(r, "Failed to flush buffers of camera input port 73");
919 | }
920 | block_until_flushed(&ctx);
921 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandFlush, 70, NULL)) != OMX_ErrorNone) {
922 | omx_die(r, "Failed to flush buffers of camera preview output port 70");
923 | }
924 | block_until_flushed(&ctx);
925 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandFlush, 71, NULL)) != OMX_ErrorNone) {
926 | omx_die(r, "Failed to flush buffers of camera video output port 71");
927 | }
928 | block_until_flushed(&ctx);
929 | if((r = OMX_SendCommand(ctx.encoder, OMX_CommandFlush, 200, NULL)) != OMX_ErrorNone) {
930 | omx_die(r, "Failed to flush buffers of encoder input port 200");
931 | }
932 | block_until_flushed(&ctx);
933 | if((r = OMX_SendCommand(ctx.encoder, OMX_CommandFlush, 201, NULL)) != OMX_ErrorNone) {
934 | omx_die(r, "Failed to flush buffers of encoder output port 201");
935 | }
936 | block_until_flushed(&ctx);
937 | if((r = OMX_SendCommand(ctx.null_sink, OMX_CommandFlush, 240, NULL)) != OMX_ErrorNone) {
938 | omx_die(r, "Failed to flush buffers of null sink input port 240");
939 | }
940 | block_until_flushed(&ctx);
941 |
942 | // Disable all the ports
943 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandPortDisable, 73, NULL)) != OMX_ErrorNone) {
944 | omx_die(r, "Failed to disable camera input port 73");
945 | }
946 | block_until_port_changed(ctx.camera, 73, OMX_FALSE);
947 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandPortDisable, 70, NULL)) != OMX_ErrorNone) {
948 | omx_die(r, "Failed to disable camera preview output port 70");
949 | }
950 | block_until_port_changed(ctx.camera, 70, OMX_FALSE);
951 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandPortDisable, 71, NULL)) != OMX_ErrorNone) {
952 | omx_die(r, "Failed to disable camera video output port 71");
953 | }
954 | block_until_port_changed(ctx.camera, 71, OMX_FALSE);
955 | if((r = OMX_SendCommand(ctx.encoder, OMX_CommandPortDisable, 200, NULL)) != OMX_ErrorNone) {
956 | omx_die(r, "Failed to disable encoder input port 200");
957 | }
958 | block_until_port_changed(ctx.encoder, 200, OMX_FALSE);
959 | if((r = OMX_SendCommand(ctx.encoder, OMX_CommandPortDisable, 201, NULL)) != OMX_ErrorNone) {
960 | omx_die(r, "Failed to disable encoder output port 201");
961 | }
962 | block_until_port_changed(ctx.encoder, 201, OMX_FALSE);
963 | if((r = OMX_SendCommand(ctx.null_sink, OMX_CommandPortDisable, 240, NULL)) != OMX_ErrorNone) {
964 | omx_die(r, "Failed to disable null sink input port 240");
965 | }
966 | block_until_port_changed(ctx.null_sink, 240, OMX_FALSE);
967 |
968 | // Free all the buffers
969 | if((r = OMX_FreeBuffer(ctx.camera, 73, ctx.camera_ppBuffer_in)) != OMX_ErrorNone) {
970 | omx_die(r, "Failed to free buffer for camera input port 73");
971 | }
972 | if((r = OMX_FreeBuffer(ctx.encoder, 201, ctx.encoder_ppBuffer_out)) != OMX_ErrorNone) {
973 | omx_die(r, "Failed to free buffer for encoder output port 201");
974 | }
975 |
976 | // Transition all the components to idle and then to loaded states
977 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandStateSet, OMX_StateIdle, NULL)) != OMX_ErrorNone) {
978 | omx_die(r, "Failed to switch state of the camera component to idle");
979 | }
980 | block_until_state_changed(ctx.camera, OMX_StateIdle);
981 | if((r = OMX_SendCommand(ctx.encoder, OMX_CommandStateSet, OMX_StateIdle, NULL)) != OMX_ErrorNone) {
982 | omx_die(r, "Failed to switch state of the encoder component to idle");
983 | }
984 | block_until_state_changed(ctx.encoder, OMX_StateIdle);
985 | if((r = OMX_SendCommand(ctx.null_sink, OMX_CommandStateSet, OMX_StateIdle, NULL)) != OMX_ErrorNone) {
986 | omx_die(r, "Failed to switch state of the null sink component to idle");
987 | }
988 | block_until_state_changed(ctx.null_sink, OMX_StateIdle);
989 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandStateSet, OMX_StateLoaded, NULL)) != OMX_ErrorNone) {
990 | omx_die(r, "Failed to switch state of the camera component to loaded");
991 | }
992 | block_until_state_changed(ctx.camera, OMX_StateLoaded);
993 | if((r = OMX_SendCommand(ctx.encoder, OMX_CommandStateSet, OMX_StateLoaded, NULL)) != OMX_ErrorNone) {
994 | omx_die(r, "Failed to switch state of the encoder component to loaded");
995 | }
996 | block_until_state_changed(ctx.encoder, OMX_StateLoaded);
997 | if((r = OMX_SendCommand(ctx.null_sink, OMX_CommandStateSet, OMX_StateLoaded, NULL)) != OMX_ErrorNone) {
998 | omx_die(r, "Failed to switch state of the null sink component to loaded");
999 | }
1000 | block_until_state_changed(ctx.null_sink, OMX_StateLoaded);
1001 |
1002 | // Free the component handles
1003 | if((r = OMX_FreeHandle(ctx.camera)) != OMX_ErrorNone) {
1004 | omx_die(r, "Failed to free camera component handle");
1005 | }
1006 | if((r = OMX_FreeHandle(ctx.encoder)) != OMX_ErrorNone) {
1007 | omx_die(r, "Failed to free encoder component handle");
1008 | }
1009 | if((r = OMX_FreeHandle(ctx.null_sink)) != OMX_ErrorNone) {
1010 | omx_die(r, "Failed to free null sink component handle");
1011 | }
1012 |
1013 | // Exit
1014 | fclose(ctx.fd_out);
1015 |
1016 | vcos_semaphore_delete(&ctx.handler_lock);
1017 | if((r = OMX_Deinit()) != OMX_ErrorNone) {
1018 | omx_die(r, "OMX de-initalization failed");
1019 | }
1020 |
1021 | say("Exit!");
1022 |
1023 | return 0;
1024 | }
1025 |
--------------------------------------------------------------------------------
/rpi-camera-playback.c:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright © 2013 Tuomas Jormola
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
 *     http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | *
16 | * Short intro about this program:
17 | *
18 | * `rpi-camera-playback` records video using the RaspiCam module and displays it
19 | * on the Raspberry Pi frame buffer display device, i.e. it should be run on the
20 | * Raspbian console.
21 | *
22 | * $ ./rpi-camera-playback
23 | *
24 | * `rpi-camera-playback` uses `camera`, `video_render` and `null_sink` components.
25 | * `camera` video output port is tunneled to `video_render` input port and
26 | * `camera` preview output port is tunneled to `null_sink` input port.
27 | * `video_render` component uses a display region to show the video on local
28 | * display.
29 | *
30 | * Please see README.mdwn for more detailed description of this
31 | * OpenMAX IL demos for Raspberry Pi bundle.
32 | *
33 | */
34 |
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <stdarg.h>
#include <signal.h>
#include <unistd.h>

#include <bcm_host.h>

#include <interface/vcos/vcos_semaphore.h>
#include <interface/vmcs_host/vchost.h>

#include <IL/OMX_Core.h>
#include <IL/OMX_Component.h>
#include <IL/OMX_Video.h>
#include <IL/OMX_Broadcom.h>
51 |
// Hard coded parameters for the camera and the local display
#define VIDEO_FRAMERATE 25 // frames per second
#define VIDEO_BITRATE 10000000 // bits per second
#define CAM_DEVICE_NUMBER 0 // camera device index
#define CAM_SHARPNESS 0 // -100 .. 100
#define CAM_CONTRAST 0 // -100 .. 100
#define CAM_BRIGHTNESS 50 // 0 .. 100
#define CAM_SATURATION 0 // -100 .. 100
// NOTE(review): "COMPENSTAION" is a typo for "COMPENSATION"; kept as-is
// because the identifier is referenced elsewhere in this file
#define CAM_EXPOSURE_VALUE_COMPENSTAION 0
#define CAM_EXPOSURE_ISO_SENSITIVITY 100 // ISO used when auto sensitivity is disabled
#define CAM_EXPOSURE_AUTO_SENSITIVITY OMX_FALSE // OMX_TRUE = camera picks ISO itself
#define CAM_FRAME_STABILISATION OMX_TRUE
#define CAM_WHITE_BALANCE_CONTROL OMX_WhiteBalControlAuto // OMX_WHITEBALCONTROLTYPE
#define CAM_IMAGE_FILTER OMX_ImageFilterNoise // OMX_IMAGEFILTERTYPE
#define CAM_FLIP_HORIZONTAL OMX_FALSE
#define CAM_FLIP_VERTICAL OMX_FALSE
#define DISPLAY_DEVICE 0 // display to render on — presumably the default display; TODO confirm
69 |
// Initialize an OMX IL structure: zero it, then fill in the nSize/nVersion
// header every OMX struct carries. Wrapped in do { } while(0) so the macro
// expands to a single statement and is safe inside an unbraced if/else
// (the original multi-statement form would silently split there).
#define OMX_INIT_STRUCTURE(a) \
    do { \
        memset(&(a), 0, sizeof(a)); \
        (a).nSize = sizeof(a); \
        (a).nVersion.nVersion = OMX_VERSION; \
        (a).nVersion.s.nVersionMajor = OMX_VERSION_MAJOR; \
        (a).nVersion.s.nVersionMinor = OMX_VERSION_MINOR; \
        (a).nVersion.s.nRevision = OMX_VERSION_REVISION; \
        (a).nVersion.s.nStep = OMX_VERSION_STEP; \
    } while(0)
79 |
// Global flag set by the signal handler and polled by the main loop.
// volatile sig_atomic_t is the only object type the C standard guarantees
// may be safely written from an asynchronous signal handler and read from
// normal code (plain int is formally a data race).
static volatile sig_atomic_t want_quit = 0;
82 |
// Our application context passed around
// the main routine and callback handlers
typedef struct {
    OMX_HANDLETYPE camera;                    // camera component handle
    OMX_BUFFERHEADERTYPE *camera_ppBuffer_in; // buffer header for the camera input port
    int camera_ready;                         // nonzero once the camera reports ready — presumably set from the OMX event handler (outside this excerpt)
    OMX_HANDLETYPE render;                    // video_render component handle (camera video output is tunneled here)
    OMX_HANDLETYPE null_sink;                 // null_sink component handle (camera preview output is tunneled here)
    int flushed;                              // nonzero when a port flush has completed — presumably set from the OMX event handler; verify against handler code
    VCOS_SEMAPHORE_T handler_lock;            // guards the flags above against concurrent access from OMX callbacks
} appctx;
94 |
95 | // Ugly, stupid utility functions
// Format a message printf-style and print it to stderr, appending a
// trailing newline if the message does not already end in one.
static void say(const char* message, ...) {
    va_list args;
    char str[1024];
    memset(str, 0, sizeof(str));
    va_start(args, message);
    // Format into size-1 so one byte is always left for the appended '\n'
    vsnprintf(str, sizeof(str) - 1, message, args);
    va_end(args);
    size_t str_len = strnlen(str, sizeof(str));
    // Guard against an empty message: the original read str[-1] here
    if(str_len > 0 && str[str_len - 1] != '\n') {
        str[str_len] = '\n';
    }
    // fputs, not fprintf(stderr, str): the formatted text may contain '%'
    // from caller-supplied data, which fprintf would re-interpret as
    // conversion specifiers (undefined behavior / format-string bug)
    fputs(str, stderr);
}
109 |
// Format an error message printf-style, print it via say() and terminate
// the process with exit status 1. Never returns.
static void die(const char* message, ...) {
    va_list args;
    char str[1024];
    memset(str, 0, sizeof(str));
    va_start(args, message);
    vsnprintf(str, sizeof(str), message, args);
    va_end(args);
    // Pass the already-formatted text as an argument, not as the format
    // string: say(str) would re-interpret any '%' present in caller data
    say("%s", str);
    exit(1);
}
120 |
121 | static void omx_die(OMX_ERRORTYPE error, const char* message, ...) {
122 | va_list args;
123 | char str[1024];
124 | char *e;
125 | memset(str, 0, sizeof(str));
126 | va_start(args, message);
127 | vsnprintf(str, sizeof(str), message, args);
128 | va_end(args);
129 | switch(error) {
130 | case OMX_ErrorNone: e = "no error"; break;
131 | case OMX_ErrorBadParameter: e = "bad parameter"; break;
132 | case OMX_ErrorIncorrectStateOperation: e = "invalid state while trying to perform command"; break;
133 | case OMX_ErrorIncorrectStateTransition: e = "unallowed state transition"; break;
134 | case OMX_ErrorInsufficientResources: e = "insufficient resource"; break;
135 | case OMX_ErrorBadPortIndex: e = "bad port index, i.e. incorrect port"; break;
136 | case OMX_ErrorHardware: e = "hardware error"; break;
137 | /* That's all I've encountered during hacking so let's not bother with the rest... */
138 | default: e = "(no description)";
139 | }
140 | die("OMX error: %s: 0x%08x %s", str, error, e);
141 | }
142 |
143 | static void dump_event(OMX_HANDLETYPE hComponent, OMX_EVENTTYPE eEvent, OMX_U32 nData1, OMX_U32 nData2) {
144 | char *e;
145 | switch(eEvent) {
146 | case OMX_EventCmdComplete: e = "command complete"; break;
147 | case OMX_EventError: e = "error"; break;
148 | case OMX_EventParamOrConfigChanged: e = "parameter or configuration changed"; break;
149 | case OMX_EventPortSettingsChanged: e = "port settings changed"; break;
150 | /* That's all I've encountered during hacking so let's not bother with the rest... */
151 | default:
152 | e = "(no description)";
153 | }
154 | say("Received event 0x%08x %s, hComponent:0x%08x, nData1:0x%08x, nData2:0x%08x",
155 | eEvent, e, hComponent, nData1, nData2);
156 | }
157 |
158 | static const char* dump_compression_format(OMX_VIDEO_CODINGTYPE c) {
159 | char *f;
160 | switch(c) {
161 | case OMX_VIDEO_CodingUnused: return "not used";
162 | case OMX_VIDEO_CodingAutoDetect: return "autodetect";
163 | case OMX_VIDEO_CodingMPEG2: return "MPEG2";
164 | case OMX_VIDEO_CodingH263: return "H.263";
165 | case OMX_VIDEO_CodingMPEG4: return "MPEG4";
166 | case OMX_VIDEO_CodingWMV: return "Windows Media Video";
167 | case OMX_VIDEO_CodingRV: return "RealVideo";
168 | case OMX_VIDEO_CodingAVC: return "H.264/AVC";
169 | case OMX_VIDEO_CodingMJPEG: return "Motion JPEG";
170 | case OMX_VIDEO_CodingVP6: return "VP6";
171 | case OMX_VIDEO_CodingVP7: return "VP7";
172 | case OMX_VIDEO_CodingVP8: return "VP8";
173 | case OMX_VIDEO_CodingYUV: return "Raw YUV video";
174 | case OMX_VIDEO_CodingSorenson: return "Sorenson";
175 | case OMX_VIDEO_CodingTheora: return "OGG Theora";
176 | case OMX_VIDEO_CodingMVC: return "H.264/MVC";
177 |
178 | default:
179 | f = calloc(23, sizeof(char));
180 | if(f == NULL) {
181 | die("Failed to allocate memory");
182 | }
183 | snprintf(f, 23 * sizeof(char) - 1, "format type 0x%08x", c);
184 | return f;
185 | }
186 | }
187 | static const char* dump_color_format(OMX_COLOR_FORMATTYPE c) {
188 | char *f;
189 | switch(c) {
190 | case OMX_COLOR_FormatUnused: return "OMX_COLOR_FormatUnused: not used";
191 | case OMX_COLOR_FormatMonochrome: return "OMX_COLOR_FormatMonochrome";
192 | case OMX_COLOR_Format8bitRGB332: return "OMX_COLOR_Format8bitRGB332";
193 | case OMX_COLOR_Format12bitRGB444: return "OMX_COLOR_Format12bitRGB444";
194 | case OMX_COLOR_Format16bitARGB4444: return "OMX_COLOR_Format16bitARGB4444";
195 | case OMX_COLOR_Format16bitARGB1555: return "OMX_COLOR_Format16bitARGB1555";
196 | case OMX_COLOR_Format16bitRGB565: return "OMX_COLOR_Format16bitRGB565";
197 | case OMX_COLOR_Format16bitBGR565: return "OMX_COLOR_Format16bitBGR565";
198 | case OMX_COLOR_Format18bitRGB666: return "OMX_COLOR_Format18bitRGB666";
199 | case OMX_COLOR_Format18bitARGB1665: return "OMX_COLOR_Format18bitARGB1665";
200 | case OMX_COLOR_Format19bitARGB1666: return "OMX_COLOR_Format19bitARGB1666";
201 | case OMX_COLOR_Format24bitRGB888: return "OMX_COLOR_Format24bitRGB888";
202 | case OMX_COLOR_Format24bitBGR888: return "OMX_COLOR_Format24bitBGR888";
203 | case OMX_COLOR_Format24bitARGB1887: return "OMX_COLOR_Format24bitARGB1887";
204 | case OMX_COLOR_Format25bitARGB1888: return "OMX_COLOR_Format25bitARGB1888";
205 | case OMX_COLOR_Format32bitBGRA8888: return "OMX_COLOR_Format32bitBGRA8888";
206 | case OMX_COLOR_Format32bitARGB8888: return "OMX_COLOR_Format32bitARGB8888";
207 | case OMX_COLOR_FormatYUV411Planar: return "OMX_COLOR_FormatYUV411Planar";
208 | case OMX_COLOR_FormatYUV411PackedPlanar: return "OMX_COLOR_FormatYUV411PackedPlanar: Planes fragmented when a frame is split in multiple buffers";
209 | case OMX_COLOR_FormatYUV420Planar: return "OMX_COLOR_FormatYUV420Planar: Planar YUV, 4:2:0 (I420)";
210 | case OMX_COLOR_FormatYUV420PackedPlanar: return "OMX_COLOR_FormatYUV420PackedPlanar: Planar YUV, 4:2:0 (I420), planes fragmented when a frame is split in multiple buffers";
211 | case OMX_COLOR_FormatYUV420SemiPlanar: return "OMX_COLOR_FormatYUV420SemiPlanar, Planar YUV, 4:2:0 (NV12), U and V planes interleaved with first U value";
212 | case OMX_COLOR_FormatYUV422Planar: return "OMX_COLOR_FormatYUV422Planar";
213 | case OMX_COLOR_FormatYUV422PackedPlanar: return "OMX_COLOR_FormatYUV422PackedPlanar: Planes fragmented when a frame is split in multiple buffers";
214 | case OMX_COLOR_FormatYUV422SemiPlanar: return "OMX_COLOR_FormatYUV422SemiPlanar";
215 | case OMX_COLOR_FormatYCbYCr: return "OMX_COLOR_FormatYCbYCr";
216 | case OMX_COLOR_FormatYCrYCb: return "OMX_COLOR_FormatYCrYCb";
217 | case OMX_COLOR_FormatCbYCrY: return "OMX_COLOR_FormatCbYCrY";
218 | case OMX_COLOR_FormatCrYCbY: return "OMX_COLOR_FormatCrYCbY";
219 | case OMX_COLOR_FormatYUV444Interleaved: return "OMX_COLOR_FormatYUV444Interleaved";
220 | case OMX_COLOR_FormatRawBayer8bit: return "OMX_COLOR_FormatRawBayer8bit";
221 | case OMX_COLOR_FormatRawBayer10bit: return "OMX_COLOR_FormatRawBayer10bit";
222 | case OMX_COLOR_FormatRawBayer8bitcompressed: return "OMX_COLOR_FormatRawBayer8bitcompressed";
223 | case OMX_COLOR_FormatL2: return "OMX_COLOR_FormatL2";
224 | case OMX_COLOR_FormatL4: return "OMX_COLOR_FormatL4";
225 | case OMX_COLOR_FormatL8: return "OMX_COLOR_FormatL8";
226 | case OMX_COLOR_FormatL16: return "OMX_COLOR_FormatL16";
227 | case OMX_COLOR_FormatL24: return "OMX_COLOR_FormatL24";
228 | case OMX_COLOR_FormatL32: return "OMX_COLOR_FormatL32";
229 | case OMX_COLOR_FormatYUV420PackedSemiPlanar: return "OMX_COLOR_FormatYUV420PackedSemiPlanar: Planar YUV, 4:2:0 (NV12), planes fragmented when a frame is split in multiple buffers, U and V planes interleaved with first U value";
230 | case OMX_COLOR_FormatYUV422PackedSemiPlanar: return "OMX_COLOR_FormatYUV422PackedSemiPlanar: Planes fragmented when a frame is split in multiple buffers";
231 | case OMX_COLOR_Format18BitBGR666: return "OMX_COLOR_Format18BitBGR666";
232 | case OMX_COLOR_Format24BitARGB6666: return "OMX_COLOR_Format24BitARGB6666";
233 | case OMX_COLOR_Format24BitABGR6666: return "OMX_COLOR_Format24BitABGR6666";
234 | case OMX_COLOR_Format32bitABGR8888: return "OMX_COLOR_Format32bitABGR8888";
235 | case OMX_COLOR_Format8bitPalette: return "OMX_COLOR_Format8bitPalette";
236 | case OMX_COLOR_FormatYUVUV128: return "OMX_COLOR_FormatYUVUV128";
237 | case OMX_COLOR_FormatRawBayer12bit: return "OMX_COLOR_FormatRawBayer12bit";
238 | case OMX_COLOR_FormatBRCMEGL: return "OMX_COLOR_FormatBRCMEGL";
239 | case OMX_COLOR_FormatBRCMOpaque: return "OMX_COLOR_FormatBRCMOpaque";
240 | case OMX_COLOR_FormatYVU420PackedPlanar: return "OMX_COLOR_FormatYVU420PackedPlanar";
241 | case OMX_COLOR_FormatYVU420PackedSemiPlanar: return "OMX_COLOR_FormatYVU420PackedSemiPlanar";
242 | default:
243 | f = calloc(23, sizeof(char));
244 | if(f == NULL) {
245 | die("Failed to allocate memory");
246 | }
247 | snprintf(f, 23 * sizeof(char) - 1, "format type 0x%08x", c);
248 | return f;
249 | }
250 | }
251 |
252 | static void dump_portdef(OMX_PARAM_PORTDEFINITIONTYPE* portdef) {
253 | say("Port %d is %s, %s, buffers wants:%d needs:%d, size:%d, pop:%d, aligned:%d",
254 | portdef->nPortIndex,
255 | (portdef->eDir == OMX_DirInput ? "input" : "output"),
256 | (portdef->bEnabled == OMX_TRUE ? "enabled" : "disabled"),
257 | portdef->nBufferCountActual,
258 | portdef->nBufferCountMin,
259 | portdef->nBufferSize,
260 | portdef->bPopulated,
261 | portdef->nBufferAlignment);
262 |
263 | OMX_VIDEO_PORTDEFINITIONTYPE *viddef = &portdef->format.video;
264 | OMX_IMAGE_PORTDEFINITIONTYPE *imgdef = &portdef->format.image;
265 | switch(portdef->eDomain) {
266 | case OMX_PortDomainVideo:
267 | say("Video type:\n"
268 | "\tWidth:\t\t%d\n"
269 | "\tHeight:\t\t%d\n"
270 | "\tStride:\t\t%d\n"
271 | "\tSliceHeight:\t%d\n"
272 | "\tBitrate:\t%d\n"
273 | "\tFramerate:\t%.02f\n"
274 | "\tError hiding:\t%s\n"
275 | "\tCodec:\t\t%s\n"
276 | "\tColor:\t\t%s\n",
277 | viddef->nFrameWidth,
278 | viddef->nFrameHeight,
279 | viddef->nStride,
280 | viddef->nSliceHeight,
281 | viddef->nBitrate,
282 | ((float)viddef->xFramerate / (float)65536),
283 | (viddef->bFlagErrorConcealment == OMX_TRUE ? "yes" : "no"),
284 | dump_compression_format(viddef->eCompressionFormat),
285 | dump_color_format(viddef->eColorFormat));
286 | break;
287 | case OMX_PortDomainImage:
288 | say("Image type:\n"
289 | "\tWidth:\t\t%d\n"
290 | "\tHeight:\t\t%d\n"
291 | "\tStride:\t\t%d\n"
292 | "\tSliceHeight:\t%d\n"
293 | "\tError hiding:\t%s\n"
294 | "\tCodec:\t\t%s\n"
295 | "\tColor:\t\t%s\n",
296 | imgdef->nFrameWidth,
297 | imgdef->nFrameHeight,
298 | imgdef->nStride,
299 | imgdef->nSliceHeight,
300 | (imgdef->bFlagErrorConcealment == OMX_TRUE ? "yes" : "no"),
301 | dump_compression_format(imgdef->eCompressionFormat),
302 | dump_color_format(imgdef->eColorFormat));
303 | break;
304 | default:
305 | break;
306 | }
307 | }
308 |
309 | static void dump_port(OMX_HANDLETYPE hComponent, OMX_U32 nPortIndex, OMX_BOOL dumpformats) {
310 | OMX_ERRORTYPE r;
311 | OMX_PARAM_PORTDEFINITIONTYPE portdef;
312 | OMX_INIT_STRUCTURE(portdef);
313 | portdef.nPortIndex = nPortIndex;
314 | if((r = OMX_GetParameter(hComponent, OMX_IndexParamPortDefinition, &portdef)) != OMX_ErrorNone) {
315 | omx_die(r, "Failed to get port definition for port %d", nPortIndex);
316 | }
317 | dump_portdef(&portdef);
318 | if(dumpformats) {
319 | OMX_VIDEO_PARAM_PORTFORMATTYPE portformat;
320 | OMX_INIT_STRUCTURE(portformat);
321 | portformat.nPortIndex = nPortIndex;
322 | portformat.nIndex = 0;
323 | r = OMX_ErrorNone;
324 | say("Port %d supports these video formats:", nPortIndex);
325 | while(r == OMX_ErrorNone) {
326 | if((r = OMX_GetParameter(hComponent, OMX_IndexParamVideoPortFormat, &portformat)) == OMX_ErrorNone) {
327 | say("\t%s, compression: %s", dump_color_format(portformat.eColorFormat), dump_compression_format(portformat.eCompressionFormat));
328 | portformat.nIndex++;
329 | }
330 | }
331 | }
332 | }
333 |
334 | // Some busy loops to verify we're running in order
335 | static void block_until_state_changed(OMX_HANDLETYPE hComponent, OMX_STATETYPE wanted_eState) {
336 | OMX_STATETYPE eState;
337 | int i = 0;
338 | while(i++ == 0 || eState != wanted_eState) {
339 | OMX_GetState(hComponent, &eState);
340 | if(eState != wanted_eState) {
341 | usleep(10000);
342 | }
343 | }
344 | }
345 |
346 | static void block_until_port_changed(OMX_HANDLETYPE hComponent, OMX_U32 nPortIndex, OMX_BOOL bEnabled) {
347 | OMX_ERRORTYPE r;
348 | OMX_PARAM_PORTDEFINITIONTYPE portdef;
349 | OMX_INIT_STRUCTURE(portdef);
350 | portdef.nPortIndex = nPortIndex;
351 | OMX_U32 i = 0;
352 | while(i++ == 0 || portdef.bEnabled != bEnabled) {
353 | if((r = OMX_GetParameter(hComponent, OMX_IndexParamPortDefinition, &portdef)) != OMX_ErrorNone) {
354 | omx_die(r, "Failed to get port definition");
355 | }
356 | if(portdef.bEnabled != bEnabled) {
357 | usleep(10000);
358 | }
359 | }
360 | }
361 |
362 | static void block_until_flushed(appctx *ctx) {
363 | int quit;
364 | while(!quit) {
365 | vcos_semaphore_wait(&ctx->handler_lock);
366 | if(ctx->flushed) {
367 | ctx->flushed = 0;
368 | quit = 1;
369 | }
370 | vcos_semaphore_post(&ctx->handler_lock);
371 | if(!quit) {
372 | usleep(10000);
373 | }
374 | }
375 | }
376 |
377 | static void init_component_handle(
378 | const char *name,
379 | OMX_HANDLETYPE* hComponent,
380 | OMX_PTR pAppData,
381 | OMX_CALLBACKTYPE* callbacks) {
382 | OMX_ERRORTYPE r;
383 | char fullname[32];
384 |
385 | // Get handle
386 | memset(fullname, 0, sizeof(fullname));
387 | strcat(fullname, "OMX.broadcom.");
388 | strncat(fullname, name, strlen(fullname) - 1);
389 | say("Initializing component %s", fullname);
390 | if((r = OMX_GetHandle(hComponent, fullname, pAppData, callbacks)) != OMX_ErrorNone) {
391 | omx_die(r, "Failed to get handle for component %s", fullname);
392 | }
393 |
394 | // Disable ports
395 | OMX_INDEXTYPE types[] = {
396 | OMX_IndexParamAudioInit,
397 | OMX_IndexParamVideoInit,
398 | OMX_IndexParamImageInit,
399 | OMX_IndexParamOtherInit
400 | };
401 | OMX_PORT_PARAM_TYPE ports;
402 | OMX_INIT_STRUCTURE(ports);
403 | OMX_GetParameter(*hComponent, OMX_IndexParamVideoInit, &ports);
404 |
405 | int i;
406 | for(i = 0; i < 4; i++) {
407 | if(OMX_GetParameter(*hComponent, types[i], &ports) == OMX_ErrorNone) {
408 | OMX_U32 nPortIndex;
409 | for(nPortIndex = ports.nStartPortNumber; nPortIndex < ports.nStartPortNumber + ports.nPorts; nPortIndex++) {
410 | say("Disabling port %d of component %s", nPortIndex, fullname);
411 | if((r = OMX_SendCommand(*hComponent, OMX_CommandPortDisable, nPortIndex, NULL)) != OMX_ErrorNone) {
412 | omx_die(r, "Failed to disable port %d of component %s", nPortIndex, fullname);
413 | }
414 | block_until_port_changed(*hComponent, nPortIndex, OMX_FALSE);
415 | }
416 | }
417 | }
418 | }
419 |
420 | // Global signal handler for trapping SIGINT, SIGTERM, and SIGQUIT
421 | static void signal_handler(int signal) {
422 | want_quit = 1;
423 | }
424 |
425 | // OMX calls this handler for all the events it emits
426 | static OMX_ERRORTYPE event_handler(
427 | OMX_HANDLETYPE hComponent,
428 | OMX_PTR pAppData,
429 | OMX_EVENTTYPE eEvent,
430 | OMX_U32 nData1,
431 | OMX_U32 nData2,
432 | OMX_PTR pEventData) {
433 |
434 | dump_event(hComponent, eEvent, nData1, nData2);
435 |
436 | appctx *ctx = (appctx *)pAppData;
437 |
438 | switch(eEvent) {
439 | case OMX_EventCmdComplete:
440 | vcos_semaphore_wait(&ctx->handler_lock);
441 | if(nData1 == OMX_CommandFlush) {
442 | ctx->flushed = 1;
443 | }
444 | vcos_semaphore_post(&ctx->handler_lock);
445 | break;
446 | case OMX_EventParamOrConfigChanged:
447 | vcos_semaphore_wait(&ctx->handler_lock);
448 | if(nData2 == OMX_IndexParamCameraDeviceNumber) {
449 | ctx->camera_ready = 1;
450 | }
451 | vcos_semaphore_post(&ctx->handler_lock);
452 | break;
453 | case OMX_EventError:
454 | omx_die(nData1, "error event received");
455 | break;
456 | default:
457 | break;
458 | }
459 |
460 | return OMX_ErrorNone;
461 | }
462 |
463 | int main(int argc, char **argv) {
464 | bcm_host_init();
465 |
466 | OMX_ERRORTYPE r;
467 |
468 | if((r = OMX_Init()) != OMX_ErrorNone) {
469 | omx_die(r, "OMX initalization failed");
470 | }
471 |
472 | // Init context
473 | appctx ctx;
474 | memset(&ctx, 0, sizeof(ctx));
475 | if(vcos_semaphore_create(&ctx.handler_lock, "handler_lock", 1) != VCOS_SUCCESS) {
476 | die("Failed to create handler lock semaphore");
477 | }
478 |
479 | // Init component handles
480 | OMX_CALLBACKTYPE callbacks;
481 | memset(&ctx, 0, sizeof(callbacks));
482 | callbacks.EventHandler = event_handler;
483 |
484 | init_component_handle("camera", &ctx.camera , &ctx, &callbacks);
485 | init_component_handle("video_render", &ctx.render, &ctx, &callbacks);
486 | init_component_handle("null_sink", &ctx.null_sink, &ctx, &callbacks);
487 |
488 | OMX_U32 screen_width = 0, screen_height = 0;
489 | if(graphics_get_display_size(DISPLAY_DEVICE, &screen_width, &screen_height) < 0) {
490 | die("Failed to get display size");
491 | }
492 |
493 | say("Configuring camera...");
494 |
495 | say("Default port definition for camera input port 73");
496 | dump_port(ctx.camera, 73, OMX_TRUE);
497 | say("Default port definition for camera preview output port 70");
498 | dump_port(ctx.camera, 70, OMX_TRUE);
499 | say("Default port definition for camera video output port 71");
500 | dump_port(ctx.camera, 71, OMX_TRUE);
501 |
502 | // Request a callback to be made when OMX_IndexParamCameraDeviceNumber is
503 | // changed signaling that the camera device is ready for use.
504 | OMX_CONFIG_REQUESTCALLBACKTYPE cbtype;
505 | OMX_INIT_STRUCTURE(cbtype);
506 | cbtype.nPortIndex = OMX_ALL;
507 | cbtype.nIndex = OMX_IndexParamCameraDeviceNumber;
508 | cbtype.bEnable = OMX_TRUE;
509 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigRequestCallback, &cbtype)) != OMX_ErrorNone) {
510 | omx_die(r, "Failed to request camera device number parameter change callback for camera");
511 | }
512 | // Set device number, this triggers the callback configured just above
513 | OMX_PARAM_U32TYPE device;
514 | OMX_INIT_STRUCTURE(device);
515 | device.nPortIndex = OMX_ALL;
516 | device.nU32 = CAM_DEVICE_NUMBER;
517 | if((r = OMX_SetParameter(ctx.camera, OMX_IndexParamCameraDeviceNumber, &device)) != OMX_ErrorNone) {
518 | omx_die(r, "Failed to set camera parameter device number");
519 | }
520 | // Configure video format emitted by camera preview output port
521 | OMX_PARAM_PORTDEFINITIONTYPE camera_portdef;
522 | OMX_INIT_STRUCTURE(camera_portdef);
523 | camera_portdef.nPortIndex = 70;
524 | if((r = OMX_GetParameter(ctx.camera, OMX_IndexParamPortDefinition, &camera_portdef)) != OMX_ErrorNone) {
525 | omx_die(r, "Failed to get port definition for camera preview output port 70");
526 | }
527 | camera_portdef.format.video.nFrameWidth = screen_width / 2;
528 | camera_portdef.format.video.nFrameHeight = screen_height / 2;
529 | camera_portdef.format.video.xFramerate = VIDEO_FRAMERATE << 16;
530 | // Stolen from gstomxvideodec.c of gst-omx
531 | camera_portdef.format.video.nStride = (camera_portdef.format.video.nFrameWidth + camera_portdef.nBufferAlignment - 1) & (~(camera_portdef.nBufferAlignment - 1));
532 | camera_portdef.format.video.eColorFormat = OMX_COLOR_FormatYUV420PackedPlanar;
533 | if((r = OMX_SetParameter(ctx.camera, OMX_IndexParamPortDefinition, &camera_portdef)) != OMX_ErrorNone) {
534 | omx_die(r, "Failed to set port definition for camera preview output port 70");
535 | }
536 | // Configure video format emitted by camera video output port
537 | // Use configuration from camera preview output as basis for
538 | // camera video output configuration
539 | OMX_INIT_STRUCTURE(camera_portdef);
540 | camera_portdef.nPortIndex = 70;
541 | if((r = OMX_GetParameter(ctx.camera, OMX_IndexParamPortDefinition, &camera_portdef)) != OMX_ErrorNone) {
542 | omx_die(r, "Failed to get port definition for camera preview output port 70");
543 | }
544 | camera_portdef.nPortIndex = 71;
545 | if((r = OMX_SetParameter(ctx.camera, OMX_IndexParamPortDefinition, &camera_portdef)) != OMX_ErrorNone) {
546 | omx_die(r, "Failed to set port definition for camera video output port 71");
547 | }
548 | // Configure frame rate
549 | OMX_CONFIG_FRAMERATETYPE framerate;
550 | OMX_INIT_STRUCTURE(framerate);
551 | framerate.nPortIndex = 70;
552 | framerate.xEncodeFramerate = camera_portdef.format.video.xFramerate;
553 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigVideoFramerate, &framerate)) != OMX_ErrorNone) {
554 | omx_die(r, "Failed to set framerate configuration for camera preview output port 70");
555 | }
556 | framerate.nPortIndex = 71;
557 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigVideoFramerate, &framerate)) != OMX_ErrorNone) {
558 | omx_die(r, "Failed to set framerate configuration for camera video output port 71");
559 | }
560 | // Configure sharpness
561 | OMX_CONFIG_SHARPNESSTYPE sharpness;
562 | OMX_INIT_STRUCTURE(sharpness);
563 | sharpness.nPortIndex = OMX_ALL;
564 | sharpness.nSharpness = CAM_SHARPNESS;
565 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigCommonSharpness, &sharpness)) != OMX_ErrorNone) {
566 | omx_die(r, "Failed to set camera sharpness configuration");
567 | }
568 | // Configure contrast
569 | OMX_CONFIG_CONTRASTTYPE contrast;
570 | OMX_INIT_STRUCTURE(contrast);
571 | contrast.nPortIndex = OMX_ALL;
572 | contrast.nContrast = CAM_CONTRAST;
573 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigCommonContrast, &contrast)) != OMX_ErrorNone) {
574 | omx_die(r, "Failed to set camera contrast configuration");
575 | }
576 | // Configure saturation
577 | OMX_CONFIG_SATURATIONTYPE saturation;
578 | OMX_INIT_STRUCTURE(saturation);
579 | saturation.nPortIndex = OMX_ALL;
580 | saturation.nSaturation = CAM_SATURATION;
581 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigCommonSaturation, &saturation)) != OMX_ErrorNone) {
582 | omx_die(r, "Failed to set camera saturation configuration");
583 | }
584 | // Configure brightness
585 | OMX_CONFIG_BRIGHTNESSTYPE brightness;
586 | OMX_INIT_STRUCTURE(brightness);
587 | brightness.nPortIndex = OMX_ALL;
588 | brightness.nBrightness = CAM_BRIGHTNESS;
589 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigCommonBrightness, &brightness)) != OMX_ErrorNone) {
590 | omx_die(r, "Failed to set camera brightness configuration");
591 | }
592 | // Configure exposure value
593 | OMX_CONFIG_EXPOSUREVALUETYPE exposure_value;
594 | OMX_INIT_STRUCTURE(exposure_value);
595 | exposure_value.nPortIndex = OMX_ALL;
596 | exposure_value.xEVCompensation = CAM_EXPOSURE_VALUE_COMPENSTAION;
597 | exposure_value.bAutoSensitivity = CAM_EXPOSURE_AUTO_SENSITIVITY;
598 | exposure_value.nSensitivity = CAM_EXPOSURE_ISO_SENSITIVITY;
599 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigCommonExposureValue, &exposure_value)) != OMX_ErrorNone) {
600 | omx_die(r, "Failed to set camera exposure value configuration");
601 | }
602 | // Configure frame frame stabilisation
603 | OMX_CONFIG_FRAMESTABTYPE frame_stabilisation_control;
604 | OMX_INIT_STRUCTURE(frame_stabilisation_control);
605 | frame_stabilisation_control.nPortIndex = OMX_ALL;
606 | frame_stabilisation_control.bStab = CAM_FRAME_STABILISATION;
607 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigCommonFrameStabilisation, &frame_stabilisation_control)) != OMX_ErrorNone) {
608 | omx_die(r, "Failed to set camera frame frame stabilisation control configuration");
609 | }
610 | // Configure frame white balance control
611 | OMX_CONFIG_WHITEBALCONTROLTYPE white_balance_control;
612 | OMX_INIT_STRUCTURE(white_balance_control);
613 | white_balance_control.nPortIndex = OMX_ALL;
614 | white_balance_control.eWhiteBalControl = CAM_WHITE_BALANCE_CONTROL;
615 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigCommonWhiteBalance, &white_balance_control)) != OMX_ErrorNone) {
616 | omx_die(r, "Failed to set camera frame white balance control configuration");
617 | }
618 | // Configure image filter
619 | OMX_CONFIG_IMAGEFILTERTYPE image_filter;
620 | OMX_INIT_STRUCTURE(image_filter);
621 | image_filter.nPortIndex = OMX_ALL;
622 | image_filter.eImageFilter = CAM_IMAGE_FILTER;
623 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigCommonImageFilter, &image_filter)) != OMX_ErrorNone) {
624 | omx_die(r, "Failed to set camera image filter configuration");
625 | }
626 | // Configure mirror
627 | OMX_MIRRORTYPE eMirror = OMX_MirrorNone;
628 | if(CAM_FLIP_HORIZONTAL && !CAM_FLIP_VERTICAL) {
629 | eMirror = OMX_MirrorHorizontal;
630 | } else if(!CAM_FLIP_HORIZONTAL && CAM_FLIP_VERTICAL) {
631 | eMirror = OMX_MirrorVertical;
632 | } else if(CAM_FLIP_HORIZONTAL && CAM_FLIP_VERTICAL) {
633 | eMirror = OMX_MirrorBoth;
634 | }
635 | OMX_CONFIG_MIRRORTYPE mirror;
636 | OMX_INIT_STRUCTURE(mirror);
637 | mirror.nPortIndex = 71;
638 | mirror.eMirror = eMirror;
639 | if((r = OMX_SetConfig(ctx.camera, OMX_IndexConfigCommonMirror, &mirror)) != OMX_ErrorNone) {
640 | omx_die(r, "Failed to set mirror configuration for camera video output port 71");
641 | }
642 |
643 | // Ensure camera is ready
644 | while(!ctx.camera_ready) {
645 | usleep(10000);
646 | }
647 |
648 | say("Configuring render...");
649 |
650 | say("Default port definition for render input port 90");
651 | dump_port(ctx.render, 90, OMX_TRUE);
652 |
653 | // Render input port definition is done automatically upon tunneling
654 |
655 | // Configure display region
656 | OMX_CONFIG_DISPLAYREGIONTYPE display_region;
657 | OMX_INIT_STRUCTURE(display_region);
658 | display_region.nPortIndex = 90;
659 | display_region.set = OMX_DISPLAY_SET_NUM | OMX_DISPLAY_SET_FULLSCREEN | OMX_DISPLAY_SET_MODE | OMX_DISPLAY_SET_DEST_RECT;
660 | display_region.num = DISPLAY_DEVICE;
661 | display_region.fullscreen = OMX_FALSE;
662 | display_region.mode = OMX_DISPLAY_MODE_FILL;
663 | display_region.dest_rect.width = camera_portdef.format.video.nFrameWidth;
664 | display_region.dest_rect.height = camera_portdef.format.video.nFrameHeight;
665 | display_region.dest_rect.x_offset = display_region.dest_rect.width / 2;
666 | display_region.dest_rect.y_offset = display_region.dest_rect.height / 2;
667 | if((r = OMX_SetConfig(ctx.render, OMX_IndexConfigDisplayRegion, &display_region)) != OMX_ErrorNone) {
668 | omx_die(r, "Failed to set display region for render output port 90");
669 | }
670 |
671 | say("Configuring null sink...");
672 |
673 | say("Default port definition for null sink input port 240");
674 | dump_port(ctx.null_sink, 240, OMX_TRUE);
675 |
676 | // Null sink input port definition is done automatically upon tunneling
677 |
678 | // Tunnel camera preview output port and null sink input port
679 | say("Setting up tunnel from camera preview output port 70 to null sink input port 240...");
680 | if((r = OMX_SetupTunnel(ctx.camera, 70, ctx.null_sink, 240)) != OMX_ErrorNone) {
681 | omx_die(r, "Failed to setup tunnel between camera preview output port 70 and null sink input port 240");
682 | }
683 |
684 | // Tunnel camera video output port and render input port
685 | say("Setting up tunnel from camera video output port 71 to render input port 90...");
686 | if((r = OMX_SetupTunnel(ctx.camera, 71, ctx.render, 90)) != OMX_ErrorNone) {
687 | omx_die(r, "Failed to setup tunnel between camera video output port 71 and render input port 90");
688 | }
689 |
690 | // Switch components to idle state
691 | say("Switching state of the camera component to idle...");
692 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandStateSet, OMX_StateIdle, NULL)) != OMX_ErrorNone) {
693 | omx_die(r, "Failed to switch state of the camera component to idle");
694 | }
695 | block_until_state_changed(ctx.camera, OMX_StateIdle);
696 | say("Switching state of the render component to idle...");
697 | if((r = OMX_SendCommand(ctx.render, OMX_CommandStateSet, OMX_StateIdle, NULL)) != OMX_ErrorNone) {
698 | omx_die(r, "Failed to switch state of the render component to idle");
699 | }
700 | block_until_state_changed(ctx.render, OMX_StateIdle);
701 | say("Switching state of the null sink component to idle...");
702 | if((r = OMX_SendCommand(ctx.null_sink, OMX_CommandStateSet, OMX_StateIdle, NULL)) != OMX_ErrorNone) {
703 | omx_die(r, "Failed to switch state of the null sink component to idle");
704 | }
705 | block_until_state_changed(ctx.null_sink, OMX_StateIdle);
706 |
707 | // Enable ports
708 | say("Enabling ports...");
709 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandPortEnable, 73, NULL)) != OMX_ErrorNone) {
710 | omx_die(r, "Failed to enable camera input port 73");
711 | }
712 | block_until_port_changed(ctx.camera, 73, OMX_TRUE);
713 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandPortEnable, 70, NULL)) != OMX_ErrorNone) {
714 | omx_die(r, "Failed to enable camera preview output port 70");
715 | }
716 | block_until_port_changed(ctx.camera, 70, OMX_TRUE);
717 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandPortEnable, 71, NULL)) != OMX_ErrorNone) {
718 | omx_die(r, "Failed to enable camera video output port 71");
719 | }
720 | block_until_port_changed(ctx.camera, 71, OMX_TRUE);
721 | if((r = OMX_SendCommand(ctx.render, OMX_CommandPortEnable, 90, NULL)) != OMX_ErrorNone) {
722 | omx_die(r, "Failed to enable render input port 90");
723 | }
724 | block_until_port_changed(ctx.render, 90, OMX_TRUE);
725 | if((r = OMX_SendCommand(ctx.null_sink, OMX_CommandPortEnable, 240, NULL)) != OMX_ErrorNone) {
726 | omx_die(r, "Failed to enable null sink input port 240");
727 | }
728 | block_until_port_changed(ctx.null_sink, 240, OMX_TRUE);
729 |
730 | // Allocate camera input buffer, buffers for tunneled
731 | // ports are allocated internally by OMX
732 | say("Allocating buffers...");
733 | OMX_INIT_STRUCTURE(camera_portdef);
734 | camera_portdef.nPortIndex = 73;
735 | if((r = OMX_GetParameter(ctx.camera, OMX_IndexParamPortDefinition, &camera_portdef)) != OMX_ErrorNone) {
736 | omx_die(r, "Failed to get port definition for camera input port 73");
737 | }
738 | if((r = OMX_AllocateBuffer(ctx.camera, &ctx.camera_ppBuffer_in, 73, NULL, camera_portdef.nBufferSize)) != OMX_ErrorNone) {
739 | omx_die(r, "Failed to allocate buffer for camera input port 73");
740 | }
741 |
742 | // Switch state of the components prior to starting
743 | // the video capture and encoding loop
744 | say("Switching state of the camera component to executing...");
745 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandStateSet, OMX_StateExecuting, NULL)) != OMX_ErrorNone) {
746 | omx_die(r, "Failed to switch state of the camera component to executing");
747 | }
748 | block_until_state_changed(ctx.camera, OMX_StateExecuting);
749 | say("Switching state of the render component to executing...");
750 | if((r = OMX_SendCommand(ctx.render, OMX_CommandStateSet, OMX_StateExecuting, NULL)) != OMX_ErrorNone) {
751 | omx_die(r, "Failed to switch state of the render component to executing");
752 | }
753 | block_until_state_changed(ctx.render, OMX_StateExecuting);
754 | say("Switching state of the null sink component to executing...");
755 | if((r = OMX_SendCommand(ctx.null_sink, OMX_CommandStateSet, OMX_StateExecuting, NULL)) != OMX_ErrorNone) {
756 | omx_die(r, "Failed to switch state of the null sink component to executing");
757 | }
758 | block_until_state_changed(ctx.null_sink, OMX_StateExecuting);
759 |
760 | // Start capturing video with the camera
761 | say("Switching on capture on camera video output port 71...");
762 | OMX_CONFIG_PORTBOOLEANTYPE capture;
763 | OMX_INIT_STRUCTURE(capture);
764 | capture.nPortIndex = 71;
765 | capture.bEnabled = OMX_TRUE;
766 | if((r = OMX_SetParameter(ctx.camera, OMX_IndexConfigPortCapturing, &capture)) != OMX_ErrorNone) {
767 | omx_die(r, "Failed to switch on capture on camera video output port 71");
768 | }
769 |
770 | say("Configured port definition for camera input port 73");
771 | dump_port(ctx.camera, 73, OMX_FALSE);
772 | say("Configured port definition for camera preview output port 70");
773 | dump_port(ctx.camera, 70, OMX_FALSE);
774 | say("Configured port definition for camera video output port 71");
775 | dump_port(ctx.camera, 71, OMX_FALSE);
776 | say("Configured port definition for render input port 90");
777 | dump_port(ctx.render, 90, OMX_FALSE);
778 | say("Configured port definition for null sink input port 240");
779 | dump_port(ctx.null_sink, 240, OMX_FALSE);
780 |
781 | say("Enter capture and playback loop, press Ctrl-C to quit...");
782 |
783 | signal(SIGINT, signal_handler);
784 | signal(SIGTERM, signal_handler);
785 | signal(SIGQUIT, signal_handler);
786 |
787 | while(!want_quit) {
788 | // Would be better to use signaling here but hey this works too
789 | usleep(1000);
790 | }
791 | say("Cleaning up...");
792 |
793 | // Restore signal handlers
794 | signal(SIGINT, SIG_DFL);
795 | signal(SIGTERM, SIG_DFL);
796 | signal(SIGQUIT, SIG_DFL);
797 |
798 | // Stop capturing video with the camera
799 | OMX_INIT_STRUCTURE(capture);
800 | capture.nPortIndex = 71;
801 | capture.bEnabled = OMX_FALSE;
802 | if((r = OMX_SetParameter(ctx.camera, OMX_IndexConfigPortCapturing, &capture)) != OMX_ErrorNone) {
803 | omx_die(r, "Failed to switch off capture on camera video output port 71");
804 | }
805 |
806 | // Flush the buffers on each component
807 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandFlush, 73, NULL)) != OMX_ErrorNone) {
808 | omx_die(r, "Failed to flush buffers of camera input port 73");
809 | }
810 | block_until_flushed(&ctx);
811 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandFlush, 70, NULL)) != OMX_ErrorNone) {
812 | omx_die(r, "Failed to flush buffers of camera preview output port 70");
813 | }
814 | block_until_flushed(&ctx);
815 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandFlush, 71, NULL)) != OMX_ErrorNone) {
816 | omx_die(r, "Failed to flush buffers of camera video output port 71");
817 | }
818 | block_until_flushed(&ctx);
819 | if((r = OMX_SendCommand(ctx.render, OMX_CommandFlush, 90, NULL)) != OMX_ErrorNone) {
820 | omx_die(r, "Failed to flush buffers of render input port 90");
821 | }
822 | block_until_flushed(&ctx);
823 | if((r = OMX_SendCommand(ctx.null_sink, OMX_CommandFlush, 240, NULL)) != OMX_ErrorNone) {
824 | omx_die(r, "Failed to flush buffers of null sink input port 240");
825 | }
826 | block_until_flushed(&ctx);
827 |
828 | // Disable all the ports
829 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandPortDisable, 73, NULL)) != OMX_ErrorNone) {
830 | omx_die(r, "Failed to disable camera input port 73");
831 | }
832 | block_until_port_changed(ctx.camera, 73, OMX_FALSE);
833 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandPortDisable, 70, NULL)) != OMX_ErrorNone) {
834 | omx_die(r, "Failed to disable camera preview output port 70");
835 | }
836 | block_until_port_changed(ctx.camera, 70, OMX_FALSE);
837 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandPortDisable, 71, NULL)) != OMX_ErrorNone) {
838 | omx_die(r, "Failed to disable camera video output port 71");
839 | }
840 | block_until_port_changed(ctx.camera, 71, OMX_FALSE);
841 | if((r = OMX_SendCommand(ctx.render, OMX_CommandPortDisable, 90, NULL)) != OMX_ErrorNone) {
842 | omx_die(r, "Failed to disable render input port 90");
843 | }
844 | if((r = OMX_SendCommand(ctx.null_sink, OMX_CommandPortDisable, 240, NULL)) != OMX_ErrorNone) {
845 | omx_die(r, "Failed to disable null sink input port 240");
846 | }
847 | block_until_port_changed(ctx.null_sink, 240, OMX_FALSE);
848 |
849 | // Free all the buffers
850 | if((r = OMX_FreeBuffer(ctx.camera, 73, ctx.camera_ppBuffer_in)) != OMX_ErrorNone) {
851 | omx_die(r, "Failed to free buffer for camera input port 73");
852 | }
853 |
854 | // Transition all the components to idle and then to loaded states
855 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandStateSet, OMX_StateIdle, NULL)) != OMX_ErrorNone) {
856 | omx_die(r, "Failed to switch state of the camera component to idle");
857 | }
858 | block_until_state_changed(ctx.camera, OMX_StateIdle);
859 | if((r = OMX_SendCommand(ctx.render, OMX_CommandStateSet, OMX_StateIdle, NULL)) != OMX_ErrorNone) {
860 | omx_die(r, "Failed to switch state of the render component to idle");
861 | }
862 | block_until_state_changed(ctx.render, OMX_StateIdle);
863 | if((r = OMX_SendCommand(ctx.null_sink, OMX_CommandStateSet, OMX_StateIdle, NULL)) != OMX_ErrorNone) {
864 | omx_die(r, "Failed to switch state of the null sink component to idle");
865 | }
866 | block_until_state_changed(ctx.null_sink, OMX_StateIdle);
867 | if((r = OMX_SendCommand(ctx.camera, OMX_CommandStateSet, OMX_StateLoaded, NULL)) != OMX_ErrorNone) {
868 | omx_die(r, "Failed to switch state of the camera component to loaded");
869 | }
870 | block_until_state_changed(ctx.camera, OMX_StateLoaded);
871 | if((r = OMX_SendCommand(ctx.render, OMX_CommandStateSet, OMX_StateLoaded, NULL)) != OMX_ErrorNone) {
872 | omx_die(r, "Failed to switch state of the render component to loaded");
873 | }
874 | block_until_state_changed(ctx.render, OMX_StateLoaded);
875 | if((r = OMX_SendCommand(ctx.null_sink, OMX_CommandStateSet, OMX_StateLoaded, NULL)) != OMX_ErrorNone) {
876 | omx_die(r, "Failed to switch state of the null sink component to loaded");
877 | }
878 | block_until_state_changed(ctx.null_sink, OMX_StateLoaded);
879 |
880 | // Free the component handles
881 | if((r = OMX_FreeHandle(ctx.camera)) != OMX_ErrorNone) {
882 | omx_die(r, "Failed to free camera component handle");
883 | }
884 | if((r = OMX_FreeHandle(ctx.render)) != OMX_ErrorNone) {
885 | omx_die(r, "Failed to free render component handle");
886 | }
887 | if((r = OMX_FreeHandle(ctx.null_sink)) != OMX_ErrorNone) {
888 | omx_die(r, "Failed to free null sink component handle");
889 | }
890 |
891 | // Exit
892 | vcos_semaphore_delete(&ctx.handler_lock);
893 | if((r = OMX_Deinit()) != OMX_ErrorNone) {
894 | omx_die(r, "OMX de-initalization failed");
895 | }
896 |
897 | say("Exit!");
898 |
899 | return 0;
900 | }
901 |
--------------------------------------------------------------------------------
/rpi-encode-yuv.c:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright © 2013 Tuomas Jormola
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
 *     http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | *
16 | * Short intro about this program:
17 | *
18 | * `rpi-encode-yuv` reads raw YUV frame data from `stdin`, encodes the stream
19 | * using the VideoCore hardware encoder using H.264 codec and emits the H.264
20 | * stream to `stdout`.
21 | *
22 | * $ ./rpi-encode-yuv test.h264
23 | *
24 | * `rpi-encode-yuv` uses the `video_encode` component. Uncompressed raw YUV frame
25 | * data is read from `stdin` and passed to the buffer of input port of
26 | * `video_encode`. H.264 encoded video is read from the buffer of `video_encode`
27 | * output port and dumped to `stdout`.
28 | *
29 | * Please see README.mdwn for more detailed description of this
30 | * OpenMAX IL demos for Raspberry Pi bundle.
31 | *
32 | */
33 |
34 | #include
35 | #include
36 | #include
37 | #include
38 | #include
39 | #include
40 |
41 | #include
42 |
43 | #include
44 | #include
45 |
46 | #include
47 | #include
48 | #include
49 | #include
50 |
// Hard coded parameters
// Parenthesized so the expressions survive intact if they are ever
// embedded in larger arithmetic expressions
#define VIDEO_WIDTH     (1920 / 4)
#define VIDEO_HEIGHT    (1080 / 4)
#define VIDEO_FRAMERATE 25
#define VIDEO_BITRATE   10000000

// Dunno where this is originally stolen from...
// Zero an OMX parameter struct and fill in the mandatory nSize/nVersion
// header fields. Wrapped in do { } while(0) so the multi-statement macro
// behaves like a single statement in every statement context.
#define OMX_INIT_STRUCTURE(a) \
    do { \
        memset(&(a), 0, sizeof(a)); \
        (a).nSize = sizeof(a); \
        (a).nVersion.nVersion = OMX_VERSION; \
        (a).nVersion.s.nVersionMajor = OMX_VERSION_MAJOR; \
        (a).nVersion.s.nVersionMinor = OMX_VERSION_MINOR; \
        (a).nVersion.s.nRevision = OMX_VERSION_REVISION; \
        (a).nVersion.s.nStep = OMX_VERSION_STEP; \
    } while(0)
66 |
// Global variable used by the signal handler and encoding loop.
// volatile sig_atomic_t is the only object type C guarantees can be
// safely written from an async signal handler and read from the main loop.
static volatile sig_atomic_t want_quit = 0;
69 |
// Our application context passed around
// the main routine and callback handlers
typedef struct {
    OMX_HANDLETYPE encoder;                      // video_encode component handle
    OMX_BUFFERHEADERTYPE *encoder_ppBuffer_in;   // buffer for encoder input port 200 (raw YUV in)
    OMX_BUFFERHEADERTYPE *encoder_ppBuffer_out;  // buffer for encoder output port 201 (H.264 out)
    int encoder_input_buffer_needed;             // set by EmptyBufferDone handler, cleared by the main loop
    int encoder_output_buffer_available;         // set by FillBufferDone handler, cleared by the main loop
    int flushed;                                 // set by the event handler when an OMX_CommandFlush completes
    FILE *fd_in;                                 // YUV input stream (stdin)
    FILE *fd_out;                                // H.264 output stream (stdout)
    VCOS_SEMAPHORE_T handler_lock;               // guards the flags above between OMX callbacks and the main loop
} appctx;
83 |
// I420 frame stuff: geometry of one planar 4:2:0 frame as laid out in
// memory, plus the matching encoder-buffer stride/slice-height values.
typedef struct {
    int width;
    int height;
    size_t size;
    int buf_stride;
    int buf_slice_height;
    int buf_extra_padding;
    int p_offset[3];
    int p_stride[3];
} i420_frame_info;

// Stolen from video-info.c of gstreamer-plugins-base
#define ROUND_UP_2(num) (((num)+1)&~1)
#define ROUND_UP_4(num) (((num)+3)&~3)
// Fill *info with the plane strides/offsets and total size of an I420
// frame of the given dimensions. buf_stride/buf_slice_height describe the
// component buffer layout; pass negative values when they are unknown,
// in which case buf_extra_padding is reported as -1.
static void get_i420_frame_info(int width, int height, int buf_stride, int buf_slice_height, i420_frame_info *info) {
    const int y_stride = ROUND_UP_4(width);
    const int uv_stride = ROUND_UP_4(ROUND_UP_2(width) / 2);
    const int y_rows = ROUND_UP_2(height);
    const int uv_rows = y_rows / 2;

    info->width = width;
    info->height = height;
    info->buf_stride = buf_stride;
    info->buf_slice_height = buf_slice_height;

    // Y plane first, then U and V planes each at half resolution
    info->p_stride[0] = y_stride;
    info->p_stride[1] = uv_stride;
    info->p_stride[2] = uv_stride;
    info->p_offset[0] = 0;
    info->p_offset[1] = y_stride * y_rows;
    info->p_offset[2] = info->p_offset[1] + uv_stride * uv_rows;
    info->size = info->p_offset[2] + uv_stride * uv_rows;

    // Rows of padding the component appends so the frame height becomes a
    // multiple of the buffer slice height
    if(buf_slice_height < 0) {
        info->buf_extra_padding = -1;
    } else if(buf_slice_height && (height % buf_slice_height)) {
        info->buf_extra_padding = buf_slice_height - (height % buf_slice_height);
    } else {
        info->buf_extra_padding = 0;
    }
}
118 |
119 | // Ugly, stupid utility functions
// Format a printf-style message and print it to stderr, appending a
// trailing newline if the message does not already end with one.
static void say(const char* message, ...) {
    va_list args;
    char str[1024];
    memset(str, 0, sizeof(str));
    va_start(args, message);
    vsnprintf(str, sizeof(str) - 1, message, args);
    va_end(args);
    size_t str_len = strnlen(str, sizeof(str));
    // Guard str_len == 0: the original indexed str[-1] when the message
    // formatted to an empty string
    if(str_len == 0 || str[str_len - 1] != '\n') {
        str[str_len] = '\n';
    }
    // fputs instead of fprintf(stderr, str): the formatted text may itself
    // contain '%' characters and must not be re-interpreted as a format
    fputs(str, stderr);
}
133 |
// Format a printf-style message, print it via say() and exit with
// status 1. Never returns.
static void die(const char* message, ...) {
    va_list args;
    char str[1024];
    memset(str, 0, sizeof(str));
    va_start(args, message);
    vsnprintf(str, sizeof(str), message, args);
    va_end(args);
    // Pass through "%s" so '%' characters in the already-formatted text
    // are not expanded a second time inside say()
    say("%s", str);
    exit(1);
}
144 |
// Format a message like die(), append a human-readable description of
// the given OMX error code, and exit via die(). Never returns.
static void omx_die(OMX_ERRORTYPE error, const char* message, ...) {
    va_list args;
    char str[1024];
    char *e;
    memset(str, 0, sizeof(str));
    va_start(args, message);
    vsnprintf(str, sizeof(str), message, args);
    va_end(args);
    // Map the most commonly seen error codes to descriptions
    switch(error) {
        case OMX_ErrorNone: e = "no error"; break;
        case OMX_ErrorBadParameter: e = "bad parameter"; break;
        case OMX_ErrorIncorrectStateOperation: e = "invalid state while trying to perform command"; break;
        case OMX_ErrorIncorrectStateTransition: e = "unallowed state transition"; break;
        case OMX_ErrorInsufficientResources: e = "insufficient resource"; break;
        case OMX_ErrorBadPortIndex: e = "bad port index, i.e. incorrect port"; break;
        case OMX_ErrorHardware: e = "hardware error"; break;
        /* That's all I've encountered during hacking so let's not bother with the rest... */
        default: e = "(no description)";
    }
    die("OMX error: %s: 0x%08x %s", str, error, e);
}
166 |
167 | static void dump_frame_info(const char *message, const i420_frame_info *info) {
168 | say("%s frame info:\n"
169 | "\tWidth:\t\t\t%d\n"
170 | "\tHeight:\t\t\t%d\n"
171 | "\tSize:\t\t\t%d\n"
172 | "\tBuffer stride:\t\t%d\n"
173 | "\tBuffer slice height:\t%d\n"
174 | "\tBuffer extra padding:\t%d\n"
175 | "\tPlane strides:\t\tY:%d U:%d V:%d\n"
176 | "\tPlane offsets:\t\tY:%d U:%d V:%d\n",
177 | message,
178 | info->width, info->height, info->size, info->buf_stride, info->buf_slice_height, info->buf_extra_padding,
179 | info->p_stride[0], info->p_stride[1], info->p_stride[2],
180 | info->p_offset[0], info->p_offset[1], info->p_offset[2]);
181 | }
182 |
183 | static void dump_event(OMX_HANDLETYPE hComponent, OMX_EVENTTYPE eEvent, OMX_U32 nData1, OMX_U32 nData2) {
184 | char *e;
185 | switch(eEvent) {
186 | case OMX_EventCmdComplete: e = "command complete"; break;
187 | case OMX_EventError: e = "error"; break;
188 | case OMX_EventParamOrConfigChanged: e = "parameter or configuration changed"; break;
189 | case OMX_EventPortSettingsChanged: e = "port settings changed"; break;
190 | /* That's all I've encountered during hacking so let's not bother with the rest... */
191 | default:
192 | e = "(no description)";
193 | }
194 | say("Received event 0x%08x %s, hComponent:0x%08x, nData1:0x%08x, nData2:0x%08x",
195 | eEvent, e, hComponent, nData1, nData2);
196 | }
197 |
// Map an OMX video coding enum to a printable name.
// NOTE(review): the default branch heap-allocates a string that callers
// never free — a deliberate (tiny, rare) leak in this short-lived demo.
static const char* dump_compression_format(OMX_VIDEO_CODINGTYPE c) {
    char *f;
    switch(c) {
        case OMX_VIDEO_CodingUnused:     return "not used";
        case OMX_VIDEO_CodingAutoDetect: return "autodetect";
        case OMX_VIDEO_CodingMPEG2:      return "MPEG2";
        case OMX_VIDEO_CodingH263:       return "H.263";
        case OMX_VIDEO_CodingMPEG4:      return "MPEG4";
        case OMX_VIDEO_CodingWMV:        return "Windows Media Video";
        case OMX_VIDEO_CodingRV:         return "RealVideo";
        case OMX_VIDEO_CodingAVC:        return "H.264/AVC";
        case OMX_VIDEO_CodingMJPEG:      return "Motion JPEG";
        case OMX_VIDEO_CodingVP6:        return "VP6";
        case OMX_VIDEO_CodingVP7:        return "VP7";
        case OMX_VIDEO_CodingVP8:        return "VP8";
        case OMX_VIDEO_CodingYUV:        return "Raw YUV video";
        case OMX_VIDEO_CodingSorenson:   return "Sorenson";
        case OMX_VIDEO_CodingTheora:     return "OGG Theora";
        case OMX_VIDEO_CodingMVC:        return "H.264/MVC";

        default:
            // Unknown code: format it into a freshly allocated buffer
            f = calloc(23, sizeof(char));
            if(f == NULL) {
                die("Failed to allocate memory");
            }
            snprintf(f, 23 * sizeof(char) - 1, "format type 0x%08x", c);
            return f;
    }
}
// Map an OMX color format enum to a printable name (with a short
// explanation for the formats this program actually deals with).
// NOTE(review): like dump_compression_format(), the default branch
// allocates a string that is intentionally never freed.
static const char* dump_color_format(OMX_COLOR_FORMATTYPE c) {
    char *f;
    switch(c) {
        case OMX_COLOR_FormatUnused:                 return "OMX_COLOR_FormatUnused: not used";
        case OMX_COLOR_FormatMonochrome:             return "OMX_COLOR_FormatMonochrome";
        case OMX_COLOR_Format8bitRGB332:             return "OMX_COLOR_Format8bitRGB332";
        case OMX_COLOR_Format12bitRGB444:            return "OMX_COLOR_Format12bitRGB444";
        case OMX_COLOR_Format16bitARGB4444:          return "OMX_COLOR_Format16bitARGB4444";
        case OMX_COLOR_Format16bitARGB1555:          return "OMX_COLOR_Format16bitARGB1555";
        case OMX_COLOR_Format16bitRGB565:            return "OMX_COLOR_Format16bitRGB565";
        case OMX_COLOR_Format16bitBGR565:            return "OMX_COLOR_Format16bitBGR565";
        case OMX_COLOR_Format18bitRGB666:            return "OMX_COLOR_Format18bitRGB666";
        case OMX_COLOR_Format18bitARGB1665:          return "OMX_COLOR_Format18bitARGB1665";
        case OMX_COLOR_Format19bitARGB1666:          return "OMX_COLOR_Format19bitARGB1666";
        case OMX_COLOR_Format24bitRGB888:            return "OMX_COLOR_Format24bitRGB888";
        case OMX_COLOR_Format24bitBGR888:            return "OMX_COLOR_Format24bitBGR888";
        case OMX_COLOR_Format24bitARGB1887:          return "OMX_COLOR_Format24bitARGB1887";
        case OMX_COLOR_Format25bitARGB1888:          return "OMX_COLOR_Format25bitARGB1888";
        case OMX_COLOR_Format32bitBGRA8888:          return "OMX_COLOR_Format32bitBGRA8888";
        case OMX_COLOR_Format32bitARGB8888:          return "OMX_COLOR_Format32bitARGB8888";
        case OMX_COLOR_FormatYUV411Planar:           return "OMX_COLOR_FormatYUV411Planar";
        case OMX_COLOR_FormatYUV411PackedPlanar:     return "OMX_COLOR_FormatYUV411PackedPlanar: Planes fragmented when a frame is split in multiple buffers";
        case OMX_COLOR_FormatYUV420Planar:           return "OMX_COLOR_FormatYUV420Planar: Planar YUV, 4:2:0 (I420)";
        case OMX_COLOR_FormatYUV420PackedPlanar:     return "OMX_COLOR_FormatYUV420PackedPlanar: Planar YUV, 4:2:0 (I420), planes fragmented when a frame is split in multiple buffers";
        case OMX_COLOR_FormatYUV420SemiPlanar:       return "OMX_COLOR_FormatYUV420SemiPlanar, Planar YUV, 4:2:0 (NV12), U and V planes interleaved with first U value";
        case OMX_COLOR_FormatYUV422Planar:           return "OMX_COLOR_FormatYUV422Planar";
        case OMX_COLOR_FormatYUV422PackedPlanar:     return "OMX_COLOR_FormatYUV422PackedPlanar: Planes fragmented when a frame is split in multiple buffers";
        case OMX_COLOR_FormatYUV422SemiPlanar:       return "OMX_COLOR_FormatYUV422SemiPlanar";
        case OMX_COLOR_FormatYCbYCr:                 return "OMX_COLOR_FormatYCbYCr";
        case OMX_COLOR_FormatYCrYCb:                 return "OMX_COLOR_FormatYCrYCb";
        case OMX_COLOR_FormatCbYCrY:                 return "OMX_COLOR_FormatCbYCrY";
        case OMX_COLOR_FormatCrYCbY:                 return "OMX_COLOR_FormatCrYCbY";
        case OMX_COLOR_FormatYUV444Interleaved:      return "OMX_COLOR_FormatYUV444Interleaved";
        case OMX_COLOR_FormatRawBayer8bit:           return "OMX_COLOR_FormatRawBayer8bit";
        case OMX_COLOR_FormatRawBayer10bit:          return "OMX_COLOR_FormatRawBayer10bit";
        case OMX_COLOR_FormatRawBayer8bitcompressed: return "OMX_COLOR_FormatRawBayer8bitcompressed";
        case OMX_COLOR_FormatL2:                     return "OMX_COLOR_FormatL2";
        case OMX_COLOR_FormatL4:                     return "OMX_COLOR_FormatL4";
        case OMX_COLOR_FormatL8:                     return "OMX_COLOR_FormatL8";
        case OMX_COLOR_FormatL16:                    return "OMX_COLOR_FormatL16";
        case OMX_COLOR_FormatL24:                    return "OMX_COLOR_FormatL24";
        case OMX_COLOR_FormatL32:                    return "OMX_COLOR_FormatL32";
        case OMX_COLOR_FormatYUV420PackedSemiPlanar: return "OMX_COLOR_FormatYUV420PackedSemiPlanar: Planar YUV, 4:2:0 (NV12), planes fragmented when a frame is split in multiple buffers, U and V planes interleaved with first U value";
        case OMX_COLOR_FormatYUV422PackedSemiPlanar: return "OMX_COLOR_FormatYUV422PackedSemiPlanar: Planes fragmented when a frame is split in multiple buffers";
        case OMX_COLOR_Format18BitBGR666:            return "OMX_COLOR_Format18BitBGR666";
        case OMX_COLOR_Format24BitARGB6666:          return "OMX_COLOR_Format24BitARGB6666";
        case OMX_COLOR_Format24BitABGR6666:          return "OMX_COLOR_Format24BitABGR6666";
        case OMX_COLOR_Format32bitABGR8888:          return "OMX_COLOR_Format32bitABGR8888";
        case OMX_COLOR_Format8bitPalette:            return "OMX_COLOR_Format8bitPalette";
        case OMX_COLOR_FormatYUVUV128:               return "OMX_COLOR_FormatYUVUV128";
        case OMX_COLOR_FormatRawBayer12bit:          return "OMX_COLOR_FormatRawBayer12bit";
        case OMX_COLOR_FormatBRCMEGL:                return "OMX_COLOR_FormatBRCMEGL";
        case OMX_COLOR_FormatBRCMOpaque:             return "OMX_COLOR_FormatBRCMOpaque";
        case OMX_COLOR_FormatYVU420PackedPlanar:     return "OMX_COLOR_FormatYVU420PackedPlanar";
        case OMX_COLOR_FormatYVU420PackedSemiPlanar: return "OMX_COLOR_FormatYVU420PackedSemiPlanar";
        default:
            // Unknown code: format it into a freshly allocated buffer
            f = calloc(23, sizeof(char));
            if(f == NULL) {
                die("Failed to allocate memory");
            }
            snprintf(f, 23 * sizeof(char) - 1, "format type 0x%08x", c);
            return f;
    }
}
291 |
// Print a summary of a port definition followed by the domain-specific
// (video or image) details. Other port domains print only the summary.
static void dump_portdef(OMX_PARAM_PORTDEFINITIONTYPE* portdef) {
    say("Port %d is %s, %s, buffers wants:%d needs:%d, size:%d, pop:%d, aligned:%d",
        portdef->nPortIndex,
        (portdef->eDir == OMX_DirInput ? "input" : "output"),
        (portdef->bEnabled == OMX_TRUE ? "enabled" : "disabled"),
        portdef->nBufferCountActual,
        portdef->nBufferCountMin,
        portdef->nBufferSize,
        portdef->bPopulated,
        portdef->nBufferAlignment);

    // Both union members are taken up front; only the one matching the
    // port domain is actually read below
    OMX_VIDEO_PORTDEFINITIONTYPE *viddef = &portdef->format.video;
    OMX_IMAGE_PORTDEFINITIONTYPE *imgdef = &portdef->format.image;
    switch(portdef->eDomain) {
        case OMX_PortDomainVideo:
            say("Video type:\n"
                "\tWidth:\t\t%d\n"
                "\tHeight:\t\t%d\n"
                "\tStride:\t\t%d\n"
                "\tSliceHeight:\t%d\n"
                "\tBitrate:\t%d\n"
                "\tFramerate:\t%.02f\n"
                "\tError hiding:\t%s\n"
                "\tCodec:\t\t%s\n"
                "\tColor:\t\t%s\n",
                viddef->nFrameWidth,
                viddef->nFrameHeight,
                viddef->nStride,
                viddef->nSliceHeight,
                viddef->nBitrate,
                // xFramerate is a Q16 fixed-point value
                ((float)viddef->xFramerate / (float)65536),
                (viddef->bFlagErrorConcealment == OMX_TRUE ? "yes" : "no"),
                dump_compression_format(viddef->eCompressionFormat),
                dump_color_format(viddef->eColorFormat));
            break;
        case OMX_PortDomainImage:
            say("Image type:\n"
                "\tWidth:\t\t%d\n"
                "\tHeight:\t\t%d\n"
                "\tStride:\t\t%d\n"
                "\tSliceHeight:\t%d\n"
                "\tError hiding:\t%s\n"
                "\tCodec:\t\t%s\n"
                "\tColor:\t\t%s\n",
                imgdef->nFrameWidth,
                imgdef->nFrameHeight,
                imgdef->nStride,
                imgdef->nSliceHeight,
                (imgdef->bFlagErrorConcealment == OMX_TRUE ? "yes" : "no"),
                dump_compression_format(imgdef->eCompressionFormat),
                dump_color_format(imgdef->eColorFormat));
            break;
        default:
            break;
    }
}
348 |
// Query and print the definition of the given port. If dumpformats is
// set, additionally enumerate every video format the port supports by
// stepping nIndex until OMX_GetParameter() reports an error (which is
// how OMX signals the end of the format list).
static void dump_port(OMX_HANDLETYPE hComponent, OMX_U32 nPortIndex, OMX_BOOL dumpformats) {
    OMX_ERRORTYPE r;
    OMX_PARAM_PORTDEFINITIONTYPE portdef;
    OMX_INIT_STRUCTURE(portdef);
    portdef.nPortIndex = nPortIndex;
    if((r = OMX_GetParameter(hComponent, OMX_IndexParamPortDefinition, &portdef)) != OMX_ErrorNone) {
        omx_die(r, "Failed to get port definition for port %d", nPortIndex);
    }
    dump_portdef(&portdef);
    if(dumpformats) {
        OMX_VIDEO_PARAM_PORTFORMATTYPE portformat;
        OMX_INIT_STRUCTURE(portformat);
        portformat.nPortIndex = nPortIndex;
        portformat.nIndex = 0;
        r = OMX_ErrorNone;
        say("Port %d supports these video formats:", nPortIndex);
        while(r == OMX_ErrorNone) {
            if((r = OMX_GetParameter(hComponent, OMX_IndexParamVideoPortFormat, &portformat)) == OMX_ErrorNone) {
                say("\t%s, compression: %s", dump_color_format(portformat.eColorFormat), dump_compression_format(portformat.eCompressionFormat));
                portformat.nIndex++;
            }
        }
    }
}
373 |
374 | // Some busy loops to verify we're running in order
375 | static void block_until_state_changed(OMX_HANDLETYPE hComponent, OMX_STATETYPE wanted_eState) {
376 | OMX_STATETYPE eState;
377 | int i = 0;
378 | while(i++ == 0 || eState != wanted_eState) {
379 | OMX_GetState(hComponent, &eState);
380 | if(eState != wanted_eState) {
381 | usleep(10000);
382 | }
383 | }
384 | }
385 |
386 | static void block_until_port_changed(OMX_HANDLETYPE hComponent, OMX_U32 nPortIndex, OMX_BOOL bEnabled) {
387 | OMX_ERRORTYPE r;
388 | OMX_PARAM_PORTDEFINITIONTYPE portdef;
389 | OMX_INIT_STRUCTURE(portdef);
390 | portdef.nPortIndex = nPortIndex;
391 | OMX_U32 i = 0;
392 | while(i++ == 0 || portdef.bEnabled != bEnabled) {
393 | if((r = OMX_GetParameter(hComponent, OMX_IndexParamPortDefinition, &portdef)) != OMX_ErrorNone) {
394 | omx_die(r, "Failed to get port definition");
395 | }
396 | if(portdef.bEnabled != bEnabled) {
397 | usleep(10000);
398 | }
399 | }
400 | }
401 |
402 | static void block_until_flushed(appctx *ctx) {
403 | int quit;
404 | while(!quit) {
405 | vcos_semaphore_wait(&ctx->handler_lock);
406 | if(ctx->flushed) {
407 | ctx->flushed = 0;
408 | quit = 1;
409 | }
410 | vcos_semaphore_post(&ctx->handler_lock);
411 | if(!quit) {
412 | usleep(10000);
413 | }
414 | }
415 | }
416 |
417 | static void init_component_handle(
418 | const char *name,
419 | OMX_HANDLETYPE* hComponent,
420 | OMX_PTR pAppData,
421 | OMX_CALLBACKTYPE* callbacks) {
422 | OMX_ERRORTYPE r;
423 | char fullname[32];
424 |
425 | // Get handle
426 | memset(fullname, 0, sizeof(fullname));
427 | strcat(fullname, "OMX.broadcom.");
428 | strncat(fullname, name, strlen(fullname) - 1);
429 | say("Initializing component %s", fullname);
430 | if((r = OMX_GetHandle(hComponent, fullname, pAppData, callbacks)) != OMX_ErrorNone) {
431 | omx_die(r, "Failed to get handle for component %s", fullname);
432 | }
433 |
434 | // Disable ports
435 | OMX_INDEXTYPE types[] = {
436 | OMX_IndexParamAudioInit,
437 | OMX_IndexParamVideoInit,
438 | OMX_IndexParamImageInit,
439 | OMX_IndexParamOtherInit
440 | };
441 | OMX_PORT_PARAM_TYPE ports;
442 | OMX_INIT_STRUCTURE(ports);
443 | OMX_GetParameter(*hComponent, OMX_IndexParamVideoInit, &ports);
444 |
445 | int i;
446 | for(i = 0; i < 4; i++) {
447 | if(OMX_GetParameter(*hComponent, types[i], &ports) == OMX_ErrorNone) {
448 | OMX_U32 nPortIndex;
449 | for(nPortIndex = ports.nStartPortNumber; nPortIndex < ports.nStartPortNumber + ports.nPorts; nPortIndex++) {
450 | say("Disabling port %d of component %s", nPortIndex, fullname);
451 | if((r = OMX_SendCommand(*hComponent, OMX_CommandPortDisable, nPortIndex, NULL)) != OMX_ErrorNone) {
452 | omx_die(r, "Failed to disable port %d of component %s", nPortIndex, fullname);
453 | }
454 | block_until_port_changed(*hComponent, nPortIndex, OMX_FALSE);
455 | }
456 | }
457 | }
458 | }
459 |
// Global signal handler for trapping SIGINT, SIGTERM, and SIGQUIT.
// Only sets a flag — that is all that is safe in an async signal
// handler; the encode loop polls want_quit and performs the shutdown.
// (The parameter name shadows signal() from <signal.h>, which is
// harmless here since the function is not called from this scope.)
static void signal_handler(int signal) {
    want_quit = 1;
}
464 |
// OMX calls this handler for all the events it emits.
// Logs every event; records flush completions so block_until_flushed()
// can proceed; dies on any error event.
static OMX_ERRORTYPE event_handler(
    OMX_HANDLETYPE hComponent,
    OMX_PTR pAppData,
    OMX_EVENTTYPE eEvent,
    OMX_U32 nData1,
    OMX_U32 nData2,
    OMX_PTR pEventData) {

    dump_event(hComponent, eEvent, nData1, nData2);

    appctx *ctx = (appctx *)pAppData;

    switch(eEvent) {
        case OMX_EventCmdComplete:
            // Note completed flushes under the lock shared with the main loop
            vcos_semaphore_wait(&ctx->handler_lock);
            if(nData1 == OMX_CommandFlush) {
                ctx->flushed = 1;
            }
            vcos_semaphore_post(&ctx->handler_lock);
            break;
        case OMX_EventError:
            // nData1 carries the OMX error code for error events
            omx_die(nData1, "error event received");
            break;
        default:
            break;
    }

    return OMX_ErrorNone;
}
495 |
496 | // Called by OMX when the encoder component requires
497 | // the input buffer to be filled with YUV video data
498 | static OMX_ERRORTYPE empty_input_buffer_done_handler(
499 | OMX_HANDLETYPE hComponent,
500 | OMX_PTR pAppData,
501 | OMX_BUFFERHEADERTYPE* pBuffer) {
502 | appctx *ctx = ((appctx*)pAppData);
503 | vcos_semaphore_wait(&ctx->handler_lock);
504 | // The main loop can now fill the buffer from input file
505 | ctx->encoder_input_buffer_needed = 1;
506 | vcos_semaphore_post(&ctx->handler_lock);
507 | return OMX_ErrorNone;
508 | }
509 |
510 | // Called by OMX when the encoder component has filled
511 | // the output buffer with H.264 encoded video data
512 | static OMX_ERRORTYPE fill_output_buffer_done_handler(
513 | OMX_HANDLETYPE hComponent,
514 | OMX_PTR pAppData,
515 | OMX_BUFFERHEADERTYPE* pBuffer) {
516 | appctx *ctx = ((appctx*)pAppData);
517 | vcos_semaphore_wait(&ctx->handler_lock);
518 | // The main loop can now flush the buffer to output file
519 | ctx->encoder_output_buffer_available = 1;
520 | vcos_semaphore_post(&ctx->handler_lock);
521 | return OMX_ErrorNone;
522 | }
523 |
524 | int main(int argc, char **argv) {
525 | bcm_host_init();
526 |
527 | OMX_ERRORTYPE r;
528 |
529 | if((r = OMX_Init()) != OMX_ErrorNone) {
530 | omx_die(r, "OMX initalization failed");
531 | }
532 |
533 | // Init context
534 | appctx ctx;
535 | memset(&ctx, 0, sizeof(ctx));
536 | if(vcos_semaphore_create(&ctx.handler_lock, "handler_lock", 1) != VCOS_SUCCESS) {
537 | die("Failed to create handler lock semaphore");
538 | }
539 |
540 | // Init component handles
541 | OMX_CALLBACKTYPE callbacks;
542 | memset(&ctx, 0, sizeof(callbacks));
543 | callbacks.EventHandler = event_handler;
544 | callbacks.EmptyBufferDone = empty_input_buffer_done_handler;
545 | callbacks.FillBufferDone = fill_output_buffer_done_handler;
546 |
547 | init_component_handle("video_encode", &ctx.encoder, &ctx, &callbacks);
548 |
549 | say("Configuring encoder...");
550 |
551 | say("Default port definition for encoder input port 200");
552 | dump_port(ctx.encoder, 200, OMX_TRUE);
553 | say("Default port definition for encoder output port 201");
554 | dump_port(ctx.encoder, 201, OMX_TRUE);
555 |
556 | OMX_PARAM_PORTDEFINITIONTYPE encoder_portdef;
557 | OMX_INIT_STRUCTURE(encoder_portdef);
558 | encoder_portdef.nPortIndex = 200;
559 | if((r = OMX_GetParameter(ctx.encoder, OMX_IndexParamPortDefinition, &encoder_portdef)) != OMX_ErrorNone) {
560 | omx_die(r, "Failed to get port definition for encoder input port 200");
561 | }
562 | encoder_portdef.format.video.nFrameWidth = VIDEO_WIDTH;
563 | encoder_portdef.format.video.nFrameHeight = VIDEO_HEIGHT;
564 | encoder_portdef.format.video.xFramerate = VIDEO_FRAMERATE << 16;
565 | // Stolen from gstomxvideodec.c of gst-omx
566 | encoder_portdef.format.video.nStride = (encoder_portdef.format.video.nFrameWidth + encoder_portdef.nBufferAlignment - 1) & (~(encoder_portdef.nBufferAlignment - 1));
567 | encoder_portdef.format.video.eColorFormat = OMX_COLOR_FormatYUV420PackedPlanar;
568 | if((r = OMX_SetParameter(ctx.encoder, OMX_IndexParamPortDefinition, &encoder_portdef)) != OMX_ErrorNone) {
569 | omx_die(r, "Failed to set port definition for encoder input port 200");
570 | }
571 |
572 | // Copy encoder input port definition as basis encoder output port definition
573 | OMX_INIT_STRUCTURE(encoder_portdef);
574 | encoder_portdef.nPortIndex = 200;
575 | if((r = OMX_GetParameter(ctx.encoder, OMX_IndexParamPortDefinition, &encoder_portdef)) != OMX_ErrorNone) {
576 | omx_die(r, "Failed to get port definition for encoder input port 200");
577 | }
578 | encoder_portdef.nPortIndex = 201;
579 | encoder_portdef.format.video.eColorFormat = OMX_COLOR_FormatUnused;
580 | encoder_portdef.format.video.eCompressionFormat = OMX_VIDEO_CodingAVC;
581 | // Which one is effective, this or the configuration just below?
582 | encoder_portdef.format.video.nBitrate = VIDEO_BITRATE;
583 | if((r = OMX_SetParameter(ctx.encoder, OMX_IndexParamPortDefinition, &encoder_portdef)) != OMX_ErrorNone) {
584 | omx_die(r, "Failed to set port definition for encoder output port 201");
585 | }
586 | // Configure bitrate
587 | OMX_VIDEO_PARAM_BITRATETYPE bitrate;
588 | OMX_INIT_STRUCTURE(bitrate);
589 | bitrate.eControlRate = OMX_Video_ControlRateVariable;
590 | bitrate.nTargetBitrate = encoder_portdef.format.video.nBitrate;
591 | bitrate.nPortIndex = 201;
592 | if((r = OMX_SetParameter(ctx.encoder, OMX_IndexParamVideoBitrate, &bitrate)) != OMX_ErrorNone) {
593 | omx_die(r, "Failed to set bitrate for encoder output port 201");
594 | }
595 | // Configure format
596 | OMX_VIDEO_PARAM_PORTFORMATTYPE format;
597 | OMX_INIT_STRUCTURE(format);
598 | format.nPortIndex = 201;
599 | format.eCompressionFormat = OMX_VIDEO_CodingAVC;
600 | if((r = OMX_SetParameter(ctx.encoder, OMX_IndexParamVideoPortFormat, &format)) != OMX_ErrorNone) {
601 | omx_die(r, "Failed to set video format for encoder output port 201");
602 | }
603 |
604 | // Switch components to idle state
605 | say("Switching state of the encoder component to idle...");
606 | if((r = OMX_SendCommand(ctx.encoder, OMX_CommandStateSet, OMX_StateIdle, NULL)) != OMX_ErrorNone) {
607 | omx_die(r, "Failed to switch state of the encoder component to idle");
608 | }
609 | block_until_state_changed(ctx.encoder, OMX_StateIdle);
610 |
611 | // Enable ports
612 | say("Enabling ports...");
613 | if((r = OMX_SendCommand(ctx.encoder, OMX_CommandPortEnable, 200, NULL)) != OMX_ErrorNone) {
614 | omx_die(r, "Failed to enable encoder input port 200");
615 | }
616 | block_until_port_changed(ctx.encoder, 200, OMX_TRUE);
617 | if((r = OMX_SendCommand(ctx.encoder, OMX_CommandPortEnable, 201, NULL)) != OMX_ErrorNone) {
618 | omx_die(r, "Failed to enable encoder output port 201");
619 | }
620 | block_until_port_changed(ctx.encoder, 201, OMX_TRUE);
621 |
622 | // Allocate encoder input and output buffers
623 | say("Allocating buffers...");
624 | OMX_INIT_STRUCTURE(encoder_portdef);
625 | encoder_portdef.nPortIndex = 200;
626 | if((r = OMX_GetParameter(ctx.encoder, OMX_IndexParamPortDefinition, &encoder_portdef)) != OMX_ErrorNone) {
627 | omx_die(r, "Failed to get port definition for encoder input port 200");
628 | }
629 | if((r = OMX_AllocateBuffer(ctx.encoder, &ctx.encoder_ppBuffer_in, 200, NULL, encoder_portdef.nBufferSize)) != OMX_ErrorNone) {
630 | omx_die(r, "Failed to allocate buffer for encoder input port 200");
631 | }
632 | OMX_INIT_STRUCTURE(encoder_portdef);
633 | encoder_portdef.nPortIndex = 201;
634 | if((r = OMX_GetParameter(ctx.encoder, OMX_IndexParamPortDefinition, &encoder_portdef)) != OMX_ErrorNone) {
635 | omx_die(r, "Failed to get port definition for encoder output port 201");
636 | }
637 | if((r = OMX_AllocateBuffer(ctx.encoder, &ctx.encoder_ppBuffer_out, 201, NULL, encoder_portdef.nBufferSize)) != OMX_ErrorNone) {
638 | omx_die(r, "Failed to allocate buffer for encoder output port 201");
639 | }
640 |
641 | // Just use stdin for input and stdout for output
642 | say("Opening input and output files...");
643 | ctx.fd_in = stdin;
644 | ctx.fd_out = stdout;
645 |
646 | // Switch state of the components prior to starting
647 | // the video capture and encoding loop
648 | say("Switching state of the encoder component to executing...");
649 | if((r = OMX_SendCommand(ctx.encoder, OMX_CommandStateSet, OMX_StateExecuting, NULL)) != OMX_ErrorNone) {
650 | omx_die(r, "Failed to switch state of the encoder component to executing");
651 | }
652 | block_until_state_changed(ctx.encoder, OMX_StateExecuting);
653 |
654 | say("Configured port definition for encoder input port 200");
655 | dump_port(ctx.encoder, 200, OMX_FALSE);
656 | say("Configured port definition for encoder output port 201");
657 | dump_port(ctx.encoder, 201, OMX_FALSE);
658 |
659 | i420_frame_info frame_info, buf_info;
660 | get_i420_frame_info(encoder_portdef.format.image.nFrameWidth, encoder_portdef.format.image.nFrameHeight, encoder_portdef.format.image.nStride, encoder_portdef.format.video.nSliceHeight, &frame_info);
661 | get_i420_frame_info(frame_info.buf_stride, frame_info.buf_slice_height, -1, -1, &buf_info);
662 |
663 | dump_frame_info("Destination frame", &frame_info);
664 | dump_frame_info("Source buffer", &buf_info);
665 |
666 | if(ctx.encoder_ppBuffer_in->nAllocLen != buf_info.size) {
667 | die("Allocated encoder input port 200 buffer size %d doesn't equal to the expected buffer size %d", ctx.encoder_ppBuffer_in->nAllocLen, buf_info.size);
668 | }
669 |
670 | say("Enter encode loop, press Ctrl-C to quit...");
671 |
672 | int input_available = 1, frame_in = 0, frame_out = 0, i;
673 | size_t input_total_read, want_read, input_read, output_written;
674 | // I420 spec: U and V plane span size half of the size of the Y plane span size
675 | int plane_span_y = ROUND_UP_2(frame_info.height), plane_span_uv = plane_span_y / 2;
676 |
677 | ctx.encoder_input_buffer_needed = 1;
678 |
679 | signal(SIGINT, signal_handler);
680 | signal(SIGTERM, signal_handler);
681 | signal(SIGQUIT, signal_handler);
682 |
683 | while(1) {
684 | // empty_input_buffer_done_handler() has marked that there's
685 | // a need for a buffer to be filled by us
686 | if(ctx.encoder_input_buffer_needed && input_available) {
687 | input_total_read = 0;
688 | memset(ctx.encoder_ppBuffer_in->pBuffer, 0, ctx.encoder_ppBuffer_in->nAllocLen);
689 | // Pack Y, U, and V plane spans read from input file to the buffer
690 | for(i = 0; i < 3; i++) {
691 | want_read = frame_info.p_stride[i] * (i == 0 ? plane_span_y : plane_span_uv);
692 | input_read = fread(
693 | ctx.encoder_ppBuffer_in->pBuffer + buf_info.p_offset[i],
694 | 1, want_read, ctx.fd_in);
695 | input_total_read += input_read;
696 | if(input_read != want_read) {
697 | ctx.encoder_ppBuffer_in->nFlags = OMX_BUFFERFLAG_EOS;
698 | want_quit = 1;
699 | say("Input file EOF");
700 | break;
701 | }
702 | }
703 | ctx.encoder_ppBuffer_in->nOffset = 0;
704 | ctx.encoder_ppBuffer_in->nFilledLen = (buf_info.size - frame_info.size) + input_total_read;
705 | frame_in++;
706 | say("Read from input file and wrote to input buffer %d/%d, frame %d", ctx.encoder_ppBuffer_in->nFilledLen, ctx.encoder_ppBuffer_in->nAllocLen, frame_in);
707 | // Mark input unavailable also if the signal handler was triggered
708 | if(want_quit) {
709 | input_available = 0;
710 | }
711 | if(input_total_read > 0) {
712 | ctx.encoder_input_buffer_needed = 0;
713 | if((r = OMX_EmptyThisBuffer(ctx.encoder, ctx.encoder_ppBuffer_in)) != OMX_ErrorNone) {
714 | omx_die(r, "Failed to request emptying of the input buffer on encoder input port 200");
715 | }
716 | }
717 | }
718 | // fill_output_buffer_done_handler() has marked that there's
719 | // a buffer for us to flush
720 | if(ctx.encoder_output_buffer_available) {
721 | if(ctx.encoder_ppBuffer_out->nFlags & OMX_BUFFERFLAG_ENDOFFRAME) {
722 | frame_out++;
723 | }
724 | // Flush buffer to output file
725 | output_written = fwrite(ctx.encoder_ppBuffer_out->pBuffer + ctx.encoder_ppBuffer_out->nOffset, 1, ctx.encoder_ppBuffer_out->nFilledLen, ctx.fd_out);
726 | if(output_written != ctx.encoder_ppBuffer_out->nFilledLen) {
727 | die("Failed to write to output file: %s", strerror(errno));
728 | }
729 | say("Read from output buffer and wrote to output file %d/%d, frame %d", ctx.encoder_ppBuffer_out->nFilledLen, ctx.encoder_ppBuffer_out->nAllocLen, frame_out + 1);
730 | }
731 | if(ctx.encoder_output_buffer_available || !frame_out) {
732 | // Buffer flushed, request a new buffer to be filled by the encoder component
733 | ctx.encoder_output_buffer_available = 0;
734 | if((r = OMX_FillThisBuffer(ctx.encoder, ctx.encoder_ppBuffer_out)) != OMX_ErrorNone) {
735 | omx_die(r, "Failed to request filling of the output buffer on encoder output port 201");
736 | }
737 | }
738 | // Don't exit the loop until all the input frames have been encoded.
739 | // Out frame count is larger than in frame count because 2 header
740 | // frames are emitted in the beginning.
741 | if(want_quit && frame_out == frame_in) {
742 | break;
743 | }
744 | // Would be better to use signaling here but hey this works too
745 | usleep(10);
746 | }
747 | say("Cleaning up...");
748 |
749 | // Restore signal handlers
750 | signal(SIGINT, SIG_DFL);
751 | signal(SIGTERM, SIG_DFL);
752 | signal(SIGQUIT, SIG_DFL);
753 |
754 | // Flush the buffers on each component
755 | if((r = OMX_SendCommand(ctx.encoder, OMX_CommandFlush, 200, NULL)) != OMX_ErrorNone) {
756 | omx_die(r, "Failed to flush buffers of encoder input port 200");
757 | }
758 | block_until_flushed(&ctx);
759 | if((r = OMX_SendCommand(ctx.encoder, OMX_CommandFlush, 201, NULL)) != OMX_ErrorNone) {
760 | omx_die(r, "Failed to flush buffers of encoder output port 201");
761 | }
762 | block_until_flushed(&ctx);
763 |
764 | // Disable all the ports
765 | if((r = OMX_SendCommand(ctx.encoder, OMX_CommandPortDisable, 200, NULL)) != OMX_ErrorNone) {
766 | omx_die(r, "Failed to disable encoder input port 200");
767 | }
768 | block_until_port_changed(ctx.encoder, 200, OMX_FALSE);
769 | if((r = OMX_SendCommand(ctx.encoder, OMX_CommandPortDisable, 201, NULL)) != OMX_ErrorNone) {
770 | omx_die(r, "Failed to disable encoder output port 201");
771 | }
772 | block_until_port_changed(ctx.encoder, 201, OMX_FALSE);
773 |
774 | // Free all the buffers
775 | if((r = OMX_FreeBuffer(ctx.encoder, 200, ctx.encoder_ppBuffer_in)) != OMX_ErrorNone) {
776 | omx_die(r, "Failed to free buffer for encoder input port 200");
777 | }
778 | if((r = OMX_FreeBuffer(ctx.encoder, 201, ctx.encoder_ppBuffer_out)) != OMX_ErrorNone) {
779 | omx_die(r, "Failed to free buffer for encoder output port 201");
780 | }
781 |
782 | // Transition all the components to idle and then to loaded states
783 | if((r = OMX_SendCommand(ctx.encoder, OMX_CommandStateSet, OMX_StateIdle, NULL)) != OMX_ErrorNone) {
784 | omx_die(r, "Failed to switch state of the encoder component to idle");
785 | }
786 | block_until_state_changed(ctx.encoder, OMX_StateIdle);
787 | if((r = OMX_SendCommand(ctx.encoder, OMX_CommandStateSet, OMX_StateLoaded, NULL)) != OMX_ErrorNone) {
788 | omx_die(r, "Failed to switch state of the encoder component to loaded");
789 | }
790 | block_until_state_changed(ctx.encoder, OMX_StateLoaded);
791 |
792 | // Free the component handles
793 | if((r = OMX_FreeHandle(ctx.encoder)) != OMX_ErrorNone) {
794 | omx_die(r, "Failed to free encoder component handle");
795 | }
796 |
797 | // Exit
798 | fclose(ctx.fd_in);
799 | fclose(ctx.fd_out);
800 |
801 | vcos_semaphore_delete(&ctx.handler_lock);
802 | if((r = OMX_Deinit()) != OMX_ErrorNone) {
803 | omx_die(r, "OMX de-initalization failed");
804 | }
805 |
806 | say("Exit!");
807 |
808 | return 0;
809 | }
810 |
--------------------------------------------------------------------------------