├── .github
│   └── FUNDING.yml
├── .gitignore
├── LICENSE
├── README.md
├── app
│   ├── .gitignore
│   ├── build.gradle
│   ├── proguard-rules.pro
│   └── src
│       └── main
│           ├── AndroidManifest.xml
│           ├── java
│           │   └── com
│           │       └── alexvas
│           │           └── rtsp
│           │               └── demo
│           │                   ├── MainActivity.kt
│           │                   └── live
│           │                       ├── LiveFragment.kt
│           │                       ├── LiveViewModel.kt
│           │                       └── RawFragment.kt
│           └── res
│               ├── drawable
│               │   ├── ic_camera_black_24dp.xml
│               │   ├── ic_cctv_black_24dp.xml
│               │   ├── ic_launcher_background.xml
│               │   ├── ic_launcher_foreground.xml
│               │   └── ic_text_subject_black_24dp.xml
│               ├── layout
│               │   ├── activity_main.xml
│               │   ├── fragment_live.xml
│               │   ├── fragment_logs.xml
│               │   ├── fragment_raw.xml
│               │   └── layout_rtsp_params.xml
│               ├── menu
│               │   └── bottom_nav_menu.xml
│               ├── mipmap-anydpi-v26
│               │   ├── ic_launcher.xml
│               │   └── ic_launcher_round.xml
│               ├── navigation
│               │   └── mobile_navigation.xml
│               └── values
│                   ├── colors.xml
│                   ├── dimens.xml
│                   ├── strings.xml
│                   └── styles.xml
├── build.gradle
├── docs
│   └── images
│       └── rtsp-demo-app.webp
├── gradle.properties
├── gradle
│   └── wrapper
│       ├── gradle-wrapper.jar
│       └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
├── jitpack.yml
├── library-client-rtsp
│   ├── .gitignore
│   ├── build.gradle
│   ├── proguard-rules.txt
│   └── src
│       └── main
│           ├── AndroidManifest.xml
│           └── java
│               └── com
│                   ├── alexvas
│                   │   ├── rtsp
│                   │   │   ├── RtspClient.java
│                   │   │   ├── codec
│                   │   │   │   ├── AudioDecodeThread.kt
│                   │   │   │   ├── FrameQueue.kt
│                   │   │   │   ├── VideoDecodeThread.kt
│                   │   │   │   ├── VideoDecoderBitmapThread.kt
│                   │   │   │   ├── VideoDecoderSurfaceThread.kt
│                   │   │   │   └── color
│                   │   │   │       ├── ColorConverter.kt
│                   │   │   │       └── ColorConverterImage.kt
│                   │   │   ├── parser
│                   │   │   │   ├── AacParser.java
│                   │   │   │   ├── RtpH264Parser.kt
│                   │   │   │   ├── RtpH265Parser.kt
│                   │   │   │   ├── RtpHeaderParser.java
│                   │   │   │   └── RtpParser.kt
│                   │   │   └── widget
│                   │   │       ├── RtspImageView.kt
│                   │   │       ├── RtspListeners.kt
│                   │   │       ├── RtspProcessor.kt
│                   │   │       └── RtspSurfaceView.kt
│                   │   └── utils
│                   │       ├── ByteUtils.java
│                   │       ├── MediaCodecUtils.kt
│                   │       ├── NetUtils.java
│                   │       └── VideoCodecUtils.kt
│                   └── limelight
│                       └── binding
│                           └── video
│                               └── MediaCodecHelper.java
└── settings.gradle
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | github: alexeyvasilyev
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.iml
2 | .gradle
3 | /local.properties
4 | /.idea
5 | /build
6 | .DS_Store
7 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # rtsp-client-android
2 | Lightweight RTSP client library for Android with almost zero-lag video decoding (achieved 20 msec video decoding latency on some RTSP streams). Designed for lag-critical applications (e.g. video surveillance from drones, car rear-view cameras, etc.).
3 |
4 | Unlike [AndroidX Media ExoPlayer](https://github.com/androidx/media), which also supports RTSP, this library does not buffer video. Video frames are shown immediately as they arrive.
5 |
6 | [![Release](https://jitpack.io/v/alexeyvasilyev/rtsp-client-android.svg)](https://jitpack.io/#alexeyvasilyev/rtsp-client-android)
7 |
8 | ![RTSP demo app](docs/images/rtsp-demo-app.webp)
9 |
10 | ## Features:
11 | - RTSP/RTSPS over TCP.
12 | - Supports majority of RTSP IP cameras.
13 | - Video H.264/H.265.
14 | - Audio AAC LC only.
15 | - Support for application specific data sent via RTP, e.g. GPS data (`m=application`, see [RFC 4566 sec.5.14](https://datatracker.ietf.org/doc/html/rfc4566#section-5.14))
16 | - Basic/Digest authentication.
17 | - Uses Android's [Low-Latency MediaCodec](https://source.android.com/docs/core/media/low-latency-media) by default if available.
18 | - Ability to select hardware or software video decoder (see the sketch after this list).
19 | - Ability to [rewrite SPS frame](https://github.com/alexeyvasilyev/rtsp-client-android/blob/dbea741548307b1b0e1ead0ccc6294e811fbf6fd/library-client-rtsp/src/main/java/com/alexvas/rtsp/widget/RtspProcessor.kt#L106C9-L106C55) with low-latency parameters (EXPERIMENTAL).
20 | - Video rotation (90, 180, 270 degrees).
21 | - Android min API 24.
22 |
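For example, the decoder type can be selected in code. A sketch based on the demo app's `LiveFragment`; `svVideo` is an assumed `RtspSurfaceView` (or `RtspImageView`) reference:

```kotlin
// DecoderType lives in com.alexvas.rtsp.codec.VideoDecodeThread.
// HARDWARE selects a hardware MediaCodec decoder, SOFTWARE a software one.
svVideo.videoDecoderType = VideoDecodeThread.DecoderType.SOFTWARE
```
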
23 | ## Upcoming features:
24 | - PCM and G.711 aLaw/uLaw audio streams.
25 | - 2-way talk.
26 |
27 | ## Permissions:
28 |
29 | ```xml
30 | <uses-permission android:name="android.permission.INTERNET" />
31 | ```
32 |
33 | ## Compile
34 |
35 | To use this library in your project, add the following to your build.gradle:
36 | ```gradle
37 | allprojects {
38 |     repositories {
39 |         maven { url 'https://jitpack.io' }
40 |     }
41 | }
42 | dependencies {
43 |     implementation 'com.github.alexeyvasilyev:rtsp-client-android:x.x.x'
44 | }
45 | ```
46 |
47 | ## How to use:
48 | The easiest way is to use the `RtspSurfaceView` (recommended) or `RtspImageView` class to show the video stream in the UI.
49 |
50 | Use [RtspSurfaceView](https://github.com/alexeyvasilyev/rtsp-client-android/blob/master/library-client-rtsp/src/main/java/com/alexvas/rtsp/widget/RtspSurfaceView.kt) if you need the best performance and lower battery usage. To get a bitmap from the SurfaceView, use [PixelCopy.request](https://developer.android.com/reference/android/view/PixelCopy) (on a Pixel 8 Pro with a 1440p @ 20 fps video stream, you can get only 12 fps via PixelCopy).
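
A minimal snapshot sketch using `PixelCopy`, adapted from the demo app's `LiveFragment.getSnapshot()`; the fixed 1920x1080 bitmap size and the `svVideo` parameter name are assumptions:

```kotlin
import android.graphics.Bitmap
import android.os.Handler
import android.os.HandlerThread
import android.view.PixelCopy
import android.view.SurfaceView
import java.util.concurrent.atomic.AtomicBoolean

// Blocking helper: copies the currently displayed SurfaceView frame into a Bitmap.
// Call it off the main thread.
fun snapshot(svVideo: SurfaceView): Bitmap? {
    val bitmap = Bitmap.createBitmap(1920, 1080, Bitmap.Config.ARGB_8888)
    val lock = Object()
    val success = AtomicBoolean(false)
    val thread = HandlerThread("PixelCopyHelper").apply { start() }
    val listener = PixelCopy.OnPixelCopyFinishedListener { result ->
        success.set(result == PixelCopy.SUCCESS)
        synchronized(lock) { lock.notify() }
    }
    synchronized(lock) {
        // PixelCopy completes asynchronously on the handler thread; wait for the listener
        PixelCopy.request(svVideo.holder.surface, bitmap, listener, Handler(thread.looper))
        lock.wait()
    }
    thread.quitSafely()
    return if (success.get()) bitmap else null
}
```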
51 |
52 | Use [RtspImageView](https://github.com/alexeyvasilyev/rtsp-client-android/blob/master/library-client-rtsp/src/main/java/com/alexvas/rtsp/widget/RtspImageView.kt) if you need better performance than PixelCopy when obtaining bitmaps for further processing (e.g. for AI).
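
For example, a bitmap callback can be attached before starting the stream. A sketch based on the demo app's `LiveFragment`; `ivVideo` is an assumed `RtspImageView` reference:

```kotlin
ivVideo.onRtspImageBitmapListener = object : RtspImageView.RtspImageBitmapListener {
    override fun onRtspImageBitmapObtained(bitmap: Bitmap) {
        // Every decoded video frame arrives here as a Bitmap (e.g. feed it to an ML model)
    }
}
```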
53 |
54 | ```xml
55 | <!-- Attribute values are illustrative; adjust to your layout -->
56 | <com.alexvas.rtsp.widget.RtspSurfaceView
57 |     android:id="@+id/svVideo"
58 |     android:layout_width="match_parent"
59 |     android:layout_height="match_parent" />
60 |
61 | <com.alexvas.rtsp.widget.RtspImageView
62 |     android:id="@+id/ivVideo"
63 |     android:layout_width="match_parent"
64 |     android:layout_height="match_parent" />
65 | ```
66 | Then in code use:
67 | ```kotlin
68 | val uri = Uri.parse("rtsps://10.0.1.3/test.sdp")
69 | val username = "admin"
70 | val password = "secret"
71 | svVideo.init(uri, username, password)
72 | svVideo.start(
73 |     requestVideo = true,
74 |     requestAudio = true,
75 |     requestApplication = false)
76 | // ...
77 | svVideo.stop()
78 | ```
79 |
80 | You can still use the library without any decoding (just to obtain raw frames from the RTSP source), e.g. for writing the video stream into an MP4 file via a muxer.
81 |
82 | ```kotlin
83 | val rtspClientListener = object: RtspClient.RtspClientListener {
84 |     override fun onRtspConnecting() {}
85 |     override fun onRtspConnected(sdpInfo: SdpInfo) {}
86 |     override fun onRtspVideoNalUnitReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {
87 |         // Send raw H264/H265 NAL unit to decoder
88 |     }
89 |     override fun onRtspAudioSampleReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {
90 |         // Send raw audio to decoder
91 |     }
92 |     override fun onRtspApplicationDataReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {
93 |         // Send raw application data to an app-specific parser
94 |     }
95 |     override fun onRtspDisconnected() {}
96 |     override fun onRtspFailedUnauthorized() {
97 |         Log.e(TAG, "RTSP failed unauthorized")
98 |     }
99 |     override fun onRtspFailed(message: String?) {
100 |         Log.e(TAG, "RTSP failed with message '$message'")
101 |     }
102 | }
103 |
104 | val uri = Uri.parse("rtsps://10.0.1.3/test.sdp")
105 | val username = "admin"
106 | val password = "secret"
107 | val stopped = AtomicBoolean(false)
108 | val sslSocket = NetUtils.createSslSocketAndConnect(uri.host!!, uri.port, 5000)
109 |
110 | val rtspClient = RtspClient.Builder(sslSocket, uri.toString(), stopped, rtspClientListener)
111 |     .requestVideo(true)
112 |     .requestAudio(true)
113 |     .withDebug(false)
114 |     .withUserAgent("RTSP client")
115 |     .withCredentials(username, password)
116 |     .build()
117 | // Blocking call until the stopped flag is set to true or the connection fails
118 | rtspClient.execute()
119 |
120 | NetUtils.closeSocket(sslSocket)
121 | ```
122 |
123 | ## How to get the lowest possible latency:
124 | There are two types of latency:
125 |
126 | ### Network latency
127 | If you want the lowest possible network latency, make sure that both the Android device and the RTSP camera are connected to the same network via Ethernet cable (not Wi-Fi).
128 |
129 | Another option is to decrease the stream bitrate on the RTSP camera. Smaller frames take less time to transfer.
130 |
131 | ### Video decoder latency
132 | Video decoder latency can vary significantly across Android devices and across RTSP camera streams.
133 |
134 | For the same profile/level and resolution (but different cameras), the latency can be 20 msec in the best case and 1200 msec in the worst case.
135 |
136 | To decrease latency, make sure you use the lowest possible H.264 video stream profile and level (enable `debug` in the library and check the SPS frame params `profile_idc` and `level_idc` in the log). `Baseline profile` should give the lowest decoder latency.
137 | Check the `max_num_reorder_frames` param as well. For best latency its value should be `0`.
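
Debug logging can be enabled on the view before starting the stream. A small sketch, assuming an `RtspSurfaceView` reference named `svVideo`:

```kotlin
// With debug enabled the library logs SDP/SPS details, so profile_idc, level_idc
// and max_num_reorder_frames can be checked in logcat
svVideo.debug = true
```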
138 |
139 | You can also try the [experimentalUpdateSpsFrameWithLowLatencyParams](https://github.com/alexeyvasilyev/rtsp-client-android/blob/master/library-client-rtsp/src/main/java/com/alexvas/rtsp/widget/RtspProcessor.kt#L106) library feature, which rewrites the config frame at runtime with low-latency parameters.
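
The feature is exposed as a property on `RtspSurfaceView`, as used in the demo app's `LiveFragment` (sketch; `svVideo` is an assumed view reference):

```kotlin
// EXPERIMENTAL: rewrite the SPS config frame with low-latency parameters before decoding
svVideo.experimentalUpdateSpsFrameWithLowLatencyParams = true
```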
140 |
--------------------------------------------------------------------------------
/app/.gitignore:
--------------------------------------------------------------------------------
1 | # Created by https://www.gitignore.io/api/android,java,intellij
2 |
3 | ### Android ###
4 | # Built application files
5 | *.apk
6 | *.ap_
7 |
8 | # Files for the Dalvik VM
9 | *.dex
10 |
11 | # Java class files
12 | *.class
13 |
14 | # Generated files
15 | bin/
16 | gen/
17 |
18 | # Gradle files
19 | .gradle/
20 | build/
21 |
22 | # Local configuration file (sdk path, etc)
23 | local.properties
24 |
25 | # Proguard folder generated by Eclipse
26 | proguard/
27 |
28 | xactmobile/class_files.txt
29 | xactmobile/mapping.txt
30 | xactmobile/seeds.txt
31 |
32 | # Log Files
33 | *.log
34 |
35 | # Android Studio Navigation editor temp files
36 | .navigation/
37 |
38 | ### Android Patch ###
39 | gen-external-apklibs
40 |
41 |
42 | ### Java ###
43 | *.class
44 |
45 | # Mobile Tools for Java (J2ME)
46 | .mtj.tmp/
47 |
48 | # Package Files #
49 | #*.jar
50 | *.war
51 | *.ear
52 |
53 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
54 | hs_err_pid*
55 |
56 |
57 | ### Intellij ###
58 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio
59 |
60 | *.iml
61 |
62 | ## Directory-based project format:
63 | .idea/
64 | # if you remove the above rule, at least ignore the following:
65 |
66 | # User-specific stuff:
67 | .idea/workspace.xml
68 | .idea/tasks.xml
69 | .idea/dictionaries
70 |
71 | # Sensitive or high-churn files:
72 | .idea/dataSources.ids
73 | .idea/dataSources.xml
74 | .idea/sqlDataSources.xml
75 | .idea/dynamic.xml
76 | .idea/uiDesigner.xml
77 |
78 | # Gradle:
79 | .idea/gradle.xml
80 | .idea/libraries
81 |
82 | # Mongo Explorer plugin:
83 | .idea/mongoSettings.xml
84 |
85 | ## File-based project format:
86 | *.ipr
87 | *.iws
88 |
89 | ## Plugin-specific files:
90 |
91 | # IntelliJ
92 | /out/
93 |
94 | # mpeltonen/sbt-idea plugin
95 | .idea_modules/
96 |
97 | # JIRA plugin
98 | atlassian-ide-plugin.xml
99 |
100 | # Crashlytics plugin (for Android Studio and IntelliJ)
101 | com_crashlytics_export_strings.xml
102 | crashlytics.properties
103 | crashlytics-build.properties
104 |
105 | xactmobile/.DS_Store~64be78fe3602626c61b52bcbfd09e09a6107b50a
106 | xactmobile/.DS_Store~HEAD
107 | oslab-viewpager/._.DS_Store
108 | oslab-viewpager/src/main/.DS_Store
109 | oslab-viewpager/src/main/._.DS_Store
110 | oslab-viewpager/src/main/res/.DS_Store
111 | oslab-viewpager/src/main/res/._.DS_Store
112 | oslab-viewpager/.gitignore
113 | oslab-materialdesign/.DS_Store
114 | oslab-materialdesign/._.DS_Store
115 | oslab-materialdesign/src/.DS_Store
116 | oslab-materialdesign/src/._.DS_Store
117 | oslab-materialdesign/src/main/.DS_Store
118 | oslab-materialdesign/src/main/._.DS_Store
119 | oslab-materialdesign/src/main/res/.DS_Store
120 | oslab-materialdesign/src/main/res/._.DS_Store
121 |
--------------------------------------------------------------------------------
/app/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: 'com.android.application'
2 | apply plugin: 'kotlin-android'
3 |
4 | android {
5 |
6 | compileSdk 35
7 |
8 | defaultConfig {
9 | applicationId "com.alexvas.rtsp.demo"
10 | minSdk 24
11 | targetSdk 34
12 | versionCode 1
13 | versionName "1.0"
14 | }
15 |
16 | buildTypes {
17 | release {
18 | minifyEnabled false
19 | proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
20 | }
21 | }
22 |
23 | // To inline the bytecode built with JVM target 1.8 into
24 | // bytecode that is being built with JVM target 1.6. (e.g. navArgs)
25 |
26 |
27 | compileOptions {
28 | sourceCompatibility JavaVersion.VERSION_17
29 | targetCompatibility JavaVersion.VERSION_17
30 | }
31 | kotlinOptions {
32 | jvmTarget = JavaVersion.VERSION_17.toString()
33 | }
34 | buildFeatures {
35 | viewBinding true
36 | }
37 | namespace 'com.alexvas.rtsp.demo'
38 | }
39 |
40 | dependencies {
41 | implementation fileTree(dir: 'libs', include: ['*.jar'])
42 | implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
43 | implementation 'androidx.appcompat:appcompat:1.7.0'
44 | implementation 'androidx.core:core-ktx:1.15.0'
45 | implementation 'com.google.android.material:material:1.12.0'
46 | implementation 'androidx.constraintlayout:constraintlayout:2.2.0'
47 | implementation 'androidx.lifecycle:lifecycle-extensions:2.2.0'
48 |
49 | def androidx_navigation_version = '2.8.5'
50 | implementation "androidx.navigation:navigation-fragment-ktx:$androidx_navigation_version"
51 | implementation "androidx.navigation:navigation-ui-ktx:$androidx_navigation_version"
54 |
55 | def logcat_core_version = '3.3.1'
56 | api "com.github.AppDevNext.Logcat:LogcatCoreLib:$logcat_core_version"
57 | api "com.github.AppDevNext.Logcat:LogcatCoreUI:$logcat_core_version"
58 |
59 | implementation project(':library-client-rtsp')
60 | }
61 |
--------------------------------------------------------------------------------
/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # You can control the set of applied configuration files using the
3 | # proguardFiles setting in build.gradle.
4 | #
5 | # For more details, see
6 | # http://developer.android.com/guide/developing/tools/proguard.html
7 |
8 | # If your project uses WebView with JS, uncomment the following
9 | # and specify the fully qualified class name to the JavaScript interface
10 | # class:
11 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
12 | # public *;
13 | #}
14 |
15 | # Uncomment this to preserve the line number information for
16 | # debugging stack traces.
17 | #-keepattributes SourceFile,LineNumberTable
18 |
19 | # If you keep the line number information, uncomment this to
20 | # hide the original source file name.
21 | #-renamesourcefileattribute SourceFile
22 |
--------------------------------------------------------------------------------
/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/app/src/main/java/com/alexvas/rtsp/demo/MainActivity.kt:
--------------------------------------------------------------------------------
1 | package com.alexvas.rtsp.demo
2 |
3 | import android.os.Bundle
4 | import com.google.android.material.bottomnavigation.BottomNavigationView
5 | import androidx.appcompat.app.AppCompatActivity
6 | import androidx.navigation.findNavController
7 | import androidx.navigation.ui.setupWithNavController
8 |
9 | class MainActivity : AppCompatActivity() {
10 |
11 | override fun onCreate(savedInstanceState: Bundle?) {
12 | super.onCreate(savedInstanceState)
13 | setContentView(R.layout.activity_main)
14 | val navView: BottomNavigationView = findViewById(R.id.nav_view)
15 |
16 | val navController = findNavController(R.id.nav_host_fragment)
17 | // Passing each menu ID as a set of Ids because each
18 | // menu should be considered as top level destinations.
19 | // val appBarConfiguration = AppBarConfiguration(setOf(
20 | // R.id.navigation_live, R.id.navigation_logs))
21 | // setupActionBarWithNavController(navController, appBarConfiguration)
22 | navView.setupWithNavController(navController)
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/app/src/main/java/com/alexvas/rtsp/demo/live/LiveFragment.kt:
--------------------------------------------------------------------------------
1 | package com.alexvas.rtsp.demo.live
2 |
3 | import android.annotation.SuppressLint
4 | import android.graphics.Bitmap
5 | import android.net.Uri
6 | import android.os.Bundle
7 | import android.os.Handler
8 | import android.os.HandlerThread
9 | import android.util.Log
10 | import android.view.*
11 | import android.widget.Toast
12 | import androidx.constraintlayout.widget.ConstraintSet
13 | import androidx.fragment.app.Fragment
14 | import androidx.lifecycle.ViewModelProvider
15 | import com.alexvas.rtsp.codec.VideoDecodeThread
16 | import com.alexvas.rtsp.demo.databinding.FragmentLiveBinding
17 | import com.alexvas.rtsp.widget.RtspDataListener
18 | import com.alexvas.rtsp.widget.RtspImageView
19 | import com.alexvas.rtsp.widget.RtspStatusListener
20 | import com.alexvas.rtsp.widget.toHexString
21 | import java.util.Timer
22 | import java.util.TimerTask
23 | import java.util.concurrent.atomic.AtomicBoolean
24 | import kotlin.math.min
25 |
26 | @SuppressLint("LogNotTimber")
27 | class LiveFragment : Fragment() {
28 |
29 | private lateinit var binding: FragmentLiveBinding
30 | private lateinit var liveViewModel: LiveViewModel
31 |
32 | private var statisticsTimer: Timer? = null
33 | private var svVideoSurfaceResolution = Pair(0, 0)
34 |
35 | private val rtspStatusSurfaceListener = object: RtspStatusListener {
36 | override fun onRtspStatusConnecting() {
37 | if (DEBUG) Log.v(TAG, "onRtspStatusConnecting()")
38 | binding.apply {
39 | tvStatusSurface.text = "RTSP connecting"
40 | pbLoadingSurface.visibility = View.VISIBLE
41 | vShutterSurface.visibility = View.VISIBLE
42 | llRtspParams.apply {
43 | etRtspRequest.isEnabled = false
44 | etRtspUsername.isEnabled = false
45 | etRtspPassword.isEnabled = false
46 | cbVideo.isEnabled = false
47 | cbAudio.isEnabled = false
48 | cbApplication.isEnabled = false
49 | cbDebug.isEnabled = false
50 | }
51 | tgRotation.isEnabled = false
52 | }
53 | }
54 |
55 | override fun onRtspStatusConnected() {
56 | if (DEBUG) Log.v(TAG, "onRtspStatusConnected()")
57 | binding.apply {
58 | tvStatusSurface.text = "RTSP connected"
59 | bnStartStopSurface.text = "Stop RTSP"
60 | }
61 | setKeepScreenOn(true)
62 | }
63 |
64 | override fun onRtspStatusDisconnecting() {
65 | if (DEBUG) Log.v(TAG, "onRtspStatusDisconnecting()")
66 | binding.apply {
67 | tvStatusSurface.text = "RTSP disconnecting"
68 | }
69 | }
70 |
71 | override fun onRtspStatusDisconnected() {
72 | if (DEBUG) Log.v(TAG, "onRtspStatusDisconnected()")
73 | binding.apply {
74 | tvStatusSurface.text = "RTSP disconnected"
75 | bnStartStopSurface.text = "Start RTSP"
76 | pbLoadingSurface.visibility = View.GONE
77 | vShutterSurface.visibility = View.VISIBLE
78 | pbLoadingSurface.isEnabled = false
79 | llRtspParams.apply {
80 | cbVideo.isEnabled = true
81 | cbAudio.isEnabled = true
82 | cbApplication.isEnabled = true
83 | cbDebug.isEnabled = true
84 | etRtspRequest.isEnabled = true
85 | etRtspUsername.isEnabled = true
86 | etRtspPassword.isEnabled = true
87 | }
88 | tgRotation.isEnabled = true
89 | }
90 | setKeepScreenOn(false)
91 | }
92 |
93 | override fun onRtspStatusFailedUnauthorized() {
94 | if (DEBUG) Log.e(TAG, "onRtspStatusFailedUnauthorized()")
95 | if (context == null) return
96 | onRtspStatusDisconnected()
97 | binding.apply {
98 | tvStatusSurface.text = "RTSP username or password invalid"
99 | pbLoadingSurface.visibility = View.GONE
100 | }
101 | }
102 |
103 | override fun onRtspStatusFailed(message: String?) {
104 | if (DEBUG) Log.e(TAG, "onRtspStatusFailed(message='$message')")
105 | if (context == null) return
106 | onRtspStatusDisconnected()
107 | binding.apply {
108 | tvStatusSurface.text = "Error: $message"
109 | pbLoadingSurface.visibility = View.GONE
110 | }
111 | }
112 |
113 | override fun onRtspFirstFrameRendered() {
114 | if (DEBUG) Log.v(TAG, "onRtspFirstFrameRendered()")
115 | Log.i(TAG, "First frame rendered")
116 | binding.apply {
117 | pbLoadingSurface.visibility = View.GONE
118 | vShutterSurface.visibility = View.GONE
119 | bnSnapshotSurface.isEnabled = true
120 | }
121 | }
122 |
123 | override fun onRtspFrameSizeChanged(width: Int, height: Int) {
124 | if (DEBUG) Log.v(TAG, "onRtspFrameSizeChanged(width=$width, height=$height)")
125 | Log.i(TAG, "Video resolution changed to ${width}x${height}")
126 | svVideoSurfaceResolution = Pair(width, height)
127 | ConstraintSet().apply {
128 | clone(binding.csVideoSurface)
129 | setDimensionRatio(binding.svVideoSurface.id, "$width:$height")
130 | applyTo(binding.csVideoSurface)
131 | }
132 | }
133 | }
134 |
135 | private val rtspDataListener = object: RtspDataListener {
136 | override fun onRtspDataApplicationDataReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {
137 | val numBytesDump = min(length, 25) // dump max 25 bytes
138 | Log.i(TAG, "RTSP app data ($length bytes): ${data.toHexString(offset, offset + numBytesDump)}")
139 | }
140 | }
141 |
142 | private val rtspStatusImageListener = object: RtspStatusListener {
143 | override fun onRtspStatusConnecting() {
144 | if (DEBUG) Log.v(TAG, "onRtspStatusConnecting()")
145 | binding.apply {
146 | tvStatusImage.text = "RTSP connecting"
147 | pbLoadingImage.visibility = View.VISIBLE
148 | vShutterImage.visibility = View.VISIBLE
149 | }
150 | }
151 |
152 | override fun onRtspStatusConnected() {
153 | if (DEBUG) Log.v(TAG, "onRtspStatusConnected()")
154 | binding.apply {
155 | tvStatusImage.text = "RTSP connected"
156 | bnStartStopImage.text = "Stop RTSP"
157 | }
158 | setKeepScreenOn(true)
159 | }
160 |
161 | override fun onRtspStatusDisconnecting() {
162 | if (DEBUG) Log.v(TAG, "onRtspStatusDisconnecting()")
163 | binding.apply {
164 | tvStatusImage.text = "RTSP disconnecting"
165 | }
166 | }
167 |
168 | override fun onRtspStatusDisconnected() {
169 | if (DEBUG) Log.v(TAG, "onRtspStatusDisconnected()")
170 | binding.apply {
171 | tvStatusImage.text = "RTSP disconnected"
172 | bnStartStopImage.text = "Start RTSP"
173 | pbLoadingImage.visibility = View.GONE
174 | vShutterImage.visibility = View.VISIBLE
175 | pbLoadingImage.isEnabled = false
176 | }
177 | setKeepScreenOn(false)
178 | }
179 |
180 | override fun onRtspStatusFailedUnauthorized() {
181 | if (DEBUG) Log.e(TAG, "onRtspStatusFailedUnauthorized()")
182 | if (context == null) return
183 | onRtspStatusDisconnected()
184 | binding.apply {
185 | tvStatusImage.text = "RTSP username or password invalid"
186 | pbLoadingImage.visibility = View.GONE
187 | }
188 | }
189 |
190 | override fun onRtspStatusFailed(message: String?) {
191 | if (DEBUG) Log.e(TAG, "onRtspStatusFailed(message='$message')")
192 | if (context == null) return
193 | onRtspStatusDisconnected()
194 | binding.apply {
195 | tvStatusImage.text = "Error: $message"
196 | pbLoadingImage.visibility = View.GONE
197 | }
198 | }
199 |
200 | override fun onRtspFirstFrameRendered() {
201 | if (DEBUG) Log.v(TAG, "onRtspFirstFrameRendered()")
202 | Log.i(TAG, "First frame rendered")
203 | binding.apply {
204 | vShutterImage.visibility = View.GONE
205 | pbLoadingImage.visibility = View.GONE
206 | }
207 | }
208 |
209 | override fun onRtspFrameSizeChanged(width: Int, height: Int) {
210 | if (DEBUG) Log.v(TAG, "onRtspFrameSizeChanged(width=$width, height=$height)")
211 | Log.i(TAG, "Video resolution changed to ${width}x${height}")
212 | ConstraintSet().apply {
213 | clone(binding.csVideoImage)
214 | setDimensionRatio(binding.ivVideoImage.id, "$width:$height")
215 | applyTo(binding.csVideoImage)
216 | }
217 | }
218 | }
219 |
220 | private fun getSnapshot(): Bitmap? {
221 | if (DEBUG) Log.v(TAG, "getSnapshot()")
222 | val surfaceBitmap = Bitmap.createBitmap(1920, 1080, Bitmap.Config.ARGB_8888)
223 | val lock = Object()
224 | val success = AtomicBoolean(false)
225 | val thread = HandlerThread("PixelCopyHelper")
226 | thread.start()
227 | val sHandler = Handler(thread.looper)
228 | val listener = PixelCopy.OnPixelCopyFinishedListener { copyResult ->
229 | success.set(copyResult == PixelCopy.SUCCESS)
230 | synchronized (lock) {
231 | lock.notify()
232 | }
233 | }
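// PixelCopy.request() completes asynchronously on sHandler, so block on the lock until the listener above signals the result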
234 | synchronized (lock) {
235 | PixelCopy.request(binding.svVideoSurface.holder.surface, surfaceBitmap, listener, sHandler)
236 | lock.wait()
237 | }
238 | thread.quitSafely()
239 | return if (success.get()) surfaceBitmap else null
240 | }
241 |
242 | override fun onCreateView(inflater: LayoutInflater, container: ViewGroup?, savedInstanceState: Bundle?): View {
243 | if (DEBUG) Log.v(TAG, "onCreateView()")
244 |
245 | liveViewModel = ViewModelProvider(this)[LiveViewModel::class.java]
246 | binding = FragmentLiveBinding.inflate(inflater, container, false)
247 |
248 | binding.bnVideoDecoderGroup.check(binding.bnVideoDecoderHardware.id)
249 |
250 | binding.svVideoSurface.setStatusListener(rtspStatusSurfaceListener)
251 | binding.svVideoSurface.setDataListener(rtspDataListener)
252 | binding.ivVideoImage.setStatusListener(rtspStatusImageListener)
253 | binding.ivVideoImage.setDataListener(rtspDataListener)
254 |
255 | liveViewModel.initEditTexts(
256 | binding.llRtspParams.etRtspRequest,
257 | binding.llRtspParams.etRtspUsername,
258 | binding.llRtspParams.etRtspPassword
259 | )
260 |
261 | liveViewModel.rtspRequest.observe(viewLifecycleOwner) {
262 | if (binding.llRtspParams.etRtspRequest.text.toString() != it)
263 | binding.llRtspParams.etRtspRequest.setText(it)
264 | }
265 | liveViewModel.rtspUsername.observe(viewLifecycleOwner) {
266 | if (binding.llRtspParams.etRtspUsername.text.toString() != it)
267 | binding.llRtspParams.etRtspUsername.setText(it)
268 | }
269 | liveViewModel.rtspPassword.observe(viewLifecycleOwner) {
270 | if (binding.llRtspParams.etRtspPassword.text.toString() != it)
271 | binding.llRtspParams.etRtspPassword.setText(it)
272 | }
273 |
274 | binding.cbExperimentalRewriteSps.setOnCheckedChangeListener { _, isChecked ->
275 | binding.svVideoSurface.experimentalUpdateSpsFrameWithLowLatencyParams = isChecked
276 | }
277 |
278 | binding.bnRotate0.setOnClickListener {
279 | binding.svVideoSurface.videoRotation = 0
280 | binding.ivVideoImage.videoRotation = 0
281 | }
282 |
283 | binding.bnRotate90.setOnClickListener {
284 | binding.svVideoSurface.videoRotation = 90
285 | binding.ivVideoImage.videoRotation = 90
286 | }
287 |
288 | binding.bnRotate180.setOnClickListener {
289 | binding.svVideoSurface.videoRotation = 180
290 | binding.ivVideoImage.videoRotation = 180
291 | }
292 |
293 | binding.bnRotate270.setOnClickListener {
294 | binding.svVideoSurface.videoRotation = 270
295 | binding.ivVideoImage.videoRotation = 270
296 | }
297 |
298 | binding.bnRotate0.performClick()
299 |
300 | binding.bnVideoDecoderHardware.setOnClickListener {
301 | binding.svVideoSurface.videoDecoderType = VideoDecodeThread.DecoderType.HARDWARE
302 | binding.ivVideoImage.videoDecoderType = VideoDecodeThread.DecoderType.HARDWARE
303 | }
304 |
305 | binding.bnVideoDecoderSoftware.setOnClickListener {
306 | binding.svVideoSurface.videoDecoderType = VideoDecodeThread.DecoderType.SOFTWARE
307 | binding.ivVideoImage.videoDecoderType = VideoDecodeThread.DecoderType.SOFTWARE
308 | }
309 |
310 | binding.bnStartStopSurface.setOnClickListener {
311 | if (binding.svVideoSurface.isStarted()) {
312 | binding.svVideoSurface.stop()
313 | stopStatistics()
314 | } else {
315 | val uri = Uri.parse(liveViewModel.rtspRequest.value)
316 | binding.svVideoSurface.apply {
317 | init(
318 | uri,
319 | username = liveViewModel.rtspUsername.value,
320 | password = liveViewModel.rtspPassword.value,
321 | userAgent = "rtsp-client-android")
322 | debug = binding.llRtspParams.cbDebug.isChecked
323 | start(
324 | requestVideo = binding.llRtspParams.cbVideo.isChecked,
325 | requestAudio = binding.llRtspParams.cbAudio.isChecked,
326 | requestApplication = binding.llRtspParams.cbApplication.isChecked
327 | )
328 | }
329 | startStatistics()
330 | }
331 | }
332 |
333 | binding.bnStartStopImage.setOnClickListener {
334 | if (binding.ivVideoImage.isStarted()) {
335 | binding.ivVideoImage.stop()
336 | stopStatistics()
337 | } else {
338 | val uri = Uri.parse(liveViewModel.rtspRequest.value)
339 | binding.ivVideoImage.apply {
340 | init(uri, liveViewModel.rtspUsername.value, liveViewModel.rtspPassword.value, "rtsp-client-android")
341 | debug = binding.llRtspParams.cbDebug.isChecked
342 | onRtspImageBitmapListener = object : RtspImageView.RtspImageBitmapListener {
343 | override fun onRtspImageBitmapObtained(bitmap: Bitmap) {
344 | // TODO: You can send bitmap for processing
345 | }
346 | }
347 | start(
348 | requestVideo = binding.llRtspParams.cbVideo.isChecked,
349 | requestAudio = binding.llRtspParams.cbAudio.isChecked,
350 | requestApplication = binding.llRtspParams.cbApplication.isChecked
351 | )
352 | }
353 | startStatistics()
354 | }
355 | }
356 |
357 | binding.bnSnapshotSurface.setOnClickListener {
358 | val bitmap = getSnapshot()
359 | // TODO Save snapshot to DCIM folder
360 | if (bitmap != null) {
361 | Toast.makeText(requireContext(), "Snapshot succeeded", Toast.LENGTH_LONG).show()
362 | } else {
363 | Toast.makeText(requireContext(), "Snapshot failed", Toast.LENGTH_LONG).show()
364 | }
365 | }
366 | return binding.root
367 | }
368 |
369 | override fun onResume() {
370 | if (DEBUG) Log.v(TAG, "onResume()")
371 | super.onResume()
372 | liveViewModel.loadParams(requireContext())
373 | }
374 |
375 | override fun onPause() {
376 | val started = binding.svVideoSurface.isStarted()
377 | if (DEBUG) Log.v(TAG, "onPause(), started:$started")
378 | super.onPause()
379 | liveViewModel.saveParams(requireContext())
380 |
381 | if (started) {
382 | binding.svVideoSurface.stop()
383 | stopStatistics()
384 | }
385 | }
386 |
387 | private fun startStatistics() {
388 | if (DEBUG) Log.v(TAG, "startStatistics()")
389 | Log.i(TAG, "Start statistics")
390 | if (statisticsTimer == null) {
391 | val task: TimerTask = object : TimerTask() {
392 | override fun run() {
393 | val statistics = binding.svVideoSurface.statistics
394 | val text =
395 | "Video decoder: ${statistics.videoDecoderType.toString().lowercase()} ${if (statistics.videoDecoderName.isNullOrEmpty()) "" else "(${statistics.videoDecoderName})"}" +
396 | "\nVideo decoder latency: ${statistics.videoDecoderLatencyMsec} ms" +
397 | "\nResolution: ${svVideoSurfaceResolution.first}x${svVideoSurfaceResolution.second}"
398 | // "\nNetwork latency: "
399 |
400 | // // Assume that difference between current Android time and camera time cannot be more than 5 sec.
401 | // // Otherwise time need to be synchronized on both devices.
402 | // text += if (statistics.networkLatencyMsec == -1) {
403 | // "-"
404 | // } else if (statistics.networkLatencyMsec < 0 || statistics.networkLatencyMsec > TimeUnit.SECONDS.toMillis(5)) {
405 | // "[time out of sync]"
406 | // } else {
407 | // "${statistics.networkLatencyMsec} ms"
408 | // }
409 |
410 | binding.tvStatistics.post {
411 | binding.tvStatistics.text = text
412 | }
413 | }
414 | }
415 | statisticsTimer = Timer("${TAG}::Statistics").apply {
416 | schedule(task, 0, 1000)
417 | }
418 | }
419 | }
420 |
421 | private fun stopStatistics() {
422 | if (DEBUG) Log.v(TAG, "stopStatistics()")
423 | statisticsTimer?.apply {
424 | Log.i(TAG, "Stop statistics")
425 | cancel()
426 | }
427 | statisticsTimer = null
428 | }
429 |
430 | private fun setKeepScreenOn(enable: Boolean) {
431 | if (DEBUG) Log.v(TAG, "setKeepScreenOn(enable=$enable)")
432 | if (enable) {
433 | activity?.apply {
434 | window.addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON)
435 | Log.i(TAG, "Enabled keep screen on")
436 | }
437 | } else {
438 | activity?.apply {
439 | window.clearFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON)
440 | Log.i(TAG, "Disabled keep screen on")
441 | }
442 | }
443 | }
444 | companion object {
445 | private val TAG: String = LiveFragment::class.java.simpleName
446 | private const val DEBUG = true
447 | }
448 |
449 | }
450 |
--------------------------------------------------------------------------------
/app/src/main/java/com/alexvas/rtsp/demo/live/LiveViewModel.kt:
--------------------------------------------------------------------------------
1 | package com.alexvas.rtsp.demo.live
2 |
3 | import android.annotation.SuppressLint
4 | import android.content.Context
5 | import android.text.Editable
6 | import android.text.TextWatcher
7 | import android.util.Log
8 | import android.widget.EditText
9 | import androidx.lifecycle.MutableLiveData
10 | import androidx.lifecycle.ViewModel
11 |
12 | private const val RTSP_REQUEST_KEY = "rtsp_request"
13 | private const val RTSP_USERNAME_KEY = "rtsp_username"
14 | private const val RTSP_PASSWORD_KEY = "rtsp_password"
15 |
16 | private const val DEFAULT_RTSP_REQUEST = "rtsp://10.0.1.3:554/axis-media/media.amp"
17 | private const val DEFAULT_RTSP_USERNAME = ""
18 | private const val DEFAULT_RTSP_PASSWORD = ""
19 |
20 | private const val LIVE_PARAMS_FILENAME = "live_params"
21 |
22 | @SuppressLint("LogNotTimber")
23 | class LiveViewModel : ViewModel() {
24 |
25 | val rtspRequest = MutableLiveData<String>().apply {
26 | value = DEFAULT_RTSP_REQUEST
27 | }
28 | val rtspUsername = MutableLiveData<String>().apply {
29 | value = DEFAULT_RTSP_USERNAME
30 | }
31 | val rtspPassword = MutableLiveData<String>().apply {
32 | value = DEFAULT_RTSP_PASSWORD
33 | }
34 |
35 | // private val _text = MutableLiveData<String>().apply {
36 | // value = "This is live Fragment"
37 | // }
38 | // val text: LiveData<String> = _text
39 |
40 | // init {
41 | // // Here you could use the ID to get the user info from the DB or remote server
42 | // rtspRequest.value = "rtsp://10.0.1.3:554/axis-media/media.amp"
43 | // }
44 |
45 | fun loadParams(context: Context) {
46 | if (DEBUG) Log.v(TAG, "loadParams()")
47 | val pref = context.getSharedPreferences(LIVE_PARAMS_FILENAME, Context.MODE_PRIVATE)
48 | try {
49 | rtspRequest.setValue(pref.getString(RTSP_REQUEST_KEY, DEFAULT_RTSP_REQUEST))
50 | } catch (e: ClassCastException) {
51 | e.printStackTrace()
52 | }
53 | try {
54 | rtspUsername.setValue(pref.getString(RTSP_USERNAME_KEY, DEFAULT_RTSP_USERNAME))
55 | } catch (e: ClassCastException) {
56 | e.printStackTrace()
57 | }
58 | try {
59 | rtspPassword.setValue(pref.getString(RTSP_PASSWORD_KEY, DEFAULT_RTSP_PASSWORD))
60 | } catch (e: ClassCastException) {
61 | e.printStackTrace()
62 | }
63 | }
64 |
65 | fun saveParams(context: Context) {
66 | if (DEBUG) Log.v(TAG, "saveParams()")
67 | context.getSharedPreferences(LIVE_PARAMS_FILENAME, Context.MODE_PRIVATE).edit().apply {
68 | putString(RTSP_REQUEST_KEY, rtspRequest.value)
69 | putString(RTSP_USERNAME_KEY, rtspUsername.value)
70 | putString(RTSP_PASSWORD_KEY, rtspPassword.value)
71 | apply()
72 | }
73 | }
74 |
75 | fun initEditTexts(etRtspRequest: EditText, etRtspUsername: EditText, etRtspPassword: EditText) {
76 | if (DEBUG) Log.v(TAG, "initEditTexts()")
77 | etRtspRequest.addTextChangedListener(object : TextWatcher {
78 | override fun afterTextChanged(s: Editable?) {
79 | }
80 | override fun beforeTextChanged(s: CharSequence?, start: Int, count: Int, after: Int) {
81 | }
82 | override fun onTextChanged(s: CharSequence?, start: Int, before: Int, count: Int) {
83 | val text = s.toString()
84 | if (text != rtspRequest.value) {
85 | rtspRequest.value = text
86 | }
87 | }
88 | })
89 | etRtspUsername.addTextChangedListener(object : TextWatcher {
90 | override fun afterTextChanged(s: Editable?) {
91 | }
92 | override fun beforeTextChanged(s: CharSequence?, start: Int, count: Int, after: Int) {
93 | }
94 | override fun onTextChanged(s: CharSequence?, start: Int, before: Int, count: Int) {
95 | val text = s.toString()
96 | if (text != rtspUsername.value) {
97 | rtspUsername.value = text
98 | }
99 | }
100 | })
101 | etRtspPassword.addTextChangedListener(object : TextWatcher {
102 | override fun afterTextChanged(s: Editable?) {
103 | }
104 | override fun beforeTextChanged(s: CharSequence?, start: Int, count: Int, after: Int) {
105 | }
106 | override fun onTextChanged(s: CharSequence?, start: Int, before: Int, count: Int) {
107 | val text = s.toString()
108 | if (text != rtspPassword.value) {
109 | rtspPassword.value = text
110 | }
111 | }
112 | })
113 | }
114 |
115 | companion object {
116 | private val TAG: String = LiveViewModel::class.java.simpleName
117 | private const val DEBUG = false
118 |
119 |
120 | }
121 |
122 | }
123 |
--------------------------------------------------------------------------------
/app/src/main/java/com/alexvas/rtsp/demo/live/RawFragment.kt:
--------------------------------------------------------------------------------
1 | package com.alexvas.rtsp.demo.live
2 |
3 | import android.annotation.SuppressLint
4 | import android.net.Uri
5 | import android.os.Bundle
6 | import android.util.Log
7 | import android.view.LayoutInflater
8 | import android.view.View
9 | import android.view.ViewGroup
10 | import androidx.fragment.app.Fragment
11 | import androidx.lifecycle.ViewModelProvider
12 | import com.alexvas.rtsp.RtspClient
13 | import com.alexvas.rtsp.demo.databinding.FragmentRawBinding
14 | import com.alexvas.rtsp.widget.toHexString
15 | import com.alexvas.utils.NetUtils
16 | import kotlinx.coroutines.Runnable
17 | import java.net.Socket
18 | import java.util.Timer
19 | import java.util.TimerTask
20 | import java.util.concurrent.atomic.AtomicBoolean
21 | import kotlin.math.min
22 |
23 | @SuppressLint("LogNotTimber")
24 | class RawFragment : Fragment() {
25 |
26 | private lateinit var binding: FragmentRawBinding
27 | private lateinit var liveViewModel: LiveViewModel
28 |
29 | private var statisticsTimer: Timer? = null
30 | private val rtspStopped = AtomicBoolean(true)
31 |
32 | private var rtspVideoBytesReceived: Long = 0
33 | private var rtspVideoFramesReceived: Long = 0
34 | private var rtspAudioBytesReceived: Long = 0
35 | private var rtspAudioSamplesReceived: Long = 0
36 | private var rtspApplicationBytesReceived: Long = 0
37 | private var rtspApplicationSamplesReceived: Long = 0
38 |
39 | private val rtspClientListener = object: RtspClient.RtspClientListener {
40 | override fun onRtspConnecting() {
41 | if (DEBUG) Log.v(TAG, "onRtspConnecting()")
42 | rtspVideoBytesReceived = 0
43 | rtspVideoFramesReceived = 0
44 | rtspAudioBytesReceived = 0
45 | rtspAudioSamplesReceived = 0
46 | rtspApplicationBytesReceived = 0
47 | rtspApplicationSamplesReceived = 0
48 |
49 | binding.apply {
50 | root.post {
51 | updateStatistics()
52 | llRtspParams.etRtspRequest.isEnabled = false
53 | llRtspParams.etRtspUsername.isEnabled = false
54 | llRtspParams.etRtspPassword.isEnabled = false
55 | llRtspParams.cbVideo.isEnabled = false
56 | llRtspParams.cbAudio.isEnabled = false
57 | llRtspParams.cbApplication.isEnabled = false
58 | llRtspParams.cbDebug.isEnabled = false
59 | tvStatusSurface.text = "RTSP connecting"
60 | bnStartStop.text = "Stop RTSP"
61 | }
62 | }
63 | }
64 |
65 | override fun onRtspConnected(sdpInfo: RtspClient.SdpInfo) {
66 | if (DEBUG) Log.v(TAG, "onRtspConnected()")
67 | binding.apply {
68 | root.post {
69 | tvStatusSurface.text = "RTSP connected"
70 | }
71 | }
72 | startStatistics()
73 | }
74 |
75 | override fun onRtspVideoNalUnitReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {
76 | val numBytesDump = min(length, 25) // dump max 25 bytes
77 | Log.i(TAG, "RTSP video data ($length bytes): ${data.toHexString(offset, offset + numBytesDump)}")
78 | rtspVideoBytesReceived += length
79 | rtspVideoFramesReceived++
80 | }
81 |
82 | override fun onRtspAudioSampleReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {
83 | val numBytesDump = min(length, 25) // dump max 25 bytes
84 | Log.i(TAG, "RTSP audio data ($length bytes): ${data.toHexString(offset, offset + numBytesDump)}")
85 | rtspAudioBytesReceived += length
86 | rtspAudioSamplesReceived++
87 | }
88 |
89 | override fun onRtspApplicationDataReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {
90 | val numBytesDump = min(length, 25) // dump max 25 bytes
91 | Log.i(TAG, "RTSP app data ($length bytes): ${data.toHexString(offset, offset + numBytesDump)}")
92 | rtspApplicationBytesReceived += length
93 | rtspApplicationSamplesReceived++
94 | }
95 |
96 | override fun onRtspDisconnecting() {
97 | if (DEBUG) Log.v(TAG, "onRtspDisconnecting()")
98 | binding.apply {
99 | root.post {
100 | tvStatusSurface.text = "RTSP disconnecting"
101 | }
102 | }
103 | stopStatistics()
104 | }
105 |
106 | override fun onRtspDisconnected() {
107 | if (DEBUG) Log.v(TAG, "onRtspDisconnected()")
108 | binding.apply {
109 | root.post {
110 | tvStatusSurface.text = "RTSP disconnected"
111 | bnStartStop.text = "Start RTSP"
112 | llRtspParams.cbVideo.isEnabled = true
113 | llRtspParams.cbAudio.isEnabled = true
114 | llRtspParams.cbApplication.isEnabled = true
115 | llRtspParams.cbDebug.isEnabled = true
116 | llRtspParams.etRtspRequest.isEnabled = true
117 | llRtspParams.etRtspUsername.isEnabled = true
118 | llRtspParams.etRtspPassword.isEnabled = true
119 | }
120 | }
121 | }
122 |
123 | override fun onRtspFailedUnauthorized() {
124 | if (DEBUG) Log.e(TAG, "onRtspFailedUnauthorized()")
125 | Log.e(TAG, "RTSP failed unauthorized")
126 | if (context == null) return
127 | onRtspDisconnected()
128 | binding.apply {
129 | root.post {
130 | tvStatusSurface.text = "RTSP username or password invalid"
131 | }
132 | }
133 | }
134 |
135 | override fun onRtspFailed(message: String?) {
136 | if (DEBUG) Log.e(TAG, "onRtspFailed(message='$message')")
137 | Log.e(TAG, "RTSP failed with message '$message'")
138 | if (context == null) return
139 | onRtspDisconnected()
140 | binding.apply {
141 | root.post {
142 | tvStatusSurface.text = "Error: $message"
143 | }
144 | }
145 | }
146 | }
147 |
148 | private val threadRunnable = Runnable {
149 | Log.i(TAG, "Thread started")
150 | var socket: Socket? = null
151 | try {
152 | val uri = Uri.parse(liveViewModel.rtspRequest.value)
153 | val port = if (uri.port == -1) DEFAULT_RTSP_PORT else uri.port
154 | socket = NetUtils.createSocketAndConnect(uri.host!!, port, 5000)
155 |
156 | val rtspClient =
157 | RtspClient.Builder(
158 | socket,
159 | uri.toString(),
160 | rtspStopped,
161 | rtspClientListener
162 | )
163 | .requestVideo(binding.llRtspParams.cbVideo.isChecked)
164 | .requestAudio(binding.llRtspParams.cbAudio.isChecked)
165 | .requestApplication(binding.llRtspParams.cbApplication.isChecked)
166 | .withDebug(binding.llRtspParams.cbDebug.isChecked)
167 | .withUserAgent("rtsp-client-android")
168 | .withCredentials(
169 | binding.llRtspParams.etRtspUsername.text.toString(),
170 | binding.llRtspParams.etRtspPassword.text.toString())
171 | .build()
172 |
173 | rtspClient.execute()
174 | } catch (e: Exception) {
175 | e.printStackTrace()
176 | binding.root.post { rtspClientListener.onRtspFailed(e.message) }
177 | } finally {
178 | NetUtils.closeSocket(socket)
179 | }
180 | Log.i(TAG, "Thread stopped")
181 | }
182 |
183 | override fun onCreateView(inflater: LayoutInflater, container: ViewGroup?, savedInstanceState: Bundle?): View {
184 | if (DEBUG) Log.v(TAG, "onCreateView()")
185 |
186 | liveViewModel = ViewModelProvider(this)[LiveViewModel::class.java]
187 | binding = FragmentRawBinding.inflate(inflater, container, false)
188 |
189 | liveViewModel.initEditTexts(
190 | binding.llRtspParams.etRtspRequest,
191 | binding.llRtspParams.etRtspUsername,
192 | binding.llRtspParams.etRtspPassword
193 | )
194 | liveViewModel.rtspRequest.observe(viewLifecycleOwner) {
195 | if (binding.llRtspParams.etRtspRequest.text.toString() != it)
196 | binding.llRtspParams.etRtspRequest.setText(it)
197 | }
198 | liveViewModel.rtspUsername.observe(viewLifecycleOwner) {
199 | if (binding.llRtspParams.etRtspUsername.text.toString() != it)
200 | binding.llRtspParams.etRtspUsername.setText(it)
201 | }
202 | liveViewModel.rtspPassword.observe(viewLifecycleOwner) {
203 | if (binding.llRtspParams.etRtspPassword.text.toString() != it)
204 | binding.llRtspParams.etRtspPassword.setText(it)
205 | }
206 |
207 | binding.bnStartStop.setOnClickListener {
208 | if (DEBUG) Log.v(TAG, "onClick() rtspStopped=${rtspStopped.get()}")
209 | if (rtspStopped.get()) {
210 | rtspStopped.set(false)
211 | Log.i(TAG, "Thread starting...")
212 | Thread(threadRunnable).apply {
213 | name = "RTSP raw thread"
214 | start()
215 | }
216 | } else {
217 | Log.i(TAG, "Thread stopping...")
218 | rtspStopped.set(true)
219 | }
220 | }
221 | return binding.root
222 | }
223 |
224 | override fun onResume() {
225 | if (DEBUG) Log.v(TAG, "onResume()")
226 | super.onResume()
227 | liveViewModel.loadParams(requireContext())
228 | }
229 |
230 | override fun onPause() {
231 | if (DEBUG) Log.v(TAG, "onPause()")
232 | super.onPause()
233 | liveViewModel.saveParams(requireContext())
234 |
235 | stopStatistics()
236 | rtspStopped.set(true)
237 | }
238 |
239 | private fun updateStatistics() {
240 | // if (DEBUG) Log.v(TAG, "updateStatistics()")
241 | binding.apply {
242 | tvStatisticsVideo.text = "Video: $rtspVideoBytesReceived bytes, $rtspVideoFramesReceived frames"
243 | tvStatisticsAudio.text = "Audio: $rtspAudioBytesReceived bytes, $rtspAudioSamplesReceived samples"
244 | tvStatisticsApplication.text = "Application: $rtspApplicationBytesReceived bytes, $rtspApplicationSamplesReceived samples"
245 | }
246 | }
247 |
248 | private fun startStatistics() {
249 | if (DEBUG) Log.v(TAG, "startStatistics()")
250 | Log.i(TAG, "Start statistics")
251 | if (statisticsTimer == null) {
252 | val task: TimerTask = object : TimerTask() {
253 | override fun run() {
254 | binding.root.post {
255 | updateStatistics()
256 | }
257 | }
258 | }
259 | statisticsTimer = Timer("${TAG}::Statistics").apply {
260 | schedule(task, 0, 1000)
261 | }
262 | }
263 | }
264 |
265 | private fun stopStatistics() {
266 | if (DEBUG) Log.v(TAG, "stopStatistics()")
267 | statisticsTimer?.apply {
268 | Log.i(TAG, "Stop statistics")
269 | cancel()
270 | }
271 | statisticsTimer = null
272 | }
273 |
274 | companion object {
275 | private val TAG: String = RawFragment::class.java.simpleName
276 | private const val DEBUG = true
277 |
278 | private const val DEFAULT_RTSP_PORT = 554
279 | }
280 |
281 | }
282 |
--------------------------------------------------------------------------------
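RawFragment drives RtspClient directly: it opens a TCP socket with NetUtils, builds the client with an AtomicBoolean stop flag plus a listener, and runs the blocking execute() on a worker thread until the flag is set. A minimal sketch of the same flow outside the fragment, assuming only the RtspClient/NetUtils API shown above; the URL and the listener instance are placeholders, not part of the demo:

    // Sketch only. Assumes com.alexvas.rtsp.RtspClient and com.alexvas.utils.NetUtils from library-client-rtsp.
    val rtspStopped = AtomicBoolean(false)
    Thread {
        val uri = Uri.parse("rtsp://192.168.1.10:554/stream1")  // placeholder URL
        var socket: Socket? = null
        try {
            val port = if (uri.port == -1) 554 else uri.port
            socket = NetUtils.createSocketAndConnect(uri.host!!, port, 5000)
            RtspClient.Builder(socket, uri.toString(), rtspStopped, listener)  // listener: your RtspClient listener implementation
                .requestVideo(true)
                .requestAudio(true)
                .withUserAgent("rtsp-client-android")
                .build()
                .execute()  // blocks until rtspStopped.set(true), an error or a disconnect
        } finally {
            NetUtils.closeSocket(socket)
        }
    }.start()

Setting rtspStopped to true (as the Start/Stop button does above) is the only way to end the session cleanly; the socket is closed in the finally block either way.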
/app/src/main/res/drawable/ic_camera_black_24dp.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_cctv_black_24dp.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_launcher_background.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_launcher_foreground.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/app/src/main/res/drawable/ic_text_subject_black_24dp.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/app/src/main/res/layout/activity_main.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/app/src/main/res/layout/fragment_live.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/app/src/main/res/layout/fragment_logs.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/app/src/main/res/layout/fragment_raw.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/app/src/main/res/layout/layout_rtsp_params.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/app/src/main/res/menu/bottom_nav_menu.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/app/src/main/res/navigation/mobile_navigation.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/app/src/main/res/values/colors.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | #40747A
4 | #00BCD4
5 | #03DAC5
6 |
7 |
--------------------------------------------------------------------------------
/app/src/main/res/values/dimens.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | 16dp
4 | 16dp
5 |
6 |
--------------------------------------------------------------------------------
/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
2 | Rtsp demo
3 | Live
4 | Raw
5 | Logs
6 |
7 |
--------------------------------------------------------------------------------
/app/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
1 | buildscript {
2 |
3 | ext.kotlin_version = '2.0.21'
4 | ext.compile_sdk_version = 35
5 | ext.min_sdk_version = 24
6 | ext.target_sdk_version = 35
7 | ext.project_version_code = 534
8 | ext.project_version_name = '5.3.4'
9 |
10 | repositories {
11 | google()
12 | mavenCentral()
13 | }
14 | dependencies {
15 | classpath 'com.android.tools.build:gradle:8.7.3'
16 | classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
17 | }
18 | }
19 |
20 | allprojects {
21 | repositories {
22 | google()
23 | mavenCentral()
24 | maven { url 'https://jitpack.io' }
25 | }
26 | }
27 |
28 | tasks.register('clean', Delete) {
29 | delete rootProject.buildDir
30 | }
31 |
--------------------------------------------------------------------------------
/docs/images/rtsp-demo-app.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexeyvasilyev/rtsp-client-android/c01abfea0b5d8e50781a5ed7a1d6b273eb5c36e9/docs/images/rtsp-demo-app.webp
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | org.gradle.jvmargs=-Xmx1g
2 | android.useAndroidX=true
3 |
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexeyvasilyev/rtsp-client-android/c01abfea0b5d8e50781a5ed7a1d6b273eb5c36e9/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | distributionBase=GRADLE_USER_HOME
2 | distributionPath=wrapper/dists
3 | distributionUrl=https\://services.gradle.org/distributions/gradle-8.12-bin.zip
4 | networkTimeout=10000
5 | validateDistributionUrl=true
6 | zipStoreBase=GRADLE_USER_HOME
7 | zipStorePath=wrapper/dists
8 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | #
4 | # Copyright © 2015-2021 the original authors.
5 | #
6 | # Licensed under the Apache License, Version 2.0 (the "License");
7 | # you may not use this file except in compliance with the License.
8 | # You may obtain a copy of the License at
9 | #
10 | # https://www.apache.org/licenses/LICENSE-2.0
11 | #
12 | # Unless required by applicable law or agreed to in writing, software
13 | # distributed under the License is distributed on an "AS IS" BASIS,
14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | # See the License for the specific language governing permissions and
16 | # limitations under the License.
17 | #
18 | # SPDX-License-Identifier: Apache-2.0
19 | #
20 |
21 | ##############################################################################
22 | #
23 | # Gradle start up script for POSIX generated by Gradle.
24 | #
25 | # Important for running:
26 | #
27 | # (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
28 | # noncompliant, but you have some other compliant shell such as ksh or
29 | # bash, then to run this script, type that shell name before the whole
30 | # command line, like:
31 | #
32 | # ksh Gradle
33 | #
34 | # Busybox and similar reduced shells will NOT work, because this script
35 | # requires all of these POSIX shell features:
36 | # * functions;
37 | # * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
38 | # «${var#prefix}», «${var%suffix}», and «$( cmd )»;
39 | # * compound commands having a testable exit status, especially «case»;
40 | # * various built-in commands including «command», «set», and «ulimit».
41 | #
42 | # Important for patching:
43 | #
44 | # (2) This script targets any POSIX shell, so it avoids extensions provided
45 | # by Bash, Ksh, etc; in particular arrays are avoided.
46 | #
47 | # The "traditional" practice of packing multiple parameters into a
48 | # space-separated string is a well documented source of bugs and security
49 | # problems, so this is (mostly) avoided, by progressively accumulating
50 | # options in "$@", and eventually passing that to Java.
51 | #
52 | # Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
53 | # and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
54 | # see the in-line comments for details.
55 | #
56 | # There are tweaks for specific operating systems such as AIX, CygWin,
57 | # Darwin, MinGW, and NonStop.
58 | #
59 | # (3) This script is generated from the Groovy template
60 | # https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
61 | # within the Gradle project.
62 | #
63 | # You can find Gradle at https://github.com/gradle/gradle/.
64 | #
65 | ##############################################################################
66 |
67 | # Attempt to set APP_HOME
68 |
69 | # Resolve links: $0 may be a link
70 | app_path=$0
71 |
72 | # Need this for daisy-chained symlinks.
73 | while
74 | APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
75 | [ -h "$app_path" ]
76 | do
77 | ls=$( ls -ld "$app_path" )
78 | link=${ls#*' -> '}
79 | case $link in #(
80 | /*) app_path=$link ;; #(
81 | *) app_path=$APP_HOME$link ;;
82 | esac
83 | done
84 |
85 | # This is normally unused
86 | # shellcheck disable=SC2034
87 | APP_BASE_NAME=${0##*/}
88 | # Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036)
89 | APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s
90 | ' "$PWD" ) || exit
91 |
92 | # Use the maximum available, or set MAX_FD != -1 to use that value.
93 | MAX_FD=maximum
94 |
95 | warn () {
96 | echo "$*"
97 | } >&2
98 |
99 | die () {
100 | echo
101 | echo "$*"
102 | echo
103 | exit 1
104 | } >&2
105 |
106 | # OS specific support (must be 'true' or 'false').
107 | cygwin=false
108 | msys=false
109 | darwin=false
110 | nonstop=false
111 | case "$( uname )" in #(
112 | CYGWIN* ) cygwin=true ;; #(
113 | Darwin* ) darwin=true ;; #(
114 | MSYS* | MINGW* ) msys=true ;; #(
115 | NONSTOP* ) nonstop=true ;;
116 | esac
117 |
118 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
119 |
120 |
121 | # Determine the Java command to use to start the JVM.
122 | if [ -n "$JAVA_HOME" ] ; then
123 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
124 | # IBM's JDK on AIX uses strange locations for the executables
125 | JAVACMD=$JAVA_HOME/jre/sh/java
126 | else
127 | JAVACMD=$JAVA_HOME/bin/java
128 | fi
129 | if [ ! -x "$JAVACMD" ] ; then
130 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
131 |
132 | Please set the JAVA_HOME variable in your environment to match the
133 | location of your Java installation."
134 | fi
135 | else
136 | JAVACMD=java
137 | if ! command -v java >/dev/null 2>&1
138 | then
139 | die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
140 |
141 | Please set the JAVA_HOME variable in your environment to match the
142 | location of your Java installation."
143 | fi
144 | fi
145 |
146 | # Increase the maximum file descriptors if we can.
147 | if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
148 | case $MAX_FD in #(
149 | max*)
150 | # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
151 | # shellcheck disable=SC2039,SC3045
152 | MAX_FD=$( ulimit -H -n ) ||
153 | warn "Could not query maximum file descriptor limit"
154 | esac
155 | case $MAX_FD in #(
156 | '' | soft) :;; #(
157 | *)
158 | # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
159 | # shellcheck disable=SC2039,SC3045
160 | ulimit -n "$MAX_FD" ||
161 | warn "Could not set maximum file descriptor limit to $MAX_FD"
162 | esac
163 | fi
164 |
165 | # Collect all arguments for the java command, stacking in reverse order:
166 | # * args from the command line
167 | # * the main class name
168 | # * -classpath
169 | # * -D...appname settings
170 | # * --module-path (only if needed)
171 | # * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
172 |
173 | # For Cygwin or MSYS, switch paths to Windows format before running java
174 | if "$cygwin" || "$msys" ; then
175 | APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
176 | CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
177 |
178 | JAVACMD=$( cygpath --unix "$JAVACMD" )
179 |
180 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
181 | for arg do
182 | if
183 | case $arg in #(
184 | -*) false ;; # don't mess with options #(
185 | /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
186 | [ -e "$t" ] ;; #(
187 | *) false ;;
188 | esac
189 | then
190 | arg=$( cygpath --path --ignore --mixed "$arg" )
191 | fi
192 | # Roll the args list around exactly as many times as the number of
193 | # args, so each arg winds up back in the position where it started, but
194 | # possibly modified.
195 | #
196 | # NB: a `for` loop captures its iteration list before it begins, so
197 | # changing the positional parameters here affects neither the number of
198 | # iterations, nor the values presented in `arg`.
199 | shift # remove old arg
200 | set -- "$@" "$arg" # push replacement arg
201 | done
202 | fi
203 |
204 |
205 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
206 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
207 |
208 | # Collect all arguments for the java command:
209 | # * DEFAULT_JVM_OPTS, JAVA_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments,
210 | # and any embedded shellness will be escaped.
211 | # * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be
212 | # treated as '${Hostname}' itself on the command line.
213 |
214 | set -- \
215 | "-Dorg.gradle.appname=$APP_BASE_NAME" \
216 | -classpath "$CLASSPATH" \
217 | org.gradle.wrapper.GradleWrapperMain \
218 | "$@"
219 |
220 | # Stop when "xargs" is not available.
221 | if ! command -v xargs >/dev/null 2>&1
222 | then
223 | die "xargs is not available"
224 | fi
225 |
226 | # Use "xargs" to parse quoted args.
227 | #
228 | # With -n1 it outputs one arg per line, with the quotes and backslashes removed.
229 | #
230 | # In Bash we could simply go:
231 | #
232 | # readarray ARGS < <( xargs -n1 <<<"$var" ) &&
233 | # set -- "${ARGS[@]}" "$@"
234 | #
235 | # but POSIX shell has neither arrays nor command substitution, so instead we
236 | # post-process each arg (as a line of input to sed) to backslash-escape any
237 | # character that might be a shell metacharacter, then use eval to reverse
238 | # that process (while maintaining the separation between arguments), and wrap
239 | # the whole thing up as a single "set" statement.
240 | #
241 | # This will of course break if any of these variables contains a newline or
242 | # an unmatched quote.
243 | #
244 |
245 | eval "set -- $(
246 | printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
247 | xargs -n1 |
248 | sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
249 | tr '\n' ' '
250 | )" '"$@"'
251 |
252 | exec "$JAVACMD" "$@"
253 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @rem
2 | @rem Copyright 2015 the original author or authors.
3 | @rem
4 | @rem Licensed under the Apache License, Version 2.0 (the "License");
5 | @rem you may not use this file except in compliance with the License.
6 | @rem You may obtain a copy of the License at
7 | @rem
8 | @rem https://www.apache.org/licenses/LICENSE-2.0
9 | @rem
10 | @rem Unless required by applicable law or agreed to in writing, software
11 | @rem distributed under the License is distributed on an "AS IS" BASIS,
12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | @rem See the License for the specific language governing permissions and
14 | @rem limitations under the License.
15 | @rem
16 | @rem SPDX-License-Identifier: Apache-2.0
17 | @rem
18 |
19 | @if "%DEBUG%"=="" @echo off
20 | @rem ##########################################################################
21 | @rem
22 | @rem Gradle startup script for Windows
23 | @rem
24 | @rem ##########################################################################
25 |
26 | @rem Set local scope for the variables with windows NT shell
27 | if "%OS%"=="Windows_NT" setlocal
28 |
29 | set DIRNAME=%~dp0
30 | if "%DIRNAME%"=="" set DIRNAME=.
31 | @rem This is normally unused
32 | set APP_BASE_NAME=%~n0
33 | set APP_HOME=%DIRNAME%
34 |
35 | @rem Resolve any "." and ".." in APP_HOME to make it shorter.
36 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
37 |
38 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
39 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
40 |
41 | @rem Find java.exe
42 | if defined JAVA_HOME goto findJavaFromJavaHome
43 |
44 | set JAVA_EXE=java.exe
45 | %JAVA_EXE% -version >NUL 2>&1
46 | if %ERRORLEVEL% equ 0 goto execute
47 |
48 | echo. 1>&2
49 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2
50 | echo. 1>&2
51 | echo Please set the JAVA_HOME variable in your environment to match the 1>&2
52 | echo location of your Java installation. 1>&2
53 |
54 | goto fail
55 |
56 | :findJavaFromJavaHome
57 | set JAVA_HOME=%JAVA_HOME:"=%
58 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
59 |
60 | if exist "%JAVA_EXE%" goto execute
61 |
62 | echo. 1>&2
63 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2
64 | echo. 1>&2
65 | echo Please set the JAVA_HOME variable in your environment to match the 1>&2
66 | echo location of your Java installation. 1>&2
67 |
68 | goto fail
69 |
70 | :execute
71 | @rem Setup the command line
72 |
73 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
74 |
75 |
76 | @rem Execute Gradle
77 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
78 |
79 | :end
80 | @rem End local scope for the variables with windows NT shell
81 | if %ERRORLEVEL% equ 0 goto mainEnd
82 |
83 | :fail
84 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
85 | rem the _cmd.exe /c_ return code!
86 | set EXIT_CODE=%ERRORLEVEL%
87 | if %EXIT_CODE% equ 0 set EXIT_CODE=1
88 | if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE%
89 | exit /b %EXIT_CODE%
90 |
91 | :mainEnd
92 | if "%OS%"=="Windows_NT" endlocal
93 |
94 | :omega
95 |
--------------------------------------------------------------------------------
/jitpack.yml:
--------------------------------------------------------------------------------
1 | jdk:
2 | - openjdk17
3 |
4 | install:
5 | - ./gradlew build :library-client-rtsp:publishToMavenLocal
--------------------------------------------------------------------------------
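jitpack.yml tells JitPack to build the project with JDK 17 and publish the library-client-rtsp module to its local Maven repository, which is what makes the AAR resolvable through the jitpack.io repository already declared in the root build.gradle. A hedged sketch of the consumer side; the exact group/artifact/version coordinates are an assumption here and should be taken from the project's JitPack page:

    repositories {
        maven { url 'https://jitpack.io' }
    }
    dependencies {
        // coordinates below are assumed, not confirmed by this repo
        implementation 'com.github.alexeyvasilyev:rtsp-client-android:5.3.4'
    }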
/library-client-rtsp/.gitignore:
--------------------------------------------------------------------------------
1 | # Created by https://www.gitignore.io/api/android,java,intellij
2 |
3 | ### Android ###
4 | # Built application files
5 | *.apk
6 | *.ap_
7 |
8 | # Files for the Dalvik VM
9 | *.dex
10 |
11 | # Java class files
12 | *.class
13 |
14 | # Generated files
15 | bin/
16 | gen/
17 |
18 | # Gradle files
19 | .gradle/
20 | build/
21 |
22 | # Local configuration file (sdk path, etc)
23 | local.properties
24 |
25 | # Proguard folder generated by Eclipse
26 | proguard/
27 |
28 | xactmobile/class_files.txt
29 | xactmobile/mapping.txt
30 | xactmobile/seeds.txt
31 |
32 | # Log Files
33 | *.log
34 |
35 | # Android Studio Navigation editor temp files
36 | .navigation/
37 |
38 | ### Android Patch ###
39 | gen-external-apklibs
40 |
41 |
42 | ### Java ###
43 | *.class
44 |
45 | # Mobile Tools for Java (J2ME)
46 | .mtj.tmp/
47 |
48 | # Package Files #
49 | #*.jar
50 | *.war
51 | *.ear
52 |
53 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
54 | hs_err_pid*
55 |
56 |
57 | ### Intellij ###
58 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio
59 |
60 | *.iml
61 |
62 | ## Directory-based project format:
63 | .idea/
64 | # if you remove the above rule, at least ignore the following:
65 |
66 | # User-specific stuff:
67 | .idea/workspace.xml
68 | .idea/tasks.xml
69 | .idea/dictionaries
70 |
71 | # Sensitive or high-churn files:
72 | .idea/dataSources.ids
73 | .idea/dataSources.xml
74 | .idea/sqlDataSources.xml
75 | .idea/dynamic.xml
76 | .idea/uiDesigner.xml
77 |
78 | # Gradle:
79 | .idea/gradle.xml
80 | .idea/libraries
81 |
82 | # Mongo Explorer plugin:
83 | .idea/mongoSettings.xml
84 |
85 | ## File-based project format:
86 | *.ipr
87 | *.iws
88 |
89 | ## Plugin-specific files:
90 |
91 | # IntelliJ
92 | /out/
93 |
94 | # mpeltonen/sbt-idea plugin
95 | .idea_modules/
96 |
97 | # JIRA plugin
98 | atlassian-ide-plugin.xml
99 |
100 | # Crashlytics plugin (for Android Studio and IntelliJ)
101 | com_crashlytics_export_strings.xml
102 | crashlytics.properties
103 | crashlytics-build.properties
104 |
105 | xactmobile/.DS_Store~64be78fe3602626c61b52bcbfd09e09a6107b50a
106 | xactmobile/.DS_Store~HEAD
107 | oslab-viewpager/._.DS_Store
108 | oslab-viewpager/src/main/.DS_Store
109 | oslab-viewpager/src/main/._.DS_Store
110 | oslab-viewpager/src/main/res/.DS_Store
111 | oslab-viewpager/src/main/res/._.DS_Store
112 | oslab-viewpager/.gitignore
113 | oslab-materialdesign/.DS_Store
114 | oslab-materialdesign/._.DS_Store
115 | oslab-materialdesign/src/.DS_Store
116 | oslab-materialdesign/src/._.DS_Store
117 | oslab-materialdesign/src/main/.DS_Store
118 | oslab-materialdesign/src/main/._.DS_Store
119 | oslab-materialdesign/src/main/res/.DS_Store
120 | oslab-materialdesign/src/main/res/._.DS_Store
121 |
--------------------------------------------------------------------------------
/library-client-rtsp/build.gradle:
--------------------------------------------------------------------------------
1 | plugins {
2 | id 'com.android.library'
3 | id 'kotlin-android'
4 | id 'maven-publish'
5 | }
6 |
7 | apply plugin: 'com.android.library'
8 |
9 | project.afterEvaluate {
10 | publishing {
11 | publications {
12 | release(MavenPublication) {
13 | from components.release
14 | }
15 | }
16 | }
17 | }
18 |
19 | android {
20 |
21 | compileSdk compile_sdk_version
22 |
23 | defaultConfig {
24 | minSdk min_sdk_version
25 | targetSdk target_sdk_version
26 | }
27 |
28 | compileOptions {
29 | sourceCompatibility JavaVersion.VERSION_17
30 | targetCompatibility JavaVersion.VERSION_17
31 | }
32 |
33 | kotlinOptions {
34 | jvmTarget = JavaVersion.VERSION_17.toString()
35 | }
36 |
37 | namespace 'com.alexvas.rtsp'
38 | }
39 |
40 | dependencies {
41 | implementation 'androidx.annotation:annotation:1.9.1'
42 | implementation 'androidx.media3:media3-exoplayer:1.5.1'
43 | implementation 'androidx.camera:camera-core:1.4.1' // YUV -> BMP conversion
44 | implementation 'org.jcodec:jcodec:0.2.5' // SPS frame modification
45 | }
46 |
--------------------------------------------------------------------------------
/library-client-rtsp/proguard-rules.txt:
--------------------------------------------------------------------------------
1 | # Proguard rules.
2 |
3 |
--------------------------------------------------------------------------------
/library-client-rtsp/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/library-client-rtsp/src/main/java/com/alexvas/rtsp/codec/AudioDecodeThread.kt:
--------------------------------------------------------------------------------
1 | package com.alexvas.rtsp.codec
2 |
3 | import android.media.*
4 | import android.os.Process
5 | import android.util.Log
6 | import java.nio.ByteBuffer
7 |
8 |
9 | class AudioDecodeThread (
10 | private val mimeType: String,
11 | private val sampleRate: Int,
12 | private val channelCount: Int,
13 | private val codecConfig: ByteArray?,
14 | private val audioFrameQueue: AudioFrameQueue) : Thread() {
15 |
16 | private var isRunning = true
17 |
18 | fun stopAsync() {
19 | if (DEBUG) Log.v(TAG, "stopAsync()")
20 | isRunning = false
21 | // Wake up sleep() code
22 | interrupt()
23 | }
24 |
25 | override fun run() {
26 | if (DEBUG) Log.d(TAG, "$name started")
27 |
28 | Process.setThreadPriority(Process.THREAD_PRIORITY_AUDIO)
29 |
30 | // Creating audio decoder
31 | val decoder = MediaCodec.createDecoderByType(mimeType)
32 | val format = MediaFormat.createAudioFormat(mimeType, sampleRate, channelCount)
33 |
34 | if (mimeType == MediaFormat.MIMETYPE_AUDIO_AAC) {
35 | val csd0 = codecConfig ?: getAacDecoderConfigData(MediaCodecInfo.CodecProfileLevel.AACObjectLC, sampleRate, channelCount)
36 | format.setByteBuffer("csd-0", ByteBuffer.wrap(csd0))
37 | format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC)
38 | } else if (mimeType == MediaFormat.MIMETYPE_AUDIO_OPUS) {
39 | // TODO: Add Opus support
40 |
41 | // val OPUS_IDENTIFICATION_HEADER = "OpusHead".toByteArray()
42 | // val OPUS_PRE_SKIP_NSEC = ByteBuffer.allocate(8).putLong(11971).array()
43 | // val OPUS_SEEK_PRE_ROLL_NSEC = ByteBuffer.allocate(8).putLong(80000000).array()
44 |
45 | // val csd0 = ByteBuffer.allocate(8+1+1+2+4+2+1)
46 | // csd0.put("OpusHead".toByteArray())
47 | // // Version
48 | // csd0.put(1)
49 | // // Number of channels
50 | // csd0.put(2)
51 | // // Pre-skip
52 | // csd0.putShort(0)
53 | // csd0.putInt(sampleRate)
54 | // // Output Gain
55 | // csd0.putShort(0)
56 | // // Channel Mapping Family
57 | // csd0.put(0)
58 | // Buffer buf = new Buffer();
59 | // // Magic Signature: fixed header, 8 bytes, the string "OpusHead"
60 | // buf.write("OpusHead".getBytes(StandardCharsets.UTF_8));
61 | // // Version: version number, 1 byte, fixed to 0x01
62 | // buf.writeByte(1);
63 | // // Channel Count: number of channels, 1 byte, set according to the audio stream, e.g. 0x02
64 | // buf.writeByte(1);
65 | // // Pre-skip: number of samples the decoder drops at the start of playback, 2 bytes, little-endian, default 0x00
66 | // buf.writeShortLe(0);
67 | // // Input Sample Rate (Hz): sample rate of the audio stream, 4 bytes, little-endian, set from the actual stream
68 | // buf.writeIntLe(currentFormat.HZ);
69 | // // Output Gain: output gain, 2 bytes, little-endian, unused so 0x00, 0x00 is fine
70 | // buf.writeShortLe(0);
71 | // // Channel Mapping Family: channel mapping family, 1 byte, default 0x00 is fine
72 | // buf.writeByte(0);
73 | // // Channel Mapping Table: optional, can be omitted when the Family above is the default 0x00
74 | // format.setByteBuffer("csd-0", ByteBuffer.wrap(OPUS_IDENTIFICATION_HEADER).order(ByteOrder.BIG_ENDIAN))
75 | // format.setByteBuffer("csd-1", ByteBuffer.wrap(OPUS_PRE_SKIP_NSEC).order(ByteOrder.BIG_ENDIAN))
76 | // format.setByteBuffer("csd-2", ByteBuffer.wrap(OPUS_SEEK_PRE_ROLL_NSEC).order(ByteOrder.LITTLE_ENDIAN))
77 |
78 | val csd0 = byteArrayOf(
79 | 0x4f, 0x70, 0x75, 0x73, // "Opus"
80 | 0x48, 0x65, 0x61, 0x64, // "Head"
81 | 0x01, // Version
82 | 0x02, // Channel Count
83 | 0x00, 0x00, // Pre skip
84 | 0x80.toByte(), 0xbb.toByte(), 0x00, 0x00, // Sample rate 48000
85 | 0x00, 0x00, // Output Gain (Q7.8 in dB)
86 | 0x00, // Mapping Family
87 | )
88 | val csd1 = byteArrayOf(0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00)
89 | val csd2 = byteArrayOf(0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00)
90 | format.setByteBuffer("csd-0", ByteBuffer.wrap(csd0))
91 | format.setByteBuffer("csd-1", ByteBuffer.wrap(csd1))
92 | format.setByteBuffer("csd-2", ByteBuffer.wrap(csd2))
93 | }
94 |
95 | decoder.configure(format, null, null, 0)
96 | decoder.start()
97 |
98 | // Creating audio playback device
99 | val outChannel = if (channelCount > 1) AudioFormat.CHANNEL_OUT_STEREO else AudioFormat.CHANNEL_OUT_MONO
100 | val outAudio = AudioFormat.ENCODING_PCM_16BIT
101 | val bufferSize = AudioTrack.getMinBufferSize(sampleRate, outChannel, outAudio)
102 | // Log.i(TAG, "sampleRate: $sampleRate, bufferSize: $bufferSize".format(sampleRate, bufferSize))
103 | val audioTrack = AudioTrack(
104 | AudioAttributes.Builder()
105 | .setUsage(AudioAttributes.USAGE_MEDIA)
106 | .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
107 | .build(),
108 | AudioFormat.Builder()
109 | .setEncoding(outAudio)
110 | .setChannelMask(outChannel)
111 | .setSampleRate(sampleRate)
112 | .build(),
113 | bufferSize,
114 | AudioTrack.MODE_STREAM,
115 | 0)
116 | audioTrack.play()
117 |
118 | val bufferInfo = MediaCodec.BufferInfo()
119 | while (isRunning) {
120 | val inIndex: Int = decoder.dequeueInputBuffer(10000L)
121 | if (inIndex >= 0) {
122 | // fill inputBuffers[inputBufferIndex] with valid data
123 | var byteBuffer: ByteBuffer?
124 | try {
125 | byteBuffer = decoder.getInputBuffer(inIndex)
126 | } catch (e: Exception) {
127 | e.printStackTrace()
128 | break
129 | }
130 | byteBuffer?.rewind()
131 |
132 | // Preventing BufferOverflowException
133 | // if (length > byteBuffer.limit()) throw DecoderFatalException("Error")
134 |
135 | val audioFrame: FrameQueue.Frame?
136 | try {
137 | audioFrame = audioFrameQueue.pop()
138 | if (audioFrame == null) {
139 | Log.d(TAG, "Empty audio frame")
140 | // Release input buffer
141 | decoder.queueInputBuffer(inIndex, 0, 0, 0L, 0)
142 | } else {
143 | byteBuffer?.put(audioFrame.data, audioFrame.offset, audioFrame.length)
144 | decoder.queueInputBuffer(inIndex, audioFrame.offset, audioFrame.length, audioFrame.timestampMs, 0)
145 | }
146 | } catch (e: Exception) {
147 | e.printStackTrace()
148 | }
149 | }
150 | // Log.i(TAG, "inIndex: ${inIndex}")
151 |
152 | try {
153 | // Log.w(TAG, "outIndex: ${outIndex}")
154 | if (!isRunning) break
155 | when (val outIndex = decoder.dequeueOutputBuffer(bufferInfo, 10000L)) {
156 | MediaCodec.INFO_OUTPUT_FORMAT_CHANGED -> Log.d(TAG, "Decoder format changed: ${decoder.outputFormat}")
157 | MediaCodec.INFO_TRY_AGAIN_LATER -> if (DEBUG) Log.d(TAG, "No output from decoder available")
158 | else -> {
159 | if (outIndex >= 0) {
160 | val byteBuffer: ByteBuffer? = decoder.getOutputBuffer(outIndex)
161 |
162 | val chunk = ByteArray(bufferInfo.size)
163 | byteBuffer?.get(chunk)
164 | byteBuffer?.clear()
165 |
166 | if (chunk.isNotEmpty()) {
167 | audioTrack.write(chunk, 0, chunk.size)
168 | }
169 | decoder.releaseOutputBuffer(outIndex, false)
170 | }
171 | }
172 | }
173 | } catch (e: Exception) {
174 | e.printStackTrace()
175 | }
176 |
177 | // All decoded frames have been rendered, we can stop playing now
178 | if (bufferInfo.flags and MediaCodec.BUFFER_FLAG_END_OF_STREAM != 0) {
179 | Log.d(TAG, "OutputBuffer BUFFER_FLAG_END_OF_STREAM")
180 | break
181 | }
182 | }
183 | audioTrack.flush()
184 | audioTrack.release()
185 |
186 | try {
187 | decoder.stop()
188 | decoder.release()
189 | } catch (_: InterruptedException) {
190 | } catch (e: Exception) {
191 | e.printStackTrace()
192 | }
193 | audioFrameQueue.clear()
194 | if (DEBUG) Log.d(TAG, "$name stopped")
195 | }
196 |
197 | companion object {
198 | private val TAG: String = AudioDecodeThread::class.java.simpleName
199 | private const val DEBUG = false
200 |
201 | fun getAacDecoderConfigData(audioProfile: Int, sampleRate: Int, channels: Int): ByteArray {
202 | // AOT_LC = 2
203 | // 0001 0000 0000 0000
204 | var extraDataAac = audioProfile shl 11
205 | // Sample rate
206 | when (sampleRate) {
207 | 7350 -> extraDataAac = extraDataAac or (0xC shl 7)
208 | 8000 -> extraDataAac = extraDataAac or (0xB shl 7)
209 | 11025 -> extraDataAac = extraDataAac or (0xA shl 7)
210 | 12000 -> extraDataAac = extraDataAac or (0x9 shl 7)
211 | 16000 -> extraDataAac = extraDataAac or (0x8 shl 7)
212 | 22050 -> extraDataAac = extraDataAac or (0x7 shl 7)
213 | 24000 -> extraDataAac = extraDataAac or (0x6 shl 7)
214 | 32000 -> extraDataAac = extraDataAac or (0x5 shl 7)
215 | 44100 -> extraDataAac = extraDataAac or (0x4 shl 7)
216 | 48000 -> extraDataAac = extraDataAac or (0x3 shl 7)
217 | 64000 -> extraDataAac = extraDataAac or (0x2 shl 7)
218 | 88200 -> extraDataAac = extraDataAac or (0x1 shl 7)
219 | 96000 -> extraDataAac = extraDataAac or (0x0 shl 7)
220 | }
221 | // Channels
222 | extraDataAac = extraDataAac or (channels shl 3)
223 | val extraData = ByteArray(2)
224 | extraData[0] = (extraDataAac and 0xff00 shr 8).toByte() // high byte
225 | extraData[1] = (extraDataAac and 0xff).toByte() // low byte
226 | return extraData
227 | }
228 | }
229 |
230 | }
231 |
232 |
--------------------------------------------------------------------------------
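getAacDecoderConfigData() in the companion object builds the 2-byte AAC AudioSpecificConfig (csd-0) that is handed to MediaCodec when the RTSP session does not supply one: 5 bits of audio object type, 4 bits of sampling-frequency index and 4 bits of channel configuration, packed high to low. A worked example for AAC-LC at 44100 Hz, stereo:

    // AAC-LC -> 2 shl 11 = 0x1000; 44100 Hz -> index 4 -> 0x4 shl 7 = 0x0200; 2 channels -> 2 shl 3 = 0x0010
    val csd0 = AudioDecodeThread.getAacDecoderConfigData(
        MediaCodecInfo.CodecProfileLevel.AACObjectLC, 44100, 2)
    // csd0 is [0x12, 0x10] (0x1210), set on the MediaFormat as "csd-0"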
/library-client-rtsp/src/main/java/com/alexvas/rtsp/codec/FrameQueue.kt:
--------------------------------------------------------------------------------
1 | package com.alexvas.rtsp.codec
2 |
3 | import java.util.concurrent.ArrayBlockingQueue
4 | import java.util.concurrent.TimeUnit
5 |
6 | enum class VideoCodecType {
7 | H264, H265, UNKNOWN
8 | }
9 |
10 | enum class AudioCodecType {
11 | AAC_LC, UNKNOWN
12 | }
13 |
14 | class VideoFrameQueue(frameQueueCapacity: Int): FrameQueue<FrameQueue.VideoFrame>(frameQueueCapacity)
15 | class AudioFrameQueue(frameQueueCapacity: Int): FrameQueue<FrameQueue.AudioFrame>(frameQueueCapacity)
16 |
17 | /**
18 | * Queue for concurrent adding/removing audio/video frames.
19 | */
20 | open class FrameQueue<T : FrameQueue.Frame>(private val frameQueueCapacity: Int) {
21 |
22 | interface Frame {
23 | val data: ByteArray
24 | val offset: Int
25 | val length: Int
26 | val timestampMs: Long // presentation time in msec
27 | }
28 |
29 | data class VideoFrame(
30 | /** Video codec type (H.264 or H.265) */
31 | val codecType: VideoCodecType,
32 | /** Indicates whether it is a keyframe or not */
33 | val isKeyframe: Boolean,
34 | override val data: ByteArray,
35 | override val offset: Int,
36 | override val length: Int,
37 | /** Video frame timestamp (msec) generated by camera */
38 | override val timestampMs: Long,
39 | /** Captured (received) video frame timestamp (msec). If -1, not supported. */
40 | val capturedTimestampMs: Long = -1
41 | ) : Frame
42 |
43 | data class AudioFrame(
44 | val codecType: AudioCodecType,
45 | // val sampleRate: Int,
46 | override val data: ByteArray,
47 | override val offset: Int,
48 | override val length: Int,
49 | override val timestampMs: Long,
50 | ) : Frame
51 |
52 | private val queue = ArrayBlockingQueue<T>(frameQueueCapacity)
53 |
54 | val size: Int
55 | get() = queue.size
56 |
57 | val capacity: Int
58 | get() = frameQueueCapacity
59 |
60 | @Throws(InterruptedException::class)
61 | fun push(frame: T): Boolean {
62 | if (queue.offer(frame, 5, TimeUnit.MILLISECONDS)) {
63 | return true
64 | }
65 | // Log.w(TAG, "Cannot add frame, queue is full")
66 | return false
67 | }
68 |
69 | @Throws(InterruptedException::class)
70 | open fun pop(timeout: Long = 1000): T? {
71 | try {
72 | val frame: T? = queue.poll(timeout, TimeUnit.MILLISECONDS)
73 | // if (frame == null) {
74 | // Log.w(TAG, "Cannot get frame within 1 sec, queue is empty")
75 | // }
76 | return frame
77 | } catch (e: InterruptedException) {
78 | Thread.currentThread().interrupt()
79 | }
80 | return null
81 | }
82 |
83 | fun clear() {
84 | queue.clear()
85 | }
86 |
87 | fun copyInto(dstFrameQueue: FrameQueue<T>) {
88 | dstFrameQueue.queue.addAll(queue)
89 | }
90 |
91 | companion object {
92 | private val TAG: String = FrameQueue::class.java.simpleName
93 | }
94 |
95 | }
96 |
--------------------------------------------------------------------------------
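FrameQueue wraps a bounded ArrayBlockingQueue: push() waits at most 5 ms and returns false (dropping the frame) when the queue is full, while pop() waits up to the given timeout and returns null if nothing arrived, so a slow decoder sheds frames instead of blocking the RTP reader. A minimal producer/consumer sketch using only the types above; the keyframe flag and timestamps are placeholders:

    val videoFrameQueue = VideoFrameQueue(60)

    // Producer side, e.g. from the RTSP video data callback: drop the frame if the decoder is behind
    fun onVideoNalUnitReceived(nal: ByteArray, timestampMs: Long) {
        videoFrameQueue.push(FrameQueue.VideoFrame(
            codecType = VideoCodecType.H264,
            isKeyframe = false,            // placeholder; derive from the NAL unit type in real code
            data = nal, offset = 0, length = nal.size,
            timestampMs = timestampMs))
    }

    // Consumer side (decoder thread); null means no frame within the default 1000 ms
    val frame: FrameQueue.VideoFrame? = videoFrameQueue.pop()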
/library-client-rtsp/src/main/java/com/alexvas/rtsp/codec/VideoDecoderBitmapThread.kt:
--------------------------------------------------------------------------------
1 | package com.alexvas.rtsp.codec
2 |
3 | import android.graphics.Bitmap
4 | import android.graphics.Matrix
5 | import android.media.MediaCodec
6 | import android.media.MediaFormat
7 | import android.util.Log
8 | import com.alexvas.rtsp.codec.color.ColorConverterImageAndroidX
9 |
10 | class VideoDecoderBitmapThread (
11 | mimeType: String,
12 | rotation: Int, // 0, 90, 180, 270
13 | videoFrameQueue: VideoFrameQueue,
14 | videoDecoderListener: VideoDecoderListener,
15 | private val videoDecoderBitmapListener: VideoDecoderBitmapListener,
16 | videoDecoderType: DecoderType = DecoderType.HARDWARE
17 | ): VideoDecodeThread(mimeType, 1920, 1080, rotation, videoFrameQueue, videoDecoderListener, videoDecoderType) {
18 |
19 | interface VideoDecoderBitmapListener {
20 | /** Used only when OutputType.BUFFERS is used */
21 | fun onVideoDecoderBitmapObtained(bitmap: Bitmap) {}
22 | }
23 |
24 | private var colorConverter: ColorConverterImageAndroidX? = null
25 |
26 | override fun decoderCreated(mediaCodec: MediaCodec, mediaFormat: MediaFormat) {
27 | mediaCodec.configure(mediaFormat, null, null, 0)
28 | }
29 |
30 | override fun releaseOutputBuffer(mediaCodec: MediaCodec, outIndex: Int, render: Boolean) {
31 | val image = mediaCodec.getOutputImage(outIndex)
32 | image?.let {
33 | if (colorConverter == null)
34 | colorConverter = ColorConverterImageAndroidX()
35 | // Converting YUV 4:2:0 888 to Bitmap ARGB 8888
36 | var bitmap = colorConverter!!.getBitmapFromImage(image)
37 | // Rotation does not work in VideoDecoderThread since we do not use Surface there.
38 | // Rotate bitmaps.
39 | bitmap = if (rotation != 0) {
40 | bitmap.rotateBitmap(rotation.toFloat())
41 | } else {
42 | bitmap.createCopy565()
43 | }
44 | uiHandler.post {
45 | if (!firstFrameRendered) {
46 | firstFrameRendered = true
47 | videoDecoderListener.onVideoDecoderFirstFrameRendered()
48 | }
49 | videoDecoderBitmapListener.onVideoDecoderBitmapObtained(bitmap)
50 | }
51 | }
52 | mediaCodec.releaseOutputBuffer(outIndex, false)
53 | }
54 |
55 | override fun decoderDestroyed(mediaCodec: MediaCodec) {
56 | colorConverter?.apply {
57 | try {
58 | Log.i(TAG, "Releasing color converter...")
59 | release()
60 | Log.i(TAG, "Color converter successfully released")
61 | } catch (e: Throwable) {
62 | Log.e(TAG, "Failed to release color converter", e)
63 | }
64 | }
65 | }
66 |
67 | }
68 |
69 | fun Bitmap.createCopy565(): Bitmap {
70 | return copy(
71 | Bitmap.Config.RGB_565,
72 | true
73 | )
74 | }
75 |
76 | fun Bitmap.rotateBitmap(angle: Float): Bitmap {
77 | val matrix = Matrix()
78 | matrix.postRotate(angle)
79 | return Bitmap.createBitmap(this, 0, 0, this.width, this.height, matrix, true)
80 | }
81 |
--------------------------------------------------------------------------------
/library-client-rtsp/src/main/java/com/alexvas/rtsp/codec/VideoDecoderSurfaceThread.kt:
--------------------------------------------------------------------------------
1 | package com.alexvas.rtsp.codec
2 |
3 | import android.media.MediaCodec
4 | import android.media.MediaFormat
5 | import android.util.Log
6 | import android.view.Surface
7 |
8 | class VideoDecoderSurfaceThread (
9 | private val surface: Surface,
10 | mimeType: String,
11 | width: Int,
12 | height: Int,
13 | rotation: Int, // 0, 90, 180, 270
14 | videoFrameQueue: VideoFrameQueue,
15 | videoDecoderListener: VideoDecoderListener,
16 | videoDecoderType: DecoderType = DecoderType.HARDWARE
17 | ): VideoDecodeThread(
18 | mimeType, width, height, rotation, videoFrameQueue, videoDecoderListener, videoDecoderType) {
19 |
20 | override fun decoderCreated(mediaCodec: MediaCodec, mediaFormat: MediaFormat) {
21 | if (!surface.isValid) {
22 | Log.e(TAG, "Surface invalid")
23 | }
24 | mediaCodec.configure(mediaFormat, surface, null, 0)
25 | }
26 |
27 | override fun releaseOutputBuffer(mediaCodec: MediaCodec, outIndex: Int, render: Boolean) {
28 | mediaCodec.releaseOutputBuffer(outIndex, render && surface.isValid)
29 | }
30 |
31 | override fun decoderDestroyed(mediaCodec: MediaCodec) {
32 | }
33 |
34 | }
35 |
--------------------------------------------------------------------------------
/library-client-rtsp/src/main/java/com/alexvas/rtsp/codec/color/ColorConverter.kt:
--------------------------------------------------------------------------------
1 | package com.alexvas.rtsp.codec.color
2 |
3 | import android.annotation.SuppressLint
4 | import android.graphics.Bitmap
5 | import android.graphics.Matrix
6 | import android.graphics.Rect
7 | import android.media.Image
8 | import androidx.camera.core.ImageInfo
9 | import androidx.camera.core.ImageProcessingUtil
10 | import androidx.camera.core.ImageProxy
11 | import androidx.camera.core.ImmutableImageInfo
12 | import androidx.camera.core.impl.TagBundle
13 | import java.nio.ByteBuffer
14 |
15 | /**
16 | * Convert Image YUV 4:2:0 888 to Bitmap ARGB 8888.
17 | */
18 | class ColorConverterImageAndroidX: ColorConverterImage() {
19 |
20 | @SuppressLint("RestrictedApi")
21 | override fun getBitmapFromImage(image: Image): Bitmap {
22 | // YUV 4:2:0 888 -> ARGB 8888
23 | return ImageProcessingUtil.convertYUVToBitmap(AndroidImageProxy(image))
24 | }
25 |
26 | override fun release() {
27 | }
28 |
29 | }
30 |
31 | internal class AndroidImageProxy(private val image: Image) : ImageProxy {
32 |
33 | private val planes: Array<AndroidPlaneProxy> = if (image.planes != null) {
34 | Array(image.planes.size) { i -> AndroidPlaneProxy(image.planes[i]) }
35 | } else {
36 | emptyArray()
37 | }
38 | @SuppressLint("RestrictedApi")
39 | private val imageInfo: ImageInfo = ImmutableImageInfo.create(
40 | TagBundle.emptyBundle(),
41 | image.timestamp,
42 | 0,
43 | Matrix()
44 | )
45 |
46 | override fun close() {
47 | image.close()
48 | }
49 |
50 | override fun getCropRect(): Rect {
51 | return image.cropRect
52 | }
53 |
54 | override fun setCropRect(rect: Rect?) {
55 | image.cropRect = rect
56 | }
57 |
58 | override fun getFormat(): Int {
59 | return image.format
60 | }
61 |
62 | override fun getHeight(): Int {
63 | return image.height
64 | }
65 |
66 | override fun getWidth(): Int {
67 | return image.width
68 | }
69 |
70 | override fun getPlanes(): Array<ImageProxy.PlaneProxy> {
71 | @Suppress("UNCHECKED_CAST")
72 | return planes as Array<ImageProxy.PlaneProxy>
73 | }
74 |
75 | /** An [ImageProxy.PlaneProxy] which wraps around an [Image.Plane]. */
76 | private class AndroidPlaneProxy(private val mPlane: Image.Plane) : ImageProxy.PlaneProxy {
77 | override fun getRowStride(): Int {
78 | return mPlane.rowStride
79 | }
80 |
81 | override fun getPixelStride(): Int {
82 | return mPlane.pixelStride
83 | }
84 |
85 | override fun getBuffer(): ByteBuffer {
86 | return mPlane.buffer
87 | }
88 | }
89 |
90 | override fun getImageInfo(): ImageInfo {
91 | return imageInfo
92 | }
93 |
94 | @SuppressLint("UnsafeOptInUsageError")
95 | override fun getImage(): Image {
96 | return image
97 | }
98 | }
99 |
--------------------------------------------------------------------------------
/library-client-rtsp/src/main/java/com/alexvas/rtsp/codec/color/ColorConverterImage.kt:
--------------------------------------------------------------------------------
1 | package com.alexvas.rtsp.codec.color
2 |
3 | import android.graphics.Bitmap
4 | import android.media.Image
5 |
6 | abstract class ColorConverter {
7 |
8 | abstract fun release()
9 |
10 | }
11 |
12 | abstract class ColorConverterImage: ColorConverter() {
13 |
14 | abstract fun getBitmapFromImage(image: Image): Bitmap
15 |
16 | }
17 |
--------------------------------------------------------------------------------
/library-client-rtsp/src/main/java/com/alexvas/rtsp/parser/AacParser.java:
--------------------------------------------------------------------------------
1 | package com.alexvas.rtsp.parser;
2 |
3 | import android.annotation.SuppressLint;
4 | import android.util.Log;
5 |
6 | import androidx.annotation.NonNull;
7 | import androidx.annotation.Nullable;
8 |
9 | import androidx.media3.common.util.ParsableBitArray;
10 | import androidx.media3.common.util.ParsableByteArray;
11 |
12 | // https://tools.ietf.org/html/rfc3640
13 | // +---------+-----------+-----------+---------------+
14 | // | RTP | AU Header | Auxiliary | Access Unit |
15 | // | Header | Section | Section | Data Section |
16 | // +---------+-----------+-----------+---------------+
17 | //
18 | // <----------RTP Packet Payload----------->
19 | @SuppressLint("UnsafeOptInUsageError")
20 | public class AacParser {
21 |
22 | private static final String TAG = AacParser.class.getSimpleName();
23 | private static final boolean DEBUG = false;
24 |
25 | private final ParsableBitArray headerScratchBits;
26 | private final ParsableByteArray headerScratchBytes;
27 |
28 | private static final int MODE_LBR = 0;
29 | private static final int MODE_HBR = 1;
30 |
31 | // Number of bits for AAC AU sizes, indexed by mode (LBR and HBR)
32 | private static final int[] NUM_BITS_AU_SIZES = {6, 13};
33 |
34 | // Number of bits for AAC AU index(-delta), indexed by mode (LBR and HBR)
35 | private static final int[] NUM_BITS_AU_INDEX = {2, 3};
36 |
37 | // Frame Sizes for AAC AU fragments, indexed by mode (LBR and HBR)
38 | private static final int[] FRAME_SIZES = {63, 8191};
39 |
40 | private final int _aacMode;
41 | private boolean completeFrameIndicator = true;
42 |
43 | public AacParser(@NonNull String aacMode) {
44 | _aacMode = aacMode.equalsIgnoreCase("AAC-lbr") ? MODE_LBR : MODE_HBR;
45 |
46 | headerScratchBits = new ParsableBitArray();
47 | headerScratchBytes = new ParsableByteArray();
48 | }
49 |
50 | @Nullable
51 | public byte[] processRtpPacketAndGetSample(@NonNull byte[] data, int length) {
52 | if (DEBUG)
53 | Log.v(TAG, "processRtpPacketAndGetSample(length=" + length + ")");
54 | int auHeadersCount = 1;
55 | int numBitsAuSize = NUM_BITS_AU_SIZES[_aacMode];
56 | int numBitsAuIndex = NUM_BITS_AU_INDEX[_aacMode];
57 |
58 | ParsableByteArray packet = new ParsableByteArray(data, length);
59 |
60 | // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- .. -+-+-+-+-+-+-+-+-+-+
61 | // |AU-headers-length|AU-header|AU-header| |AU-header|padding|
62 | // | | (1) | (2) | | (n) | bits |
63 | // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- .. -+-+-+-+-+-+-+-+-+-+
64 | int auHeadersLength = packet.readShort();//((data[0] & 0xFF) << 8) | (data[1] & 0xFF);
65 | int auHeadersLengthBytes = (auHeadersLength + 7) / 8;
66 |
67 | headerScratchBytes.reset(auHeadersLengthBytes);
68 | packet.readBytes(headerScratchBytes.getData(), 0, auHeadersLengthBytes);
69 | headerScratchBits.reset(headerScratchBytes.getData());
70 |
71 | int bitsAvailable = auHeadersLength - (numBitsAuSize + numBitsAuIndex);
72 |
73 | if (bitsAvailable > 0) {// && (numBitsAuSize + numBitsAuSize) > 0) {
74 | auHeadersCount += bitsAvailable / (numBitsAuSize + numBitsAuIndex);
75 | }
76 |
77 | if (auHeadersCount == 1) {
78 | int auSize = headerScratchBits.readBits(numBitsAuSize);
79 | int auIndex = headerScratchBits.readBits(numBitsAuIndex);
80 |
81 | if (completeFrameIndicator) {
82 | if (auIndex == 0) {
83 | if (packet.bytesLeft() == auSize) {
84 | return handleSingleAacFrame(packet);
85 |
86 | } else {
87 | // handleFragmentationAacFrame(packet, auSize);
88 | }
89 | }
90 | } else {
91 | // handleFragmentationAacFrame(packet, auSize);
92 | }
93 |
94 | } else {
95 | if (completeFrameIndicator) {
96 | // handleMultipleAacFrames(packet, auHeadersLength);
97 | }
98 | }
99 | // byte[] auHeader = new byte[length-2-auHeadersLengthBytes];
100 | // System.arraycopy(data,2-auHeadersLengthBytes, auHeader,0, auHeader.length);
101 | // if (DEBUG)
102 | // Log.d(TAG, "AU headers size: " + auHeadersLengthBytes + ", AU headers: " + auHeadersCount + ", sample length: " + auHeader.length);
103 | // return auHeader;
104 | return new byte[0];
105 | }
106 |
107 | private byte[] handleSingleAacFrame(ParsableByteArray packet) {
108 | int length = packet.bytesLeft();
109 | byte[] data = new byte[length];
110 | System.arraycopy(packet.getData(), packet.getPosition(), data,0, data.length);
111 | return data;
112 | }
113 |
114 | // private static final class AUHeader {
115 | // private int size;
116 | // private int index;
117 | //
118 | // public AUHeader(int size, int index) {
119 | // this.size = size;
120 | // this.index = index;
121 | // }
122 | //
123 | // public int size() { return size; }
124 | //
125 | // public int index() { return index; }
126 | // }
127 |
128 | // /**
129 | // * Stores the consecutive fragment AU to reconstruct an AAC-Frame
130 | // */
131 | // private static final class FragmentedAacFrame {
132 | // public byte[] auData;
133 | // public int auLength;
134 | // public int auSize;
135 | //
136 | // private int sequence;
137 | //
138 | // public FragmentedAacFrame(int frameSize) {
139 | // // Initialize data
140 | // auData = new byte[frameSize];
141 | // sequence = -1;
142 | // }
143 | //
144 | // /**
145 | // * Resets the buffer, clearing any data that it holds.
146 | // */
147 | // public void reset() {
148 | // auLength = 0;
149 | // auSize = 0;
150 | // sequence = -1;
151 | // }
152 | //
153 | // public void sequence(int sequence) {
154 | // this.sequence = sequence;
155 | // }
156 | //
157 | // public int sequence() {
158 | // return sequence;
159 | // }
160 | //
161 | // /**
162 | // * Called to add a fragment unit to fragmented AU.
163 | // *
164 | // * @param fragment Holds the data of fragment unit being passed.
165 | // * @param offset The offset of the data in {@code fragment}.
166 | // * @param limit The limit (exclusive) of the data in {@code fragment}.
167 | // */
168 | // public void appendFragment(byte[] fragment, int offset, int limit) {
169 | // if (auSize == 0) {
170 | // auSize = limit;
171 | // } else if (auSize != limit) {
172 | // reset();
173 | // }
174 | //
175 | // if (auData.length < auLength + limit) {
176 | // auData = Arrays.copyOf(auData, (auLength + limit) * 2);
177 | // }
178 | //
179 | // System.arraycopy(fragment, offset, auData, auLength, limit);
180 | // auLength += limit;
181 | // }
182 | //
183 | // public boolean isCompleted() {
184 | // return auSize == auLength;
185 | // }
186 | // }
187 |
188 | }
189 |
--------------------------------------------------------------------------------
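The RFC 3640 layout in the header comment is exactly what processRtpPacketAndGetSample() walks: a 16-bit AU-headers-length (counted in bits), the AU headers themselves (a 13-bit AU-size plus a 3-bit AU-index in the default AAC-hbr mode), then the access-unit data. A worked example for a payload carrying one 412-byte AAC frame:

    // AAC-hbr, single AU of 412 bytes:
    //   AU-headers-length = 16 bits           -> first two payload bytes: 0x00 0x10
    //   AU header: AU-size = 412, AU-index = 0 -> 412 << 3 = 0x0CE0, i.e. bytes 0x0C 0xE0
    //   bitsAvailable = 16 - (13 + 3) = 0      -> auHeadersCount stays 1
    //   packet.bytesLeft() == 412 == auSize    -> handleSingleAacFrame() returns the raw AU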
/library-client-rtsp/src/main/java/com/alexvas/rtsp/parser/RtpH264Parser.kt:
--------------------------------------------------------------------------------
1 | package com.alexvas.rtsp.parser
2 |
3 | import android.util.Log
4 | import com.alexvas.utils.VideoCodecUtils
5 | import com.alexvas.utils.VideoCodecUtils.getH264NalUnitTypeString
6 |
7 | class RtpH264Parser: RtpParser() {
8 |
9 | override fun processRtpPacketAndGetNalUnit(data: ByteArray, length: Int, marker: Boolean): ByteArray? {
10 | if (DEBUG) Log.v(TAG, "processRtpPacketAndGetNalUnit(data.size=${data.size}, length=$length, marker=$marker)")
11 |
12 | val nalType = (data[0].toInt() and 0x1F).toByte()
13 | val packFlag = data[1].toInt() and 0xC0
14 | var nalUnit: ByteArray? = null
15 |
16 | if (DEBUG)
17 | Log.d(TAG, "\t\tNAL type: ${getH264NalUnitTypeString(nalType)}, pack flag: 0x${Integer.toHexString(packFlag).lowercase()}")
18 |
19 | when (nalType) {
20 | VideoCodecUtils.NAL_STAP_A, VideoCodecUtils.NAL_STAP_B -> {
21 | // Not supported
22 | }
23 |
24 | VideoCodecUtils.NAL_MTAP16, VideoCodecUtils.NAL_MTAP24 -> {
25 | // Not supported
26 | }
27 |
28 | VideoCodecUtils.NAL_FU_A -> {
29 | when (packFlag) {
30 | 0x80 -> {
31 | addStartFragmentedPacket(data, length)
32 | }
33 |
34 | 0x00 -> {
35 | if (marker) {
36 | // Sometimes 0x40 end packet is not arrived. Use marker bit in this case
37 | // to finish fragmented packet.
38 | nalUnit = addEndFragmentedPacketAndCombine(data, length)
39 | } else {
40 | addMiddleFragmentedPacket(data, length)
41 | }
42 | }
43 |
44 | 0x40 -> {
45 | nalUnit = addEndFragmentedPacketAndCombine(data, length)
46 | }
47 | }
48 | }
49 |
50 | VideoCodecUtils.NAL_FU_B -> {
51 | // Not supported
52 | }
53 |
54 | else -> {
55 | nalUnit = processSingleFramePacket(data, length)
56 | clearFragmentedBuffer()
57 | if (DEBUG) Log.d(TAG, "Single NAL (${nalUnit.size})")
58 | }
59 | }
60 | return nalUnit
61 | }
62 |
63 | private fun addStartFragmentedPacket(data: ByteArray, length: Int) {
64 | if (DEBUG) Log.v(TAG, "addStartFragmentedPacket(data.size=${data.size}, length=$length)")
65 | fragmentedPackets = 0
66 | fragmentedBufferLength = length - 1
67 | fragmentedBuffer[0] = ByteArray(fragmentedBufferLength).apply {
68 | this[0] = ((data[0].toInt() and 0xE0) or (data[1].toInt() and 0x1F)).toByte()
69 | }
70 | System.arraycopy(data, 2, fragmentedBuffer[0]!!, 1, length - 2)
71 | }
72 |
73 | private fun addMiddleFragmentedPacket(data: ByteArray, length: Int) {
74 | if (DEBUG) Log.v(TAG, "addMiddleFragmentedPacket(data.size=${data.size}, length=$length)")
75 | fragmentedPackets++
76 | if (fragmentedPackets >= fragmentedBuffer.size) {
77 | Log.e(TAG, "Too many middle packets. No NAL FU_A end packet received. Skipped RTP packet.")
78 | fragmentedBuffer[0] = null
79 | } else {
80 | fragmentedBufferLength += length - 2
81 | fragmentedBuffer[fragmentedPackets] = ByteArray(length - 2)
82 | System.arraycopy(data, 2, fragmentedBuffer[fragmentedPackets]!!, 0, length - 2)
83 | }
84 | }
85 |
86 | private fun addEndFragmentedPacketAndCombine(data: ByteArray, length: Int): ByteArray? {
87 | if (DEBUG) Log.v(TAG, "addEndFragmentedPacketAndCombine(data.size=${data.size}, length=$length)")
88 | var nalUnit: ByteArray? = null
89 | var tmpLen: Int
90 | if (fragmentedBuffer[0] == null) {
91 | Log.e(TAG, "No NAL FU_A start packet received. Skipped RTP packet.")
92 | } else {
93 | nalUnit = ByteArray(fragmentedBufferLength + length + 2)
94 | writeNalPrefix0001(nalUnit)
95 | tmpLen = 4
96 | // Write start and middle packets
97 | for (i in 0 until fragmentedPackets + 1) {
98 | fragmentedBuffer[i]!!.apply {
99 | System.arraycopy(
100 | this,
101 | 0,
102 | nalUnit,
103 | tmpLen,
104 | this.size
105 | )
106 | tmpLen += this.size
107 | }
108 | }
109 | // Write end packet
110 | System.arraycopy(data, 2, nalUnit, tmpLen, length - 2)
111 | clearFragmentedBuffer()
112 | if (DEBUG) Log.d(TAG, "Fragmented NAL (${nalUnit.size})")
113 | }
114 | return nalUnit
115 | }
116 |
117 | private fun clearFragmentedBuffer() {
118 | if (DEBUG) Log.v(TAG, "clearFragmentedBuffer()")
119 | for (i in 0 until fragmentedPackets + 1) {
120 | fragmentedBuffer[i] = null
121 | }
122 | }
123 |
124 | companion object {
125 | private val TAG: String = RtpH264Parser::class.java.simpleName
126 | private const val DEBUG = false
127 | }
128 |
129 | }
130 |
--------------------------------------------------------------------------------
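The parser above rebuilds a fragmented H.264 NAL unit by restoring its first byte from the FU indicator and FU header (RFC 3984/6184, Section 5.8), exactly as addStartFragmentedPacket() does with data[0] and data[1]. A minimal standalone sketch of that first-byte reconstruction; the function name and sample values are illustrative only, not library code:

    // Illustrative sketch, not library code: rebuild the first byte of an H.264 NAL unit
    // from an FU-A payload. data[0] = FU indicator (F, NRI, type=28), data[1] = FU header (S, E, R, type).
    fun rebuildH264NalHeaderByte(fuIndicator: Byte, fuHeader: Byte): Byte {
        val forbiddenAndNri = fuIndicator.toInt() and 0xE0 // keep F and NRI bits of the FU indicator
        val originalType = fuHeader.toInt() and 0x1F       // keep the original NAL unit type from the FU header
        return (forbiddenAndNri or originalType).toByte()
    }

    fun main() {
        // FU indicator 0x7C (NRI=3, type=28/FU-A), FU header 0x85 (start bit set, original type=5/IDR slice)
        val first = rebuildH264NalHeaderByte(0x7C.toByte(), 0x85.toByte())
        println("0x%02X".format(first)) // prints 0x65: NRI=3, type=5 (IDR slice)
    }
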
/library-client-rtsp/src/main/java/com/alexvas/rtsp/parser/RtpH265Parser.kt:
--------------------------------------------------------------------------------
1 | package com.alexvas.rtsp.parser
2 |
3 | import android.util.Log
4 |
5 | class RtpH265Parser: RtpParser() {
6 |
7 | override fun processRtpPacketAndGetNalUnit(data: ByteArray, length: Int, marker: Boolean): ByteArray? {
8 | if (DEBUG) Log.v(TAG, "processRtpPacketAndGetNalUnit(length=$length, marker=$marker)")
9 |
10 | // NAL Unit Header.type (RFC7798 Section 1.1.4).
11 | val nalType = ((data[0].toInt() shr 1) and 0x3F).toByte()
12 | var nalUnit: ByteArray? = null
13 |
14 | // Log.d(TAG, "\t\tNAL type: ${VideoCodecUtils.getH265NalUnitTypeString(nalType)}")
15 |
16 | if (nalType in 0..<RTP_PACKET_TYPE_AP) {
17 | // Single NAL unit packet
18 | nalUnit = processSingleFramePacket(data, length)
19 | clearFragmentedBuffer()
20 | if (DEBUG) Log.d(TAG, "Single NAL (${nalUnit.size})")
21 | return nalUnit
22 | }
23 |
24 | // Aggregation Packets (AP) are not supported
25 | if (nalType == RTP_PACKET_TYPE_AP) {
26 | return null
27 | }
28 |
29 | // Only Fragmentation Units (FU) are handled below. RFC7798 Section 4.4.3.
30 | if (nalType != RTP_PACKET_TYPE_FU) {
31 | return null
32 | }
33 |
34 | // FU header follows the 2-byte payload header.
35 | val fuHeader = data[2].toInt()
36 | val isFirstFuPacket = (fuHeader and 0x80) > 0
37 | val isLastFuPacket = (fuHeader and 0x40) > 0
38 |
39 | if (isFirstFuPacket) {
40 | addStartFragmentedPacket(data, length)
41 | } else if (isLastFuPacket || marker) {
42 | return addEndFragmentedPacketAndCombine(data, length)
43 | } else {
44 | addMiddleFragmentedPacket(data, length)
45 | }
46 | return null
47 | }
48 |
49 | private fun addStartFragmentedPacket(data: ByteArray, length: Int) {
50 | if (DEBUG) Log.v(TAG, "addStartFragmentedPacket(data.size=${data.size}, length=$length)")
51 | fragmentedPackets = 0
52 | fragmentedBufferLength = length - 1
53 | fragmentedBuffer[0] = ByteArray(fragmentedBufferLength).apply {
54 |
55 | val tid = (data[1].toInt() and 0x7)
56 | val fuHeader = data[2].toInt()
57 | val nalUnitType = fuHeader and 0x3F
58 |
59 | // Convert the RTP payload header into an HEVC NAL unit header according to RFC7798 Section 1.1.4.
60 | // RTP byte 0: ignored.
61 | // RTP byte 1: repurposed as HEVC NALU byte 0, copying the NALU type.
62 | // RTP byte 2: repurposed as HEVC NALU byte 1, layerId required to be zero, copying only tid.
63 | // Set data position from byte 1 as byte 0 is ignored.
64 | this[0] = (((nalUnitType shl 1) and 0x7F).toByte())
65 | this[1] = tid.toByte()
66 | }
67 | System.arraycopy(data, 3, fragmentedBuffer[0]!!, 2, length - 3)
68 | }
69 |
70 | private fun addMiddleFragmentedPacket(data: ByteArray, length: Int) {
71 | if (DEBUG) Log.v(TAG, "addMiddleFragmentedPacket(data.size=${data.size}, length=$length)")
72 | fragmentedPackets++
73 | if (fragmentedPackets >= fragmentedBuffer.size) {
74 | Log.e(TAG, "Too many middle packets. No RTP_PACKET_TYPE_FU end packet received. Skipped RTP packet.")
75 | fragmentedBuffer[0] = null
76 | } else {
77 | fragmentedBufferLength += length - 3
78 | fragmentedBuffer[fragmentedPackets] = ByteArray(length - 3).apply {
79 | System.arraycopy(data, 3, this, 0, length - 3)
80 | }
81 | }
82 | }
83 |
84 | private fun addEndFragmentedPacketAndCombine(data: ByteArray, length: Int): ByteArray? {
85 | if (DEBUG) Log.v(TAG, "addEndFragmentedPacketAndCombine(data.size=${data.size}, length=$length)")
86 | var nalUnit: ByteArray? = null
87 | if (fragmentedBuffer[0] == null) {
88 | Log.e(TAG, "No NAL FU_A start packet received. Skipped RTP packet.")
89 | } else {
90 | nalUnit = ByteArray(fragmentedBufferLength + length + 1) // 4 byte prefix + fragments + end packet payload (length - 3)
91 | writeNalPrefix0001(nalUnit)
92 | var tmpLen = 4
93 | // Write start and middle packets
94 | for (i in 0 until fragmentedPackets + 1) {
95 | fragmentedBuffer[i]!!.apply {
96 | System.arraycopy(
97 | this,
98 | 0,
99 | nalUnit,
100 | tmpLen,
101 | this.size
102 | )
103 | tmpLen += this.size
104 | }
105 | }
106 | // Write end packet
107 | System.arraycopy(data, 3, nalUnit, tmpLen, length - 3)
108 | clearFragmentedBuffer()
109 | if (DEBUG) Log.d(TAG, "Fragmented NAL (${nalUnit.size})")
110 | }
111 | return nalUnit
112 | }
113 |
114 | private fun clearFragmentedBuffer() {
115 | if (DEBUG) Log.v(TAG, "clearFragmentedBuffer()")
116 | for (i in 0 until fragmentedPackets + 1) {
117 | fragmentedBuffer[i] = null
118 | }
119 | }
120 |
121 | companion object {
122 | private val TAG: String = RtpH265Parser::class.java.simpleName
123 | private const val DEBUG = false
124 |
125 | /** Aggregation Packet. RFC7798 Section 4.4.2. */
126 | private const val RTP_PACKET_TYPE_AP: Byte = 48
127 | /** Fragmentation Unit. RFC7798 Section 4.4.3. */
128 | private const val RTP_PACKET_TYPE_FU: Byte = 49
129 | }
130 |
131 | }
132 |
--------------------------------------------------------------------------------
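For HEVC the two-byte NAL unit header is rebuilt from the RTP payload header and the FU header (RFC7798, Section 4.4.3), as addStartFragmentedPacket() above does. A small standalone sketch with illustrative names and values, not library code:

    // Illustrative sketch, not library code: rebuild the 2-byte HEVC NAL unit header from an FU payload.
    // data[0..1] = PayloadHdr (type=49), data[2] = FU header (S, E, FuType).
    fun rebuildHevcNalHeader(payloadHdrByte1: Byte, fuHeader: Byte): ByteArray {
        val tid = payloadHdrByte1.toInt() and 0x07   // TID copied from the payload header
        val nalUnitType = fuHeader.toInt() and 0x3F  // original NAL unit type from the FU header
        return byteArrayOf(
            ((nalUnitType shl 1) and 0x7F).toByte(), // F=0, 6-bit type, layerId MSB=0
            tid.toByte()                             // layerId LSBs=0, TID
        )
    }

    fun main() {
        // FU header 0x93: S=1, FuType=19 (IDR_W_RADL); payload header byte 1 carries TID=1
        val header = rebuildHevcNalHeader(0x01.toByte(), 0x93.toByte())
        println(header.joinToString(" ") { "0x%02X".format(it) }) // prints 0x26 0x01
    }
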
/library-client-rtsp/src/main/java/com/alexvas/rtsp/parser/RtpHeaderParser.java:
--------------------------------------------------------------------------------
1 | package com.alexvas.rtsp.parser;
2 |
3 | import android.util.Log;
4 |
5 | import androidx.annotation.NonNull;
6 | import androidx.annotation.Nullable;
7 |
8 | import com.alexvas.utils.NetUtils;
9 |
10 | import java.io.IOException;
11 | import java.io.InputStream;
12 |
13 | public class RtpHeaderParser {
14 |
15 | private static final String TAG = RtpHeaderParser.class.getSimpleName();
16 | private static final boolean DEBUG = false;
17 |
18 | private final static int RTP_HEADER_SIZE = 12;
19 |
20 | public static class RtpHeader {
21 | public int version;
22 | public int padding;
23 | public int extension;
24 | public int cc;
25 | public int marker;
26 | public int payloadType;
27 | public int sequenceNumber;
28 | public long timeStamp;
29 | public long ssrc;
30 | public int payloadSize;
31 |
32 | public long getTimestampMsec() {
33 | return (long)(timeStamp * 11.111111);
34 | }
35 |
36 | // If an RTP header is found, fill in the first 4 bytes of the header and return true
37 | private static boolean searchForNextRtpHeader(@NonNull InputStream inputStream, @NonNull byte[] header /*out*/) throws IOException {
38 | if (header.length < 4)
39 | throw new IOException("Invalid allocated buffer size");
40 |
41 | int bytesRemaining = 100000; // 100 KB max to check
42 | boolean foundFirstByte = false;
43 | boolean foundSecondByte = false;
44 | byte[] oneByte = new byte[1];
45 | // Search for {0x24, 0x00}
46 | do {
47 | if (bytesRemaining-- < 0)
48 | return false;
49 | // Read 1 byte
50 | NetUtils.readData(inputStream, oneByte, 0, 1);
51 | if (foundFirstByte) {
52 | // Found 0x24. Checking for 0x00.
53 | if (oneByte[0] == 0x00)
54 | foundSecondByte = true;
55 | else
56 | foundFirstByte = false;
57 | }
58 | if (!foundFirstByte && oneByte[0] == 0x24) {
59 | // Found 0x24
60 | foundFirstByte = true;
61 | }
62 | } while (!foundSecondByte);
63 | header[0] = 0x24;
64 | header[1] = oneByte[0];
65 | // Read 2 bytes more (packet size)
66 | NetUtils.readData(inputStream, header, 2, 2);
67 | return true;
68 | }
69 |
70 | @Nullable
71 | private static RtpHeader parseData(@NonNull byte[] header, int packetSize) {
72 | RtpHeader rtpHeader = new RtpHeader();
73 | rtpHeader.version = (header[0] & 0xFF) >> 6;
74 | if (rtpHeader.version != 2) {
75 | if (DEBUG)
76 | Log.e(TAG,"Not a RTP packet (" + rtpHeader.version + ")");
77 | return null;
78 | }
79 |
80 | // 80 60 40 91 fd ab d4 2a
81 | // 80 c8 00 06
82 | rtpHeader.padding = (header[0] & 0x20) >> 5; // 0b00100000
83 | rtpHeader.extension = (header[0] & 0x10) >> 4;
84 | rtpHeader.marker = (header[1] & 0x80) >> 7;
85 | rtpHeader.payloadType = header[1] & 0x7F;
86 | rtpHeader.sequenceNumber = (header[3] & 0xFF) + ((header[2] & 0xFF) << 8);
87 | rtpHeader.timeStamp = (header[7] & 0xFF) + ((header[6] & 0xFF) << 8) + ((header[5] & 0xFF) << 16) + ((header[4] & 0xFF) << 24) & 0xffffffffL;
88 | rtpHeader.ssrc = (header[11] & 0xFF) + ((header[10] & 0xFF) << 8) + ((header[9] & 0xFF) << 16) + ((header[8] & 0xFF) << 24) & 0xffffffffL;
89 | rtpHeader.payloadSize = packetSize - RTP_HEADER_SIZE;
90 | return rtpHeader;
91 | }
92 |
93 | private static int getPacketSize(@NonNull byte[] header) {
94 | int packetSize = ((header[2] & 0xFF) << 8) | (header[3] & 0xFF);
95 | if (DEBUG)
96 | Log.d(TAG, "Packet size: " + packetSize);
97 | return packetSize;
98 | }
99 |
100 | public void dumpHeader() {
101 | Log.d("RTP","\t\tRTP header version: " + version
102 | + ", padding: " + padding
103 | + ", ext: " + extension
104 | + ", cc: " + cc
105 | + ", marker: " + marker
106 | + ", payload type: " + payloadType
107 | + ", seq num: " + sequenceNumber
108 | + ", ts: " + timeStamp
109 | + ", ssrc: " + ssrc
110 | + ", payload size: " + payloadSize);
111 | }
112 | }
113 |
114 | @Nullable
115 | public static RtpHeader readHeader(@NonNull InputStream inputStream) throws IOException {
116 | // 24 01 00 1c 80 c8 00 06 7f 1d d2 c4
117 | // 24 01 00 1c 80 c8 00 06 13 9b cf 60
118 | // 24 02 01 12 80 e1 01 d2 00 07 43 f0
119 | byte[] header = new byte[RTP_HEADER_SIZE];
120 | // Skip 4 bytes of the interleaved frame header (TCP only). These bytes are not present in UDP.
121 | NetUtils.readData(inputStream, header, 0, 4);
122 | if (DEBUG && header[0] == 0x24)
123 | Log.d(TAG, header[1] == 0 ? "RTP packet" : "RTCP packet");
124 |
125 | int packetSize = RtpHeader.getPacketSize(header);
126 | if (DEBUG)
127 | Log.d(TAG, "Packet size: " + packetSize);
128 |
129 | if (NetUtils.readData(inputStream, header, 0, header.length) == header.length) {
130 | RtpHeader rtpHeader = RtpHeader.parseData(header, packetSize);
131 | if (rtpHeader == null) {
132 | // Header not found. Possible keep-alive response. Search for another RTP header.
133 | boolean foundHeader = RtpHeader.searchForNextRtpHeader(inputStream, header);
134 | if (foundHeader) {
135 | packetSize = RtpHeader.getPacketSize(header);
136 | if (NetUtils.readData(inputStream, header, 0, header.length) == header.length)
137 | return RtpHeader.parseData(header, packetSize);
138 | }
139 | } else {
140 | return rtpHeader;
141 | }
142 | }
143 | return null;
144 | }
145 | }
146 |
--------------------------------------------------------------------------------
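parseData() above follows the fixed RTP header layout from RFC 3550 Section 5.1: byte 0 holds version/padding/extension/CSRC count, byte 1 the marker and payload type, bytes 2-3 the sequence number, bytes 4-7 the timestamp and bytes 8-11 the SSRC. A compact Kotlin sketch of the same bit arithmetic, using only illustrative names:

    // Illustrative sketch, not library code: decode the fixed 12-byte RTP header (RFC 3550 Section 5.1).
    data class SimpleRtpHeader(
        val version: Int, val padding: Int, val extension: Int, val cc: Int,
        val marker: Int, val payloadType: Int, val sequenceNumber: Int,
        val timestamp: Long, val ssrc: Long)

    fun parseRtpHeader(h: ByteArray): SimpleRtpHeader {
        require(h.size >= 12) { "RTP header is 12 bytes" }
        fun b(i: Int) = h[i].toInt() and 0xFF
        return SimpleRtpHeader(
            version = b(0) shr 6,
            padding = (b(0) shr 5) and 0x01,
            extension = (b(0) shr 4) and 0x01,
            cc = b(0) and 0x0F,
            marker = b(1) shr 7,
            payloadType = b(1) and 0x7F,
            sequenceNumber = (b(2) shl 8) or b(3),
            timestamp = (b(4).toLong() shl 24) or (b(5).toLong() shl 16) or (b(6).toLong() shl 8) or b(7).toLong(),
            ssrc = (b(8).toLong() shl 24) or (b(9).toLong() shl 16) or (b(10).toLong() shl 8) or b(11).toLong())
    }
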
/library-client-rtsp/src/main/java/com/alexvas/rtsp/parser/RtpParser.kt:
--------------------------------------------------------------------------------
1 | package com.alexvas.rtsp.parser
2 |
3 | abstract class RtpParser {
4 |
5 | abstract fun processRtpPacketAndGetNalUnit(data: ByteArray, length: Int, marker: Boolean): ByteArray?
6 |
7 | // TODO Use already allocated buffer with RtpPacket.MAX_SIZE = 65507
8 | // Used only for fragmented packets
9 | protected val fragmentedBuffer = arrayOfNulls<ByteArray>(1024)
10 | protected var fragmentedBufferLength = 0
11 | protected var fragmentedPackets = 0
12 |
13 | protected fun writeNalPrefix0001(buffer: ByteArray) {
14 | buffer[0] = 0x00
15 | buffer[1] = 0x00
16 | buffer[2] = 0x00
17 | buffer[3] = 0x01
18 | }
19 |
20 | protected fun processSingleFramePacket(data: ByteArray, length: Int): ByteArray {
21 | return ByteArray(4 + length).apply {
22 | writeNalPrefix0001(this)
23 | System.arraycopy(data, 0, this, 4, length)
24 | }
25 | }
26 |
27 | }
28 |
--------------------------------------------------------------------------------
/library-client-rtsp/src/main/java/com/alexvas/rtsp/widget/RtspImageView.kt:
--------------------------------------------------------------------------------
1 | package com.alexvas.rtsp.widget
2 |
3 | import android.content.Context
4 | import android.graphics.Bitmap
5 | import android.net.Uri
6 | import android.util.AttributeSet
7 | import android.util.Log
8 | import android.widget.ImageView
9 | import com.alexvas.rtsp.codec.VideoDecodeThread
10 | import com.alexvas.rtsp.codec.VideoDecoderBitmapThread
11 | import com.alexvas.rtsp.widget.RtspProcessor.Statistics
12 | import com.limelight.binding.video.MediaCodecHelper
13 |
14 | /**
15 | * Low latency RTSP stream playback on image view (bitmap).
16 | */
17 | class RtspImageView : ImageView {
18 |
19 | /** Optional listener to be called when bitmap obtained from video decoder. */
20 | var onRtspImageBitmapListener: RtspImageBitmapListener? = null
21 |
22 | interface RtspImageBitmapListener {
23 | fun onRtspImageBitmapObtained(bitmap: Bitmap) {}
24 | }
25 |
26 | private var rtspProcessor = RtspProcessor(onVideoDecoderCreateRequested = {
27 | videoMimeType, videoRotation, videoFrameQueue, videoDecoderListener, videoDecoderType ->
28 | VideoDecoderBitmapThread(
29 | videoMimeType,
30 | videoRotation,
31 | videoFrameQueue,
32 | videoDecoderListener,
33 | videoDecoderBitmapListener,
34 | videoDecoderType,
35 | )
36 | })
37 |
38 | private val videoDecoderBitmapListener = object : VideoDecoderBitmapThread.VideoDecoderBitmapListener {
39 | override fun onVideoDecoderBitmapObtained(bitmap: Bitmap) {
40 | onRtspImageBitmapListener?.onRtspImageBitmapObtained(bitmap)
41 | setImageBitmap(bitmap)
42 | invalidate()
43 | }
44 | }
45 |
46 | var statistics = Statistics()
47 | get() = rtspProcessor.statistics
48 | private set
49 |
50 | var videoRotation: Int
51 | get() = rtspProcessor.videoRotation
52 | set(value) { rtspProcessor.videoRotation = value }
53 |
54 | var videoDecoderType: VideoDecodeThread.DecoderType
55 | get() = rtspProcessor.videoDecoderType
56 | set(value) { rtspProcessor.videoDecoderType = value }
57 |
58 | var debug: Boolean
59 | get() = rtspProcessor.debug
60 | set(value) { rtspProcessor.debug = value }
61 |
62 | constructor(context: Context) : super(context) {
63 | initView(context, null, 0)
64 | }
65 |
66 | constructor(context: Context, attrs: AttributeSet?) : super(context, attrs) {
67 | initView(context, attrs, 0)
68 | }
69 |
70 | constructor(context: Context, attrs: AttributeSet?, defStyleAttr: Int) : super(context, attrs, defStyleAttr) {
71 | initView(context, attrs, defStyleAttr)
72 | }
73 |
74 | private fun initView(context: Context, attrs: AttributeSet?, defStyleAttr: Int) {
75 | if (DEBUG) Log.v(TAG, "initView()")
76 | MediaCodecHelper.initialize(context, /*glRenderer*/ "")
77 | }
78 |
79 | fun init(uri: Uri, username: String?, password: String?, userAgent: String?) {
80 | if (DEBUG) Log.v(TAG, "init(uri='$uri', username='$username', password='$password', userAgent='$userAgent')")
81 | rtspProcessor.init(uri, username, password, userAgent)
82 | }
83 |
84 | /**
85 | * Start RTSP client.
86 | *
87 | * @param requestVideo request video track
88 | * @param requestAudio request audio track
89 | * @param requestApplication request application track
90 | * @see https://datatracker.ietf.org/doc/html/rfc4566#section-5.14
91 | */
92 | fun start(requestVideo: Boolean, requestAudio: Boolean, requestApplication: Boolean) {
93 | if (DEBUG) Log.v(TAG, "start(requestVideo=$requestVideo, requestAudio=$requestAudio, requestApplication=$requestApplication)")
94 | rtspProcessor.start(requestVideo, requestAudio, requestApplication)
95 | }
96 |
97 | /**
98 | * Stop RTSP client.
99 | */
100 | fun stop() {
101 | if (DEBUG) Log.v(TAG, "stop()")
102 | rtspProcessor.stop()
103 | }
104 |
105 | fun isStarted(): Boolean {
106 | return rtspProcessor.isStarted()
107 | }
108 |
109 | fun setStatusListener(listener: RtspStatusListener?) {
110 | if (DEBUG) Log.v(TAG, "setStatusListener()")
111 | rtspProcessor.statusListener = listener
112 | }
113 |
114 | fun setDataListener(listener: RtspDataListener?) {
115 | if (DEBUG) Log.v(TAG, "setDataListener()")
116 | rtspProcessor.dataListener = listener
117 | }
118 |
119 | companion object {
120 | private val TAG: String = RtspImageView::class.java.simpleName
121 | private const val DEBUG = false
122 | }
123 |
124 | }
125 |
--------------------------------------------------------------------------------
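A minimal usage sketch for the view above; the activity, layout id, URL and credentials are placeholders, not values from this repository:

    // Illustrative usage sketch: drive RtspImageView from an Activity.
    import android.net.Uri
    import android.os.Bundle
    import androidx.appcompat.app.AppCompatActivity
    import com.alexvas.rtsp.widget.RtspImageView

    class PlayerActivity : AppCompatActivity() {
        private lateinit var rtspImageView: RtspImageView

        override fun onCreate(savedInstanceState: Bundle?) {
            super.onCreate(savedInstanceState)
            setContentView(R.layout.activity_player)          // placeholder layout
            rtspImageView = findViewById(R.id.rtspImageView)  // placeholder view id
            rtspImageView.init(Uri.parse("rtsp://192.168.1.10:554/stream1"), "admin", "secret", "rtsp-client-android")
        }

        override fun onResume() {
            super.onResume()
            rtspImageView.start(requestVideo = true, requestAudio = false, requestApplication = false)
        }

        override fun onPause() {
            super.onPause()
            if (rtspImageView.isStarted()) rtspImageView.stop()
        }
    }
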
/library-client-rtsp/src/main/java/com/alexvas/rtsp/widget/RtspListeners.kt:
--------------------------------------------------------------------------------
1 | package com.alexvas.rtsp.widget
2 |
3 | /**
4 | * Listener for getting RTSP status update.
5 | */
6 | interface RtspStatusListener {
7 | fun onRtspStatusConnecting() {}
8 | fun onRtspStatusConnected() {}
9 | fun onRtspStatusDisconnecting() {}
10 | fun onRtspStatusDisconnected() {}
11 | fun onRtspStatusFailedUnauthorized() {}
12 | fun onRtspStatusFailed(message: String?) {}
13 | fun onRtspFirstFrameRendered() {}
14 | fun onRtspFrameSizeChanged(width: Int, height: Int) {}
15 | }
16 |
17 | /**
18 | * Listener for getting RTSP raw data, e.g. for recording.
19 | */
20 | interface RtspDataListener {
21 | fun onRtspDataVideoNalUnitReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {}
22 | fun onRtspDataAudioSampleReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {}
23 | fun onRtspDataApplicationDataReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {}
24 | }
25 |
--------------------------------------------------------------------------------
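All callbacks have empty default bodies, so a client overrides only what it needs. A short sketch of a status listener; the log tag and messages are illustrative:

    // Illustrative sketch: minimal status listener overriding a few callbacks.
    import android.util.Log
    import com.alexvas.rtsp.widget.RtspStatusListener

    val statusListener = object : RtspStatusListener {
        override fun onRtspStatusConnected() {
            Log.i("RtspDemo", "Connected")
        }
        override fun onRtspStatusFailed(message: String?) {
            Log.w("RtspDemo", "RTSP failed: $message")
        }
        override fun onRtspFrameSizeChanged(width: Int, height: Int) {
            Log.i("RtspDemo", "Video size changed: ${width}x$height")
        }
    }

The listener is attached with setStatusListener() on RtspImageView or RtspSurfaceView; an RtspDataListener is attached the same way via setDataListener() when raw NAL units or audio samples need to be recorded.
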
/library-client-rtsp/src/main/java/com/alexvas/rtsp/widget/RtspSurfaceView.kt:
--------------------------------------------------------------------------------
1 | package com.alexvas.rtsp.widget
2 |
3 | import android.content.Context
4 | import android.net.Uri
5 | import android.util.AttributeSet
6 | import android.util.Log
7 | import android.view.SurfaceHolder
8 | import android.view.SurfaceView
9 | import androidx.annotation.OptIn
10 | import androidx.media3.common.util.UnstableApi
11 | import androidx.media3.container.NalUnitUtil
12 | import com.alexvas.rtsp.codec.VideoDecodeThread.DecoderType
13 | import com.alexvas.rtsp.codec.VideoDecoderSurfaceThread
14 | import com.alexvas.rtsp.widget.RtspProcessor.Statistics
15 | import com.limelight.binding.video.MediaCodecHelper
16 |
17 | /**
18 | * Low latency RTSP stream playback on surface view.
19 | */
20 | open class RtspSurfaceView: SurfaceView {
21 |
22 | private var surfaceWidth = 1920
23 | private var surfaceHeight = 1080
24 |
25 | private var rtspProcessor = RtspProcessor(
26 | onVideoDecoderCreateRequested = {
27 | videoMimeType, videoRotation, videoFrameQueue, videoDecoderListener, videoDecoderType ->
28 | VideoDecoderSurfaceThread(
29 | holder.surface,
30 | videoMimeType,
31 | surfaceWidth,
32 | surfaceHeight,
33 | videoRotation,
34 | videoFrameQueue,
35 | videoDecoderListener,
36 | videoDecoderType,
37 | )
38 | }
39 | )
40 |
41 | var statistics = Statistics()
42 | get() = rtspProcessor.statistics
43 | private set
44 |
45 | var videoRotation: Int
46 | get() = rtspProcessor.videoRotation
47 | set(value) { rtspProcessor.videoRotation = value }
48 |
49 | var videoDecoderType: DecoderType
50 | get() = rtspProcessor.videoDecoderType
51 | set(value) { rtspProcessor.videoDecoderType = value }
52 |
53 | var experimentalUpdateSpsFrameWithLowLatencyParams: Boolean
54 | get() = rtspProcessor.experimentalUpdateSpsFrameWithLowLatencyParams
55 | set(value) { rtspProcessor.experimentalUpdateSpsFrameWithLowLatencyParams = value }
56 |
57 | var debug: Boolean
58 | get() = rtspProcessor.debug
59 | set(value) { rtspProcessor.debug = value }
60 |
61 | private val surfaceCallback = object: SurfaceHolder.Callback {
62 | override fun surfaceCreated(holder: SurfaceHolder) {
63 | if (DEBUG) Log.v(TAG, "surfaceCreated()")
64 | }
65 |
66 | override fun surfaceChanged(holder: SurfaceHolder, format: Int, width: Int, height: Int) {
67 | if (DEBUG) Log.v(TAG, "surfaceChanged(format=$format, width=$width, height=$height)")
68 | surfaceWidth = width
69 | surfaceHeight = height
70 | }
71 |
72 | override fun surfaceDestroyed(holder: SurfaceHolder) {
73 | if (DEBUG) Log.v(TAG, "surfaceDestroyed()")
74 | rtspProcessor.stopDecoders()
75 | }
76 | }
77 |
78 | constructor(context: Context) : super(context) {
79 | initView(context, null, 0)
80 | }
81 |
82 | constructor(context: Context, attrs: AttributeSet?) : super(context, attrs) {
83 | initView(context, attrs, 0)
84 | }
85 |
86 | constructor(context: Context, attrs: AttributeSet?, defStyleAttr: Int) : super(context, attrs, defStyleAttr) {
87 | initView(context, attrs, defStyleAttr)
88 | }
89 |
90 | private fun initView(context: Context, attrs: AttributeSet?, defStyleAttr: Int) {
91 | if (DEBUG) Log.v(TAG, "initView()")
92 | MediaCodecHelper.initialize(context, /*glRenderer*/ "")
93 | holder.addCallback(surfaceCallback)
94 | }
95 |
96 | fun init(uri: Uri, username: String?, password: String?, userAgent: String?) {
97 | if (DEBUG) Log.v(TAG, "init(uri='$uri', username='$username', password='$password', userAgent='$userAgent')")
98 | rtspProcessor.init(uri, username, password, userAgent)
99 | }
100 |
101 | /**
102 | * Start RTSP client.
103 | *
104 | * @param requestVideo request video track
105 | * @param requestAudio request audio track
106 | * @param requestApplication request application track
107 | * @see https://datatracker.ietf.org/doc/html/rfc4566#section-5.14
108 | */
109 | fun start(requestVideo: Boolean, requestAudio: Boolean, requestApplication: Boolean = false) {
110 | if (DEBUG) Log.v(TAG, "start(requestVideo=$requestVideo, requestAudio=$requestAudio, requestApplication=$requestApplication)")
111 | rtspProcessor.start(requestVideo, requestAudio, requestApplication)
112 | }
113 |
114 | /**
115 | * Stop RTSP client.
116 | */
117 | fun stop() {
118 | if (DEBUG) Log.v(TAG, "stop()")
119 | rtspProcessor.stop()
120 | }
121 |
122 | fun isStarted(): Boolean {
123 | return rtspProcessor.isStarted()
124 | }
125 |
126 | fun setStatusListener(listener: RtspStatusListener?) {
127 | if (DEBUG) Log.v(TAG, "setStatusListener()")
128 | rtspProcessor.statusListener = listener
129 | }
130 |
131 | fun setDataListener(listener: RtspDataListener?) {
132 | if (DEBUG) Log.v(TAG, "setDataListener()")
133 | rtspProcessor.dataListener = listener
134 | }
135 |
136 | companion object {
137 | private val TAG: String = RtspSurfaceView::class.java.simpleName
138 | private const val DEBUG = false
139 | }
140 |
141 | }
142 |
143 | @OptIn(UnstableApi::class)
144 | fun NalUnitUtil.SpsData.spsDataToString(): String {
145 | return "" +
146 | "width=${this.width}, " +
147 | "height=${this.height}, " +
148 | "profile_idc=${this.profileIdc}, " +
149 | "constraint_set_flags=${this.constraintsFlagsAndReservedZero2Bits}, " +
150 | "level_idc=${this.levelIdc}, " +
151 | "max_num_ref_frames=${this.maxNumRefFrames}, " +
152 | "frame_mbs_only_flag=${this.frameMbsOnlyFlag}, " +
153 | "log2_max_frame_num=${this.frameNumLength}, " +
154 | "pic_order_cnt_type=${this.picOrderCountType}, " +
155 | "log2_max_pic_order_cnt_lsb=${this.picOrderCntLsbLength}, " +
156 | "delta_pic_order_always_zero_flag=${this.deltaPicOrderAlwaysZeroFlag}, " +
157 | "max_reorder_frames=${this.maxNumReorderFrames}"
158 | }
159 |
160 | fun ByteArray.toHexString(offset: Int, maxLength: Int): String {
161 | val length = minOf(maxLength, size - offset)
162 | return sliceArray(offset until (offset + length))
163 | .joinToString(separator = "") { byte ->
164 | "%02x ".format(byte).uppercase()
165 | }
166 | }
167 |
--------------------------------------------------------------------------------
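A minimal playback sketch using only the public API shown above; the URL is a placeholder and the view is assumed to be already inflated from a layout:

    // Illustrative usage sketch: configure and start playback on an RtspSurfaceView.
    import android.net.Uri
    import com.alexvas.rtsp.widget.RtspSurfaceView

    fun startPlayback(surfaceView: RtspSurfaceView, url: String) {
        surfaceView.init(Uri.parse(url), username = null, password = null, userAgent = null)
        surfaceView.videoRotation = 0  // degrees; leave the stream unrotated
        surfaceView.debug = false
        // requestApplication defaults to false
        surfaceView.start(requestVideo = true, requestAudio = true)
    }
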
/library-client-rtsp/src/main/java/com/alexvas/utils/ByteUtils.java:
--------------------------------------------------------------------------------
1 | package com.alexvas.utils;
2 |
3 | import androidx.annotation.NonNull;
4 |
5 | import java.io.File;
6 | import java.io.FileOutputStream;
7 |
8 | public class ByteUtils {
9 |
10 | // int memcmp ( const void * ptr1, const void * ptr2, size_t num );
11 | public static boolean memcmp(
12 | @NonNull byte[] source1,
13 | int offsetSource1,
14 | @NonNull byte[] source2,
15 | int offsetSource2,
16 | int num) {
17 | if (source1.length - offsetSource1 < num)
18 | return false;
19 | if (source2.length - offsetSource2 < num)
20 | return false;
21 |
22 | for (int i = 0; i < num; i++) {
23 | if (source1[offsetSource1 + i] != source2[offsetSource2 + i])
24 | return false;
25 | }
26 | return true;
27 | }
28 |
29 | public static byte[] copy(@NonNull byte[] src) {
30 | byte[] dest = new byte[src.length];
31 | System.arraycopy(src, 0, dest, 0, src.length);
32 | return dest;
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/library-client-rtsp/src/main/java/com/alexvas/utils/MediaCodecUtils.kt:
--------------------------------------------------------------------------------
1 | package com.alexvas.utils
2 |
3 | import android.annotation.SuppressLint
4 | import android.util.Log
5 | import android.util.Range
6 | import androidx.annotation.OptIn
7 | import androidx.media3.common.util.UnstableApi
8 | import androidx.media3.exoplayer.mediacodec.MediaCodecInfo
9 | import androidx.media3.exoplayer.mediacodec.MediaCodecUtil
10 | import java.lang.Exception
11 |
12 | @SuppressLint("UnsafeOptInUsageError")
13 | object MediaCodecUtils {
14 |
15 | // key - codecs mime type
16 | // value - list of codecs able to handle this mime type
17 | private val decoderInfosMap = HashMap<String, List<MediaCodecInfo>>()
18 |
19 | private val TAG: String = MediaCodecUtils::class.java.simpleName
20 |
21 | private fun getDecoderInfos(mimeType: String): List<MediaCodecInfo> {
22 | val list = decoderInfosMap[mimeType]
23 | return if (list.isNullOrEmpty()) {
24 | val decoderInfos = try {
25 | MediaCodecUtil.getDecoderInfos(mimeType, false, false)
26 | } catch (e: Exception) {
27 | Log.e(TAG, "Failed to initialize '$mimeType' decoders list (${e.message})", e)
28 | ArrayList<MediaCodecInfo>()
29 | }
30 | decoderInfosMap[mimeType] = decoderInfos
31 | decoderInfos
32 | } else {
33 | list
34 | }
35 | }
36 |
37 | /**
38 | * Get software decoders list. Usually used as fallback.
39 | */
40 | @Synchronized
41 | fun getSoftwareDecoders(mimeType: String): List<MediaCodecInfo> {
42 | val decoderInfos = getDecoderInfos(mimeType)
43 | val list = ArrayList<MediaCodecInfo>()
44 | for (codec in decoderInfos) {
45 | if (codec.softwareOnly)
46 | list.add(codec)
47 | }
48 | return list
49 | }
50 |
51 | /**
52 | * Get hardware accelerated decoders list. Used as default.
53 | */
54 | @Synchronized
55 | fun getHardwareDecoders(mimeType: String): List<MediaCodecInfo> {
56 | val decoderInfos = getDecoderInfos(mimeType)
57 | val list = ArrayList<MediaCodecInfo>()
58 | for (codec in decoderInfos) {
59 | if (codec.hardwareAccelerated)
60 | list.add(codec)
61 | }
62 | return list
63 | }
64 |
65 | /**
66 | * Look through all decoders (if there are multiple)
67 | * and select the one which supports low-latency.
68 | */
69 | @OptIn(UnstableApi::class)
70 | fun getLowLatencyDecoder(decoders: List<MediaCodecInfo>): MediaCodecInfo? {
71 | // Some devices can have several decoders, e.g.
72 | // Samsung Fold 5:
73 | // "c2.qti.avc.decoder"
74 | // "c2.qti.avc.decoder.low_latency"
75 | for (decoder in decoders) {
76 | if (decoder.name.contains("low_latency"))
77 | return decoder
78 | }
79 | // Another approach to find decoder with low-latency is to call
80 | // MediaCodec.createByCodecName(name) for every decoder to get decoder instance and then call
81 | // decoder.codecInfo.getCapabilitiesForType(mimeType).isFeatureSupported(MediaCodecInfo.CodecCapabilities.FEATURE_LowLatency)
82 |
83 | // No low-latency decoder found.
84 | return null
85 | }
86 |
87 | }
88 |
89 | fun android.media.MediaCodecInfo.CodecCapabilities.capabilitiesToString(): String {
90 | var heights = videoCapabilities?.supportedHeights
91 | if (heights == null)
92 | heights = Range(-1, -1)
93 | var widths = videoCapabilities?.supportedWidths
94 | if (widths == null)
95 | widths = Range(-1, -1)
96 | return "max instances: ${maxSupportedInstances}, max resolution: ${heights.upper}x${widths.upper}"
97 | }
98 |
--------------------------------------------------------------------------------
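A sketch of the intended selection order: prefer a low-latency hardware decoder, fall back to any hardware decoder, then to software. The function name, mime type string and log tag are illustrative, not part of the library:

    // Illustrative sketch: pick a decoder name for a given mime type, e.g. "video/avc".
    import android.util.Log
    import com.alexvas.utils.MediaCodecUtils

    fun pickDecoderName(mimeType: String): String? {
        val hardware = MediaCodecUtils.getHardwareDecoders(mimeType)
        val chosen = MediaCodecUtils.getLowLatencyDecoder(hardware)
            ?: hardware.firstOrNull()
            ?: MediaCodecUtils.getSoftwareDecoders(mimeType).firstOrNull()
        Log.i("DecoderPicker", "Selected decoder for $mimeType: ${chosen?.name}")
        return chosen?.name
    }
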
/library-client-rtsp/src/main/java/com/alexvas/utils/NetUtils.java:
--------------------------------------------------------------------------------
1 | package com.alexvas.utils;
2 |
3 | import android.util.Log;
4 |
5 | import androidx.annotation.NonNull;
6 | import androidx.annotation.Nullable;
7 |
8 | import java.io.IOException;
9 | import java.io.InputStream;
10 | import java.net.InetSocketAddress;
11 | import java.net.Socket;
12 | import java.security.cert.CertificateException;
13 | import java.security.cert.X509Certificate;
14 | import java.util.ArrayList;
15 | import java.util.Arrays;
16 | import java.util.List;
17 |
18 | import javax.net.ssl.SSLContext;
19 | import javax.net.ssl.SSLSocket;
20 | import javax.net.ssl.TrustManager;
21 | import javax.net.ssl.X509TrustManager;
22 |
23 | public class NetUtils {
24 |
25 | private static final String TAG = NetUtils.class.getSimpleName();
26 | private static final boolean DEBUG = false;
27 | private final static int MAX_LINE_SIZE = 4098;
28 |
29 | public static final class FakeX509TrustManager implements X509TrustManager {
30 |
31 | /**
32 | * Accepted issuers for fake trust manager
33 | */
34 | final static private X509Certificate[] mAcceptedIssuers = new X509Certificate[]{};
35 |
36 | /**
37 | * Constructor for FakeX509TrustManager.
38 | */
39 | public FakeX509TrustManager() {
40 | }
41 |
42 | /**
43 | * @see javax.net.ssl.X509TrustManager#checkClientTrusted(X509Certificate[],String authType)
44 | */
45 | public void checkClientTrusted(X509Certificate[] certificates, String authType)
46 | throws CertificateException {
47 | }
48 |
49 | /**
50 | * @see javax.net.ssl.X509TrustManager#checkServerTrusted(X509Certificate[],String authType)
51 | */
52 | public void checkServerTrusted(X509Certificate[] certificates, String authType)
53 | throws CertificateException {
54 | }
55 |
56 | // https://github.com/square/okhttp/issues/4669
57 | // Called by Android via reflection in X509TrustManagerExtensions.
58 | @SuppressWarnings("unused")
59 | public List<X509Certificate> checkServerTrusted(X509Certificate[] chain, String authType, String host) throws CertificateException {
60 | return Arrays.asList(chain);
61 | }
62 |
63 | /**
64 | * @see javax.net.ssl.X509TrustManager#getAcceptedIssuers()
65 | */
66 | public X509Certificate[] getAcceptedIssuers() {
67 | return mAcceptedIssuers;
68 | }
69 | }
70 |
71 | @NonNull
72 | public static SSLSocket createSslSocketAndConnect(@NonNull String dstName, int dstPort, int timeout) throws Exception {
73 | if (DEBUG)
74 | Log.v(TAG, "createSslSocketAndConnect(dstName=" + dstName + ", dstPort=" + dstPort + ", timeout=" + timeout + ")");
75 |
76 | // TrustManagerFactory trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
77 | // trustManagerFactory.init((KeyStore) null);
78 | // TrustManager[] trustManagers = trustManagerFactory.getTrustManagers();
79 | // if (trustManagers.length != 1 || !(trustManagers[0] instanceof X509TrustManager)) {
80 | // throw new IllegalStateException("Unexpected default trust managers:" + Arrays.toString(trustManagers));
81 | // }
82 | // X509TrustManager trustManager = (X509TrustManager) trustManagers[0];
83 |
84 | SSLContext sslContext = SSLContext.getInstance("TLS");
85 | sslContext.init(null, new TrustManager[] { new FakeX509TrustManager() }, null);
86 | SSLSocket sslSocket = (SSLSocket) sslContext.getSocketFactory().createSocket();
87 | sslSocket.connect(new InetSocketAddress(dstName, dstPort), timeout);
88 | sslSocket.setSoLinger(false, 1);
89 | sslSocket.setSoTimeout(timeout);
90 | return sslSocket;
91 | }
92 |
93 | @NonNull
94 | public static Socket createSocketAndConnect(@NonNull String dstName, int dstPort, int timeout) throws IOException {
95 | if (DEBUG)
96 | Log.v(TAG, "createSocketAndConnect(dstName=" + dstName + ", dstPort=" + dstPort + ", timeout=" + timeout + ")");
97 | Socket socket = new Socket();
98 | socket.connect(new InetSocketAddress(dstName, dstPort), timeout);
99 | socket.setSoLinger(false, 1);
100 | socket.setSoTimeout(timeout);
101 | return socket;
102 | }
103 |
104 | @NonNull
105 | public static Socket createSocket(int timeout) throws IOException {
106 | Socket socket = new Socket();
107 | socket.setSoLinger(false, 1); // linger disabled (timeout value ignored)
108 | socket.setSoTimeout(timeout); // timeout for read(), not for write()
109 | return socket;
110 | }
111 |
112 | public static void closeSocket(@Nullable Socket socket) throws IOException {
113 | if (DEBUG)
114 | Log.v(TAG, "closeSocket()");
115 | if (socket != null) {
116 | try {
117 | socket.shutdownInput();
118 | } catch (Exception ignored) {
119 | }
120 | try {
121 | socket.shutdownOutput();
122 | } catch (Exception ignored) {
123 | }
124 | socket.close();
125 | }
126 | }
127 |
128 | @NonNull
129 | public static ArrayList<String> readResponseHeaders(@NonNull InputStream inputStream) throws IOException {
130 | // Assert.assertNotNull("Input stream should not be null", inputStream);
131 | ArrayList<String> headers = new ArrayList<>();
132 | String line;
133 | while (true) {
134 | line = readLine(inputStream);
135 | if (line != null) {
136 | if (line.equals("\r\n"))
137 | return headers;
138 | else
139 | headers.add(line);
140 | } else {
141 | break;
142 | }
143 | }
144 | return headers;
145 | }
146 |
147 | @Nullable
148 | public static String readLine(@NonNull InputStream inputStream) throws IOException {
149 | // Assert.assertNotNull("Input stream should not be null", inputStream);
150 | byte[] bufferLine = new byte[MAX_LINE_SIZE];
151 | int offset = 0;
152 | int readBytes;
153 | do {
154 | // Didn't find "\r\n" within 4K bytes
155 | if (offset >= MAX_LINE_SIZE) {
156 | throw new IOException("Invalid headers");
157 | }
158 |
159 | // Read 1 byte
160 | readBytes = inputStream.read(bufferLine, offset, 1);
161 | if (readBytes == 1) {
162 | // Check for EOL
163 | // Some cameras like Linksys WVC200 send only \n instead of \r\n
164 | if (offset > 0 && /*bufferLine[offset-1] == '\r' &&*/ bufferLine[offset] == '\n') {
165 | // Found empty EOL. End of header section
166 | if (offset == 1)
167 | break;
168 |
169 | // Found EOL. Add to array.
170 | return new String(bufferLine, 0, offset-1);
171 | } else {
172 | offset++;
173 | }
174 | }
175 | } while (readBytes > 0);
176 | return null;
177 | }
178 |
179 | public static int getResponseStatusCode(@NonNull ArrayList<String> headers) {
180 | // Assert.assertNotNull("Headers should not be null", headers);
181 | // Search for HTTP status code header
182 | for (String header: headers) {
183 | int indexHttp = header.indexOf("HTTP/1.1 "); // 9 characters
184 | if (indexHttp == -1)
185 | indexHttp = header.indexOf("HTTP/1.0 ");
186 | if (indexHttp >= 0) {
187 | int indexCode = header.indexOf(' ', 9);
188 | String code = header.substring(9, indexCode);
189 | try {
190 | return Integer.parseInt(code);
191 | } catch (NumberFormatException e) {
192 | // Does not match the standard "HTTP/1.1 200 OK" format.
193 | // Continue searching in the remaining headers.
194 | }
195 | }
196 | }
197 | // Not found
198 | return -1;
199 | }
200 |
201 | // @Nullable
202 | // static String readContentAsText(@Nullable InputStream inputStream) throws IOException {
203 | // if (inputStream == null)
204 | // return null;
205 | // BufferedReader r = new BufferedReader(new InputStreamReader(inputStream));
206 | // StringBuilder total = new StringBuilder();
207 | // String line;
208 | // while ((line = r.readLine()) != null) {
209 | // total.append(line);
210 | // total.append("\r\n");
211 | // }
212 | // return total.toString();
213 | // }
214 |
215 | @NonNull
216 | public static String readContentAsText(@NonNull InputStream inputStream, int length) throws IOException {
217 | // Assert.assertNotNull("Input stream should not be null", inputStream);
218 | if (length <= 0)
219 | return "";
220 | byte[] b = new byte[length];
221 | int read = readData(inputStream, b, 0, length);
222 | return new String(b, 0, read);
223 | }
224 |
225 | public static int readData(@NonNull InputStream inputStream, @NonNull byte[] buffer, int offset, int length) throws IOException {
226 | int readBytes;
227 | int totalReadBytes = 0;
228 | do {
229 | readBytes = inputStream.read(buffer, offset + totalReadBytes, length - totalReadBytes);
230 | if (readBytes > 0)
231 | totalReadBytes += readBytes;
232 | } while (readBytes >= 0 && totalReadBytes < length);
233 | return totalReadBytes;
234 | }
235 | }
236 |
--------------------------------------------------------------------------------
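readLine() and readResponseHeaders() together read a header block terminated by an empty line; getResponseStatusCode() then extracts the numeric status. A small sketch combining them; the function name is illustrative:

    // Illustrative sketch: read response headers from a stream and extract the status code.
    import com.alexvas.utils.NetUtils
    import java.io.InputStream

    fun readStatusCode(inputStream: InputStream): Int {
        val headers = NetUtils.readResponseHeaders(inputStream)
        // Returns -1 when no "HTTP/1.1" / "HTTP/1.0" status line is present.
        return NetUtils.getResponseStatusCode(headers)
    }
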
/library-client-rtsp/src/main/java/com/alexvas/utils/VideoCodecUtils.kt:
--------------------------------------------------------------------------------
1 | package com.alexvas.utils
2 |
3 | import android.annotation.SuppressLint
4 | import android.util.Log
5 | import androidx.media3.container.NalUnitUtil
6 | import androidx.media3.container.NalUnitUtil.SpsData
7 | import java.util.concurrent.atomic.AtomicInteger
8 | import kotlin.experimental.and
9 |
10 |
11 | object VideoCodecUtils {
12 |
13 | private val TAG = VideoCodecUtils::class.java.simpleName
14 |
15 | /** Max possible NAL SPS size in bytes */
16 | const val MAX_NAL_SPS_SIZE: Int = 500
17 |
18 | const val NAL_SLICE: Byte = 1
19 | const val NAL_DPA: Byte = 2
20 | const val NAL_DPB: Byte = 3
21 | const val NAL_DPC: Byte = 4
22 | const val NAL_IDR_SLICE: Byte = 5
23 | const val NAL_SEI: Byte = 6
24 | const val NAL_SPS: Byte = 7
25 | const val NAL_PPS: Byte = 8
26 | const val NAL_AUD: Byte = 9
27 | const val NAL_END_SEQUENCE: Byte = 10
28 | const val NAL_END_STREAM: Byte = 11
29 | const val NAL_FILLER_DATA: Byte = 12
30 | const val NAL_SPS_EXT: Byte = 13
31 | const val NAL_AUXILIARY_SLICE: Byte = 19
32 | const val NAL_STAP_A: Byte = 24 // https://tools.ietf.org/html/rfc3984 5.7.1
33 | const val NAL_STAP_B: Byte = 25 // 5.7.1
34 | const val NAL_MTAP16: Byte = 26 // 5.7.2
35 | const val NAL_MTAP24: Byte = 27 // 5.7.2
36 | const val NAL_FU_A: Byte = 28 // 5.8 fragmented unit
37 | const val NAL_FU_B: Byte = 29 // 5.8
38 |
39 | // Table 7-3: NAL unit type codes
40 | const val H265_NAL_TRAIL_N: Byte = 0
41 | const val H265_NAL_TRAIL_R: Byte = 1
42 | const val H265_NAL_TSA_N: Byte = 2
43 | const val H265_NAL_TSA_R: Byte = 3
44 | const val H265_NAL_STSA_N: Byte = 4
45 | const val H265_NAL_STSA_R: Byte = 5
46 | const val H265_NAL_RADL_N: Byte = 6
47 | const val H265_NAL_RADL_R: Byte = 7
48 | const val H265_NAL_RASL_N: Byte = 8
49 | const val H265_NAL_RASL_R: Byte = 9
50 | const val H265_NAL_BLA_W_LP: Byte = 16
51 | const val H265_NAL_BLA_W_RADL: Byte = 17
52 | const val H265_NAL_BLA_N_LP: Byte = 18
53 | const val H265_NAL_IDR_W_RADL: Byte = 19
54 | const val H265_NAL_IDR_N_LP: Byte = 20
55 | const val H265_NAL_CRA_NUT: Byte = 21
56 | const val H265_NAL_VPS: Byte = 32
57 | const val H265_NAL_SPS: Byte = 33
58 | const val H265_NAL_PPS: Byte = 34
59 | const val H265_NAL_AUD: Byte = 35
60 | const val H265_NAL_EOS_NUT: Byte = 36
61 | const val H265_NAL_EOB_NUT: Byte = 37
62 | const val H265_NAL_FD_NUT: Byte = 38
63 | const val H265_NAL_SEI_PREFIX: Byte = 39
64 | const val H265_NAL_SEI_SUFFIX: Byte = 40
65 |
66 | private val NAL_PREFIX1 = byteArrayOf(0x00, 0x00, 0x00, 0x01)
67 | private val NAL_PREFIX2 = byteArrayOf(0x00, 0x00, 0x01)
68 |
69 |
70 | /**
71 | * Search for 00 00 01 or 00 00 00 01 in byte stream.
72 | * @return offset to the start of NAL unit if found, otherwise -1
73 | */
74 | fun searchForNalUnitStart(
75 | data: ByteArray,
76 | offset: Int,
77 | length: Int,
78 | prefixSize: AtomicInteger
79 | ): Int {
80 | if (offset >= data.size - 3) return -1
81 | for (pos in 0 until length) {
82 | val prefix: Int = getNalUnitStartCodePrefixSize(data, pos + offset, length)
83 | if (prefix >= 0) {
84 | prefixSize.set(prefix)
85 | return pos + offset
86 | }
87 | }
88 | return -1
89 | }
90 |
91 | fun searchForH264NalUnitByType(
92 | data: ByteArray,
93 | offset: Int,
94 | length: Int,
95 | byUnitType: Int
96 | ): Int {
97 | var off = offset
98 | val nalUnitPrefixSize = AtomicInteger(-1)
99 | val timestamp = System.currentTimeMillis()
100 | while (true) {
101 | val nalUnitIndex = searchForNalUnitStart(data, off, length, nalUnitPrefixSize)
102 | if (nalUnitIndex >= 0) {
103 | val nalUnitOffset = nalUnitIndex + nalUnitPrefixSize.get()
104 | if (nalUnitOffset >= data.size)
105 | break
106 | val nalUnitTypeOctet = data[nalUnitOffset]
107 | if ((nalUnitTypeOctet and 0x1f).toInt() == byUnitType) {
108 | return nalUnitIndex
109 | }
110 | off = nalUnitOffset
111 |
112 | // Check that we are not too long here
113 | if (System.currentTimeMillis() - timestamp > 100) {
114 | Log.w(TAG, "Cannot process data within 100 msec in $length bytes")
115 | break
116 | }
117 | } else {
118 | break
119 | }
120 | }
121 | return -1
122 | }
123 |
124 | fun getNalUnitType(data: ByteArray?, offset: Int, length: Int, isH265: Boolean): Byte {
125 | if (data == null || length <= NAL_PREFIX1.size) return (-1).toByte()
126 | var nalUnitTypeOctetOffset = -1
127 | if (data[offset + NAL_PREFIX2.size - 1] == 1.toByte())
128 | nalUnitTypeOctetOffset =
129 | offset + NAL_PREFIX2.size - 1
130 | else if (data[offset + NAL_PREFIX1.size - 1] == 1.toByte())
131 | nalUnitTypeOctetOffset = offset + NAL_PREFIX1.size - 1
132 |
133 | return if (nalUnitTypeOctetOffset != -1) {
134 | val nalUnitTypeOctet = data[nalUnitTypeOctetOffset + 1]
135 | if (isH265)
136 | ((nalUnitTypeOctet.toInt() shr 1) and 0x3F).toByte()
137 | else
138 | (nalUnitTypeOctet and 0x1f)
139 | } else {
140 | (-1).toByte()
141 | }
142 | }
143 |
144 | private fun getNalUnitStartCodePrefixSize(
145 | data: ByteArray,
146 | offset: Int,
147 | length: Int
148 | ): Int {
149 | if (length < 4) return -1
150 | return if (memcmp(data, offset, NAL_PREFIX1, 0, NAL_PREFIX1.size))
151 | NAL_PREFIX1.size else
152 | if (memcmp(data, offset, NAL_PREFIX2, 0, NAL_PREFIX2.size))
153 | NAL_PREFIX2.size else
154 | -1
155 | }
156 |
157 | private fun memcmp(
158 | source1: ByteArray,
159 | offsetSource1: Int,
160 | source2: ByteArray,
161 | offsetSource2: Int,
162 | num: Int
163 | ): Boolean {
164 | if (source1.size - offsetSource1 < num) return false
165 | if (source2.size - offsetSource2 < num) return false
166 | for (i in 0 until num) {
167 | if (source1[offsetSource1 + i] != source2[offsetSource2 + i]) return false
168 | }
169 | return true
170 | }
171 |
172 | data class NalUnit (val type: Byte, val offset: Int, val length: Int)
173 |
174 |
175 | fun getNalUnits(
176 | data: ByteArray,
177 | dataOffset: Int,
178 | length: Int,
179 | foundNals: ArrayList<NalUnit>,
180 | isH265: Boolean
181 | ): Int {
182 | foundNals.clear()
183 | var nalUnits = 0
184 | val nextNalOffset = 0
185 | val nalUnitPrefixSize = AtomicInteger(-1)
186 | val timestamp = System.currentTimeMillis()
187 | var offset = dataOffset
188 | var stopped = false
189 | while (!stopped) {
190 |
191 | // Search for first NAL unit
192 | val nalUnitIndex = searchForNalUnitStart(
193 | data,
194 | offset + nextNalOffset,
195 | length - nextNalOffset,
196 | nalUnitPrefixSize
197 | )
198 |
199 | // NAL unit found
200 | if (nalUnitIndex >= 0) {
201 | nalUnits++
202 | val nalUnitOffset = offset + nextNalOffset + nalUnitPrefixSize.get()
203 | val nalUnitTypeOctet = data[nalUnitOffset]
204 | val nalUnitType = if (isH265)
205 | ((nalUnitTypeOctet.toInt() shr 1) and 0x3F).toByte()
206 | else
207 | (nalUnitTypeOctet and 0x1F)
208 |
209 | // Search for second NAL unit (optional)
210 | var nextNalUnitStartIndex = searchForNalUnitStart(
211 | data,
212 | nalUnitOffset,
213 | length - nalUnitOffset,
214 | nalUnitPrefixSize
215 | )
216 |
217 | // Second NAL unit not found. Use till the end.
218 | if (nextNalUnitStartIndex < 0) {
219 | // Not found next NAL unit. Use till the end.
220 | // nextNalUnitStartIndex = length - nextNalOffset + dataOffset;
221 | nextNalUnitStartIndex = length + dataOffset
222 | stopped = true
223 | }
224 | val l = nextNalUnitStartIndex - offset
225 | // if (DEBUG) Log.d(
226 | // TAG,
227 | // "NAL unit type: " + getH264NalUnitTypeString(nalUnitType.toInt()) +
228 | // " (" + nalUnitType + ") - " + l + " bytes, offset " + offset
229 | // )
230 | foundNals.add(NalUnit(nalUnitType, offset, l))
231 | offset = nextNalUnitStartIndex
232 |
233 | // Check that we are not too long here
234 | if (System.currentTimeMillis() - timestamp > 200) {
235 | Log.w(TAG, "Cannot process data within 200 msec in $length bytes (NALs found: " + foundNals.size + ")")
236 | break
237 | }
238 | } else {
239 | stopped = true
240 | }
241 | }
242 | return nalUnits
243 | }
244 |
245 | private fun getNalUnitStartLengthFromArray(
246 | src: ByteArray, offset: Int, length: Int,
247 | isH265: Boolean,
248 | nalUnitType: Byte
249 | ): Pair<Int, Int>? {
250 | val nalUnitsFound = ArrayList<NalUnit>()
251 | if (getNalUnits(src, offset, length, nalUnitsFound, isH265) > 0) {
252 | for (nalUnit in nalUnitsFound) {
253 | if (nalUnit.type == nalUnitType) {
254 | val prefixSize = AtomicInteger()
255 | val nalUnitIndex = searchForNalUnitStart(
256 | src,
257 | nalUnit.offset,
258 | nalUnit.length,
259 | prefixSize
260 | )
261 | val nalOffset = nalUnitIndex + prefixSize.get() + 1 /* NAL unit type */
262 | return Pair(nalOffset, nalUnit.length)
263 | }
264 | }
265 | }
266 | return null
267 | }
268 |
269 | @SuppressLint("UnsafeOptInUsageError")
270 | fun getSpsNalUnitFromArray(src: ByteArray, offset: Int, length: Int, isH265: Boolean): SpsData? {
271 | val spsStartLength = getNalUnitStartLengthFromArray(src, offset, length, isH265, NAL_SPS)
272 | spsStartLength?.let {
273 | return NalUnitUtil.parseSpsNalUnitPayload(
274 | src, spsStartLength.first, spsStartLength.first + spsStartLength.second)
275 | }
276 | return null
277 | }
278 |
279 | @SuppressLint("UnsafeOptInUsageError")
280 | fun getWidthHeightFromArray(src: ByteArray, offset: Int, length: Int, isH265: Boolean): Pair<Int, Int>? {
281 | val sps = getSpsNalUnitFromArray(src, offset, length, isH265)
282 | sps?.let {
283 | return Pair(sps.width, sps.height)
284 | }
285 | return null
286 | }
287 |
288 |
289 | // private fun isH265IRAP(nalUnitType: Byte): Boolean {
290 | // return nalUnitType in 16..23
291 | // }
292 |
293 | fun isAnyKeyFrame(data: ByteArray?, offset: Int, length: Int, isH265: Boolean): Boolean {
294 | if (data == null || length <= 0) return false
295 | var currOffset = offset
296 |
297 | val nalUnitPrefixSize = AtomicInteger(-1)
298 | val timestamp = System.currentTimeMillis()
299 | while (true) {
300 | val nalUnitIndex = searchForNalUnitStart(
301 | data,
302 | currOffset,
303 | length,
304 | nalUnitPrefixSize
305 | )
306 |
307 | if (nalUnitIndex >= 0) {
308 | val nalUnitOffset = nalUnitIndex + nalUnitPrefixSize.get()
309 | if (nalUnitOffset >= data.size)
310 | return false
311 | val nalUnitTypeOctet = data[nalUnitOffset]
312 |
313 | if (isH265) {
314 | val nalUnitType = ((nalUnitTypeOctet.toInt() and 0x7E) shr 1).toByte()
315 | // IDR frames (IDR_W_RADL, IDR_N_LP) are treated as key frames.
316 | if (nalUnitType == H265_NAL_IDR_W_RADL || nalUnitType == H265_NAL_IDR_N_LP)
317 | return true
318 | } else {
319 | val nalUnitType = (nalUnitTypeOctet.toInt() and 0x1f).toByte()
320 | when (nalUnitType) {
321 | NAL_IDR_SLICE -> return true
322 | NAL_SLICE -> return false
323 | }
324 | }
325 | // Continue searching
326 | currOffset = nalUnitOffset
327 |
328 | // Check that we are not too long here
329 | if (System.currentTimeMillis() - timestamp > 100) {
330 | Log.w(TAG, "Cannot process data within 100 msec in $length bytes (index=$nalUnitIndex)")
331 | break
332 | }
333 | } else {
334 | break
335 | }
336 | }
337 |
338 | return false
339 | }
340 |
341 | fun getH264NalUnitTypeString(nalUnitType: Byte): String {
342 | return when (nalUnitType) {
343 | NAL_SLICE -> "NAL_SLICE"
344 | NAL_DPA -> "NAL_DPA"
345 | NAL_DPB -> "NAL_DPB"
346 | NAL_DPC -> "NAL_DPC"
347 | NAL_IDR_SLICE -> "NAL_IDR_SLICE"
348 | NAL_SEI -> "NAL_SEI"
349 | NAL_SPS -> "NAL_SPS"
350 | NAL_PPS -> "NAL_PPS"
351 | NAL_AUD -> "NAL_AUD"
352 | NAL_END_SEQUENCE -> "NAL_END_SEQUENCE"
353 | NAL_END_STREAM -> "NAL_END_STREAM"
354 | NAL_FILLER_DATA -> "NAL_FILLER_DATA"
355 | NAL_SPS_EXT -> "NAL_SPS_EXT"
356 | NAL_AUXILIARY_SLICE -> "NAL_AUXILIARY_SLICE"
357 | NAL_STAP_A -> "NAL_STAP_A"
358 | NAL_STAP_B -> "NAL_STAP_B"
359 | NAL_MTAP16 -> "NAL_MTAP16"
360 | NAL_MTAP24 -> "NAL_MTAP24"
361 | NAL_FU_A -> "NAL_FU_A"
362 | NAL_FU_B -> "NAL_FU_B"
363 | else -> "unknown - $nalUnitType"
364 | }
365 | }
366 |
367 | fun getH265NalUnitTypeString(nalUnitType: Byte): String {
368 | return when (nalUnitType) {
369 | H265_NAL_TRAIL_N -> "NAL_TRAIL_N"
370 | H265_NAL_TRAIL_R -> "NAL_TRAIL_R"
371 | H265_NAL_TSA_N -> "NAL_TSA_N"
372 | H265_NAL_TSA_R -> "NAL_TSA_R"
373 | H265_NAL_STSA_N -> "NAL_STSA_N"
374 | H265_NAL_STSA_R -> "NAL_STSA_R"
375 | H265_NAL_RADL_N -> "NAL_RADL_N"
376 | H265_NAL_RADL_R -> "NAL_RADL_R"
377 | H265_NAL_RASL_N -> "NAL_RASL_N"
378 | H265_NAL_RASL_R -> "NAL_RASL_R"
379 | H265_NAL_BLA_W_LP -> "NAL_BLA_W_LP"
380 | H265_NAL_BLA_W_RADL -> "NAL_BLA_W_RADL"
381 | H265_NAL_BLA_N_LP -> "NAL_BLA_N_LP"
382 | H265_NAL_IDR_W_RADL -> "NAL_IDR_W_RADL"
383 | H265_NAL_IDR_N_LP -> "NAL_IDR_N_LP"
384 | H265_NAL_CRA_NUT -> "NAL_CRA_NUT"
385 | H265_NAL_VPS -> "NAL_VPS"
386 | H265_NAL_SPS -> "NAL_SPS"
387 | H265_NAL_PPS -> "NAL_PPS"
388 | H265_NAL_AUD -> "NAL_AUD"
389 | H265_NAL_EOS_NUT -> "NAL_EOS_NUT"
390 | H265_NAL_EOB_NUT -> "NAL_EOB_NUT"
391 | H265_NAL_FD_NUT -> "NAL_FD_NUT"
392 | H265_NAL_SEI_PREFIX -> "NAL_SEI_PREFIX"
393 | H265_NAL_SEI_SUFFIX -> "NAL_SEI_SUFFIX"
394 | else -> "unknown - $nalUnitType"
395 | }
396 | }
397 |
398 | }
399 |
--------------------------------------------------------------------------------
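A sketch that enumerates the NAL units in an Annex B buffer with getNalUnits() and checks for a key frame with isAnyKeyFrame(); the function name and log tag are illustrative:

    // Illustrative sketch: list NAL units in a frame buffer and report whether it contains a key frame.
    import android.util.Log
    import com.alexvas.utils.VideoCodecUtils

    fun logNalUnits(frame: ByteArray, isH265: Boolean) {
        val nals = ArrayList<VideoCodecUtils.NalUnit>()
        val count = VideoCodecUtils.getNalUnits(frame, 0, frame.size, nals, isH265)
        for (nal in nals) {
            val name = if (isH265)
                VideoCodecUtils.getH265NalUnitTypeString(nal.type)
            else
                VideoCodecUtils.getH264NalUnitTypeString(nal.type)
            Log.d("NalScanner", "$name: offset=${nal.offset}, length=${nal.length}")
        }
        val keyFrame = VideoCodecUtils.isAnyKeyFrame(frame, 0, frame.size, isH265)
        Log.d("NalScanner", "NAL units found: $count, key frame: $keyFrame")
    }
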
/settings.gradle:
--------------------------------------------------------------------------------
1 | include ':library-client-rtsp'
2 | include ':app'
--------------------------------------------------------------------------------