├── .gitignore
├── .idea
│   ├── .name
│   ├── compiler.xml
│   ├── copyright
│   │   └── profiles_settings.xml
│   ├── gradle.xml
│   ├── misc.xml
│   ├── modules.xml
│   ├── runConfigurations.xml
│   └── vcs.xml
├── README.md
├── app
│   ├── .gitignore
│   ├── app.iml
│   ├── build.gradle
│   ├── proguard-rules.pro
│   └── src
│       ├── androidTest
│       │   └── java
│       │       └── com
│       │           └── yschi
│       │               └── castscreen
│       │                   └── ApplicationTest.java
│       └── main
│           ├── AndroidManifest.xml
│           ├── java
│           │   └── com
│           │       └── yschi
│           │           └── castscreen
│           │               ├── CastService.java
│           │               ├── Common.java
│           │               ├── IvfWriter.java
│           │               ├── MainActivity.java
│           │               └── Utils.java
│           └── res
│               ├── layout
│               │   └── activity_main.xml
│               ├── menu
│               │   └── menu_main.xml
│               ├── mipmap-hdpi
│               │   └── ic_launcher.png
│               ├── mipmap-mdpi
│               │   └── ic_launcher.png
│               ├── mipmap-xhdpi
│               │   └── ic_launcher.png
│               ├── mipmap-xxhdpi
│               │   └── ic_launcher.png
│               ├── mipmap-xxxhdpi
│               │   └── ic_launcher.png
│               ├── values-v21
│               │   └── styles.xml
│               ├── values-w820dp
│               │   └── dimens.xml
│               └── values
│                   ├── dimens.xml
│                   ├── strings.xml
│                   └── styles.xml
├── build.gradle
├── castscreen.iml
├── gradle.properties
├── gradle
│   └── wrapper
│       ├── gradle-wrapper.jar
│       └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
├── receiver
│   ├── Makefile
│   ├── cs_receiver.c
│   ├── cs_receiver.py
│   ├── cs_receiver_conn.py
│   ├── wait_adb.sh
│   ├── wait_adb_arm.sh
│   └── wait_adb_conn.sh
└── settings.gradle
/.gitignore:
--------------------------------------------------------------------------------
1 | .gradle
2 | /local.properties
3 | /.idea/workspace.xml
4 | /.idea/libraries
5 | .DS_Store
6 | /build
7 | /captures
8 |
--------------------------------------------------------------------------------
/.idea/.name:
--------------------------------------------------------------------------------
1 | castscreen
--------------------------------------------------------------------------------
/.idea/compiler.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.idea/copyright/profiles_settings.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.idea/gradle.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.idea/misc.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.idea/modules.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.idea/runConfigurations.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.idea/vcs.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # CastScreen
2 | Cast Android screen via WiFi or USB
3 |
4 | Demo video: https://youtu.be/D_DSuvFz_sg
5 |
6 | ## Requirements
7 | * GStreamer 1.0 with an H.264 decoder (h264parse, avdec_h264); see the quick check below
8 | * adb, for mirroring via USB
9 |
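   | To verify that the required GStreamer elements are installed, you can run (assuming `gst-inspect-1.0` is on your PATH):
   | ```
   | $ gst-inspect-1.0 h264parse
   | $ gst-inspect-1.0 avdec_h264
   | ```
   |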
10 | ## With native receiver
11 | * If you are not on an ARM machine, ignore outputs from *_arm targets, or remove them from the Makefile.
12 | * Compile the receiver
13 | ```
14 | $ cd receiver
15 | $ make
16 | ```
17 | ### Via WiFi
18 | 1. Launch the receiver
19 | ```
20 | $ cd receiver
21 | $ ./cs_receiver autovideosink
22 | ```
23 | 2. Open the CastScreen app
24 | 3. Wait for the receiver to appear in the list
25 | 4. Select the receiver
26 | 5. Tap **Start** in the top-right corner
27 |
28 | ### Via USB
29 | 1. Enable USB debugging on the Android device
30 | 2. Make sure adb is available on your PC
31 | 3. Open the CastScreen app
32 | 4. Select **Server mode**
33 | 5. Tap **Start** in the top-right corner
34 | 6. Launch the receiver
35 | ```
36 | $ cd receiver
37 | $ ./wait_adb.sh
38 | ```
39 |
40 | ## With python receiver
41 | ### Via WiFi
42 | 1. Launch the receiver
43 | ```
44 | $ cd receiver
45 | $ python cs_receiver.py
46 | ```
47 | 2. Open the CastScreen app
48 | 3. Wait for the receiver to appear in the list
49 | 4. Select the receiver
50 | 5. Tap **Start** in the top-right corner
51 |
52 | ### Via USB
53 | 1. Enable USB debugging on the Android device
54 | 2. Make sure adb is available on your PC
55 | 3. Open the CastScreen app
56 | 4. Select **Server mode**
57 | 5. Tap **Start** in the top-right corner
58 | 6. Launch the receiver
59 | ```
60 | $ cd receiver
61 | $ adb forward tcp:53516 tcp:53515
62 | $ python cs_receiver_conn.py
63 | ```
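   |
   | The `adb forward` command maps TCP port 53516 on the PC to port 53515 on the device, which is the port the CastScreen app listens on in server mode (see `Common.VIEWER_PORT`).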
64 |
65 | ## Closing receivers
66 | ### Ubuntu
67 | Open the System Monitor, search for "receiver", and kill the process.
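   |
   | Alternatively, from a terminal (this kills any process whose command line contains `cs_receiver`):
   | ```
   | $ pkill -f cs_receiver
   | ```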
68 |
69 | ## Using an alternative app
70 | You can use the receiver with the [All Cast Receiver App](https://play.google.com/store/apps/details?id=com.koushikdutta.cast.receiver&rdid=com.koushikdutta.cast.receiver) as well. Just start a receiver as described above (the native receiver is faster than the python one).
71 |
72 | ## License
73 | Copyright (c) 2015-2016 Jones Chi. Code released under the Apache License.
74 |
--------------------------------------------------------------------------------
/app/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 |
--------------------------------------------------------------------------------
/app/app.iml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/app/build.gradle:
--------------------------------------------------------------------------------
1 | apply plugin: 'com.android.application'
2 |
3 | android {
4 | compileSdkVersion 26
5 |
6 | defaultConfig {
7 | applicationId "com.yschi.castscreen"
8 | minSdkVersion 21
9 | targetSdkVersion 26
10 | versionCode 1
11 | versionName "1.0"
12 | }
13 | buildTypes {
14 | release {
15 | minifyEnabled false
16 | proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
17 | }
18 | }
19 | }
20 |
21 | dependencies {
22 | compile fileTree(dir: 'libs', include: ['*.jar'])
23 | }
24 |
--------------------------------------------------------------------------------
/app/proguard-rules.pro:
--------------------------------------------------------------------------------
1 | # Add project specific ProGuard rules here.
2 | # By default, the flags in this file are appended to flags specified
3 | # in /opt/AndroidSDK/adt-bundle-linux-x86_64-20131030/sdk/tools/proguard/proguard-android.txt
4 | # You can edit the include path and order by changing the proguardFiles
5 | # directive in build.gradle.
6 | #
7 | # For more details, see
8 | # http://developer.android.com/guide/developing/tools/proguard.html
9 |
10 | # Add any project specific keep options here:
11 |
12 | # If your project uses WebView with JS, uncomment the following
13 | # and specify the fully qualified class name to the JavaScript interface
14 | # class:
15 | #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
16 | # public *;
17 | #}
18 |
--------------------------------------------------------------------------------
/app/src/androidTest/java/com/yschi/castscreen/ApplicationTest.java:
--------------------------------------------------------------------------------
1 | package com.yschi.castscreen;
2 |
3 | import android.app.Application;
4 | import android.test.ApplicationTestCase;
5 |
6 | /**
7 | * Testing Fundamentals
8 | */
9 | public class ApplicationTest extends ApplicationTestCase<Application> {
10 | public ApplicationTest() {
11 | super(Application.class);
12 | }
13 | }
--------------------------------------------------------------------------------
/app/src/main/AndroidManifest.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/app/src/main/java/com/yschi/castscreen/CastService.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (C) 2016 Jones Chi
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.yschi.castscreen;
18 |
19 | import android.app.Activity;
20 | import android.app.Notification;
21 | import android.app.NotificationManager;
22 | import android.app.PendingIntent;
23 | import android.app.Service;
24 | import android.content.BroadcastReceiver;
25 | import android.content.Context;
26 | import android.content.Intent;
27 | import android.content.IntentFilter;
28 | import android.hardware.display.VirtualDisplay;
29 | import android.media.MediaCodec;
30 | import android.media.MediaCodecInfo;
31 | import android.media.MediaFormat;
32 | import android.media.projection.MediaProjection;
33 | import android.media.projection.MediaProjectionManager;
34 | import android.os.Handler;
35 | import android.os.IBinder;
36 | import android.os.Message;
37 | import android.os.Messenger;
38 | import android.util.Log;
39 | import android.view.Surface;
40 |
41 | import java.io.BufferedReader;
42 | import java.io.IOException;
43 | import java.io.InputStreamReader;
44 | import java.io.OutputStream;
45 | import java.io.OutputStreamWriter;
46 | import java.net.InetAddress;
47 | import java.net.ServerSocket;
48 | import java.net.Socket;
49 | import java.net.UnknownHostException;
50 | import java.nio.ByteBuffer;
51 | import java.util.ArrayList;
52 |
53 | public class CastService extends Service {
54 | private final String TAG = "CastService";
55 | private final int NT_ID_CASTING = 0;
56 | private Handler mHandler = new Handler(new ServiceHandlerCallback());
57 | private Messenger mMessenger = new Messenger(mHandler);
58 | private ArrayList<Messenger> mClients = new ArrayList<>();
59 | private IntentFilter mBroadcastIntentFilter;
60 |
61 | private static final String HTTP_MESSAGE_TEMPLATE = "POST /api/v1/h264 HTTP/1.1\r\n" +
62 | "Connection: close\r\n" +
63 | "X-WIDTH: %1$d\r\n" +
64 | "X-HEIGHT: %2$d\r\n" +
65 | "\r\n";
66 |
67 | // Pre-encoded H.264 SPS/PPS parameter sets for 1280x720@25, written to the socket before the stream so the receiver can configure its decoder
68 | private static final byte[] H264_PREDEFINED_HEADER_1280x720 = {
69 | (byte)0x21, (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x00,
70 | (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x01,
71 | (byte)0x67, (byte)0x42, (byte)0x80, (byte)0x20, (byte)0xda, (byte)0x01, (byte)0x40, (byte)0x16,
72 | (byte)0xe8, (byte)0x06, (byte)0xd0, (byte)0xa1, (byte)0x35, (byte)0x00, (byte)0x00, (byte)0x00,
73 | (byte)0x01, (byte)0x68, (byte)0xce, (byte)0x06, (byte)0xe2, (byte)0x32, (byte)0x24, (byte)0x00,
74 | (byte)0x00, (byte)0x7a, (byte)0x83, (byte)0x3d, (byte)0xae, (byte)0x37, (byte)0x00, (byte)0x00};
75 |
76 | // Pre-encoded H.264 SPS/PPS parameter sets for 800x480@25
77 | private static final byte[] H264_PREDEFINED_HEADER_800x480 = {
78 | (byte)0x21, (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x00,
79 | (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x01,
80 | (byte)0x67, (byte)0x42, (byte)0x80, (byte)0x20, (byte)0xda, (byte)0x03, (byte)0x20, (byte)0xf6,
81 | (byte)0x80, (byte)0x6d, (byte)0x0a, (byte)0x13, (byte)0x50, (byte)0x00, (byte)0x00, (byte)0x00,
82 | (byte)0x01, (byte)0x68, (byte)0xce, (byte)0x06, (byte)0xe2, (byte)0x32, (byte)0x24, (byte)0x00,
83 | (byte)0x00, (byte)0x7a, (byte)0x83, (byte)0x3d, (byte)0xae, (byte)0x37, (byte)0x00, (byte)0x00};
84 |
85 | private MediaProjectionManager mMediaProjectionManager;
86 | private String mReceiverIp;
87 | private int mResultCode;
88 | private Intent mResultData;
89 | private String mSelectedFormat;
90 | private int mSelectedWidth;
91 | private int mSelectedHeight;
92 | private int mSelectedDpi;
93 | private int mSelectedBitrate;
94 | //private boolean mMuxerStarted = false;
95 | private MediaProjection mMediaProjection;
96 | private VirtualDisplay mVirtualDisplay;
97 | private Surface mInputSurface;
98 | //private MediaMuxer mMuxer;
99 | private MediaCodec mVideoEncoder;
100 | private MediaCodec.BufferInfo mVideoBufferInfo;
101 | //private int mTrackIndex = -1;
102 | private ServerSocket mServerSocket;
103 | private Socket mSocket;
104 | private OutputStream mSocketOutputStream;
105 | private IvfWriter mIvfWriter;
106 | private Handler mDrainHandler = new Handler();
107 | private Runnable mStartEncodingRunnable = new Runnable() {
108 | @Override
109 | public void run() {
110 | if (!startScreenCapture()) {
111 | Log.e(TAG, "Failed to start capturing screen");
112 | }
113 | }
114 | };
115 | private Runnable mDrainEncoderRunnable = new Runnable() {
116 | @Override
117 | public void run() {
118 | drainEncoder();
119 | }
120 | };
121 |
122 | private class ServiceHandlerCallback implements Handler.Callback {
123 | @Override
124 | public boolean handleMessage(Message msg) {
125 | Log.d(TAG, "Handler got event, what: " + msg.what);
126 | switch (msg.what) {
127 | case Common.MSG_REGISTER_CLIENT: {
128 | mClients.add(msg.replyTo);
129 | break;
130 | }
131 | case Common.MSG_UNREGISTER_CLIENT: {
132 | mClients.remove(msg.replyTo);
133 | break;
134 | }
135 | case Common.MSG_STOP_CAST: {
136 | stopScreenCapture();
137 | closeSocket(true);
138 | stopSelf();
139 | }
140 | }
141 | return false;
142 | }
143 | }
144 |
145 | private BroadcastReceiver mBroadcastReceiver = new BroadcastReceiver() {
146 | @Override
147 | public void onReceive(Context context, Intent intent) {
148 | String action = intent.getAction();
149 | Log.d(TAG, "Service receive broadcast action: " + action);
150 | if (action == null) {
151 | return;
152 | }
153 | if (Common.ACTION_STOP_CAST.equals(action)) {
154 | stopScreenCapture();
155 | closeSocket(true);
156 | stopSelf();
157 | }
158 | }
159 | };
160 |
161 | @Override
162 | public void onCreate() {
163 | super.onCreate();
164 | mMediaProjectionManager = (MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
165 | mBroadcastIntentFilter = new IntentFilter();
166 | mBroadcastIntentFilter.addAction(Common.ACTION_STOP_CAST);
167 | registerReceiver(mBroadcastReceiver, mBroadcastIntentFilter);
168 | }
169 |
170 | @Override
171 | public void onDestroy() {
172 | super.onDestroy();
173 | Log.d(TAG, "Destroy service");
174 | stopScreenCapture();
175 | closeSocket(true);
176 | unregisterReceiver(mBroadcastReceiver);
177 | }
178 |
179 | @Override
180 | public int onStartCommand(Intent intent, int flags, int startId) {
181 | if (intent == null) {
182 | return START_NOT_STICKY;
183 | }
184 | mReceiverIp = intent.getStringExtra(Common.EXTRA_RECEIVER_IP);
185 | mResultCode = intent.getIntExtra(Common.EXTRA_RESULT_CODE, -1);
186 | mResultData = intent.getParcelableExtra(Common.EXTRA_RESULT_DATA);
187 | Log.d(TAG, "Remove IP: " + mReceiverIp);
188 | if (mReceiverIp == null) {
189 | return START_NOT_STICKY;
190 | }
191 | //if (mResultCode != Activity.RESULT_OK || mResultData == null) {
192 | // Log.e(TAG, "Failed to start service, mResultCode: " + mResultCode + ", mResultData: " + mResultData);
193 | // return START_NOT_STICKY;
194 | //}
195 | mSelectedWidth = intent.getIntExtra(Common.EXTRA_SCREEN_WIDTH, Common.DEFAULT_SCREEN_WIDTH);
196 | mSelectedHeight = intent.getIntExtra(Common.EXTRA_SCREEN_HEIGHT, Common.DEFAULT_SCREEN_HEIGHT);
197 | mSelectedDpi = intent.getIntExtra(Common.EXTRA_SCREEN_DPI, Common.DEFAULT_SCREEN_DPI);
198 | mSelectedBitrate = intent.getIntExtra(Common.EXTRA_VIDEO_BITRATE, Common.DEFAULT_VIDEO_BITRATE);
199 | mSelectedFormat = intent.getStringExtra(Common.EXTRA_VIDEO_FORMAT);
200 | if (mSelectedFormat == null) {
201 | mSelectedFormat = Common.DEFAULT_VIDEO_MIME_TYPE;
202 | }
203 | if (mReceiverIp.length() <= 0) {
204 | Log.d(TAG, "Start with listen mode");
205 | if (!createServerSocket()) {
206 | Log.e(TAG, "Failed to create socket to receiver, ip: " + mReceiverIp);
207 | return START_NOT_STICKY;
208 | }
209 | } else {
210 | Log.d(TAG, "Start with client mode");
211 | if (!createSocket()) {
212 | Log.e(TAG, "Failed to create socket to receiver, ip: " + mReceiverIp);
213 | return START_NOT_STICKY;
214 | }
215 | if (!startScreenCapture()) {
216 | Log.e(TAG, "Failed to start capture screen");
217 | return START_NOT_STICKY;
218 | }
219 | }
220 | return START_STICKY;
221 | }
222 |
223 | @Override
224 | public IBinder onBind(Intent intent) {
225 | return mMessenger.getBinder();
226 | }
227 |
228 | private void showNotification() {
229 | final Intent notificationIntent = new Intent(Common.ACTION_STOP_CAST);
230 | PendingIntent notificationPendingIntent = PendingIntent.getBroadcast(this, 0, notificationIntent, PendingIntent.FLAG_UPDATE_CURRENT);
231 | Notification.Builder builder = new Notification.Builder(this);
232 | builder.setSmallIcon(R.mipmap.ic_launcher)
233 | .setDefaults(Notification.DEFAULT_ALL)
234 | .setOnlyAlertOnce(true)
235 | .setOngoing(true)
236 | .setContentTitle(getString(R.string.app_name))
237 | .setContentText(getString(R.string.casting_screen))
238 | .addAction(android.R.drawable.ic_menu_close_clear_cancel, getString(R.string.action_stop), notificationPendingIntent);
239 | NotificationManager notificationManager = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
240 | notificationManager.notify(NT_ID_CASTING, builder.build());
241 | }
242 |
243 | private void dismissNotification() {
244 | NotificationManager notificationManager = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
245 | notificationManager.cancel(NT_ID_CASTING);
246 | }
247 |
248 | private boolean startScreenCapture() {
249 | Log.d(TAG, "mResultCode: " + mResultCode + ", mResultData: " + mResultData);
250 | if (mResultCode != 0 && mResultData != null) {
251 | setUpMediaProjection();
252 | startRecording();
253 | showNotification();
254 | return true;
255 | }
256 | return false;
257 | }
258 |
259 | private void setUpMediaProjection() {
260 | mMediaProjection = mMediaProjectionManager.getMediaProjection(mResultCode, mResultData);
261 | }
262 |
263 | private void startRecording() {
264 | Log.d(TAG, "startRecording");
265 | prepareVideoEncoder();
266 |
267 | //try {
268 | // mMuxer = new MediaMuxer("/sdcard/video.mp4", MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
269 | //} catch (IOException ioe) {
270 | // throw new RuntimeException("MediaMuxer creation failed", ioe);
271 | //}
272 |
273 | // Start the video input.
274 | mVirtualDisplay = mMediaProjection.createVirtualDisplay("Recording Display", mSelectedWidth,
275 | mSelectedHeight, mSelectedDpi, 0 /* flags */, mInputSurface,
276 | null /* callback */, null /* handler */);
277 |
278 | // Start the encoders
279 | drainEncoder();
280 | }
281 |
282 | private void prepareVideoEncoder() {
283 | mVideoBufferInfo = new MediaCodec.BufferInfo();
284 | MediaFormat format = MediaFormat.createVideoFormat(mSelectedFormat, mSelectedWidth, mSelectedHeight);
285 | int frameRate = Common.DEFAULT_VIDEO_FPS;
286 |
287 | // Set some required properties. The media codec may fail if these aren't defined.
288 | format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
289 | format.setInteger(MediaFormat.KEY_BIT_RATE, mSelectedBitrate);
290 | format.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
291 | format.setInteger(MediaFormat.KEY_CAPTURE_RATE, frameRate);
292 | format.setInteger(MediaFormat.KEY_REPEAT_PREVIOUS_FRAME_AFTER, 1000000 / frameRate);
293 | format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
294 | format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1); // 1 second between I-frames
295 |
296 | // Create a MediaCodec encoder and configure it. Get a Surface we can use for recording into.
297 | try {
298 | mVideoEncoder = MediaCodec.createEncoderByType(mSelectedFormat);
299 | mVideoEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
300 | mInputSurface = mVideoEncoder.createInputSurface();
301 | mVideoEncoder.start();
302 | } catch (IOException e) {
303 | Log.e(TAG, "Failed to initial encoder, e: " + e);
304 | releaseEncoders();
305 | }
306 | }
307 |
308 | private boolean drainEncoder() {
309 | mDrainHandler.removeCallbacks(mDrainEncoderRunnable);
310 | while (true) {
311 | int bufferIndex = mVideoEncoder.dequeueOutputBuffer(mVideoBufferInfo, 0);
312 |
313 | if (bufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
314 | // nothing available yet
315 | break;
316 | } else if (bufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
317 | // should happen before receiving buffers, and should only happen once
318 | //if (mTrackIndex >= 0) {
319 | // throw new RuntimeException("format changed twice");
320 | //}
321 | //mTrackIndex = mMuxer.addTrack(mVideoEncoder.getOutputFormat());
322 | //if (!mMuxerStarted && mTrackIndex >= 0) {
323 | // mMuxer.start();
324 | // mMuxerStarted = true;
325 | //}
326 | } else if (bufferIndex < 0) {
327 | // not sure what's going on, ignore it
328 | } else {
329 | ByteBuffer encodedData = mVideoEncoder.getOutputBuffer(bufferIndex);
330 | if (encodedData == null) {
331 | throw new RuntimeException("couldn't fetch buffer at index " + bufferIndex);
332 | }
333 | // Fixes playability issues on certain h264 decoders including omxh264dec on raspberry pi
334 | // See http://stackoverflow.com/a/26684736/4683709 for explanation
335 | //if ((mVideoBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
336 | // mVideoBufferInfo.size = 0;
337 | //}
338 |
339 | //Log.d(TAG, "Video buffer offset: " + mVideoBufferInfo.offset + ", size: " + mVideoBufferInfo.size);
340 | if (mVideoBufferInfo.size != 0) {
341 | encodedData.position(mVideoBufferInfo.offset);
342 | encodedData.limit(mVideoBufferInfo.offset + mVideoBufferInfo.size);
343 | if (mSocketOutputStream != null) {
344 | try {
345 | byte[] b = new byte[encodedData.remaining()];
346 | encodedData.get(b);
347 | if (mIvfWriter != null) {
348 | mIvfWriter.writeFrame(b, mVideoBufferInfo.presentationTimeUs);
349 | } else {
350 | mSocketOutputStream.write(b);
351 | }
352 | } catch (IOException e) {
353 | Log.d(TAG, "Failed to write data to socket, stop casting");
354 | e.printStackTrace();
355 | stopScreenCapture();
356 | return false;
357 | }
358 | }
359 | /*
360 | if (mMuxerStarted) {
361 | encodedData.position(mVideoBufferInfo.offset);
362 | encodedData.limit(mVideoBufferInfo.offset + mVideoBufferInfo.size);
363 | try {
364 | if (mSocketOutputStream != null) {
365 | byte[] b = new byte[encodedData.remaining()];
366 | encodedData.get(b);
367 | mSocketOutputStream.write(b);
368 | }
369 | } catch (IOException e) {
370 | e.printStackTrace();
371 | }
372 | mMuxer.writeSampleData(mTrackIndex, encodedData, mVideoBufferInfo);
373 | } else {
374 | // muxer not started
375 | }
376 | */
377 | }
378 |
379 | mVideoEncoder.releaseOutputBuffer(bufferIndex, false);
380 |
381 | if ((mVideoBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
382 | break;
383 | }
384 | }
385 | }
386 |
387 | mDrainHandler.postDelayed(mDrainEncoderRunnable, 10);
388 | return true;
389 | }
390 |
391 | private void stopScreenCapture() {
392 | dismissNotification();
393 | releaseEncoders();
394 | closeSocket();
395 | if (mVirtualDisplay == null) {
396 | return;
397 | }
398 | mVirtualDisplay.release();
399 | mVirtualDisplay = null;
400 | }
401 |
402 | private void releaseEncoders() {
403 | mDrainHandler.removeCallbacks(mDrainEncoderRunnable);
404 | /*
405 | if (mMuxer != null) {
406 | if (mMuxerStarted) {
407 | mMuxer.stop();
408 | }
409 | mMuxer.release();
410 | mMuxer = null;
411 | mMuxerStarted = false;
412 | }
413 | */
414 | if (mVideoEncoder != null) {
415 | mVideoEncoder.stop();
416 | mVideoEncoder.release();
417 | mVideoEncoder = null;
418 | }
419 | if (mInputSurface != null) {
420 | mInputSurface.release();
421 | mInputSurface = null;
422 | }
423 | if (mMediaProjection != null) {
424 | mMediaProjection.stop();
425 | mMediaProjection = null;
426 | }
427 | if (mIvfWriter != null) {
428 | mIvfWriter = null;
429 | }
430 | //mResultCode = 0;
431 | //mResultData = null;
432 | mVideoBufferInfo = null;
433 | //mTrackIndex = -1;
434 | }
435 |
436 | private boolean createServerSocket() {
437 | Thread th = new Thread(new Runnable() {
438 | @Override
439 | public void run() {
440 | try {
441 | mServerSocket = new ServerSocket(Common.VIEWER_PORT);
442 | while (!Thread.currentThread().isInterrupted() && !mServerSocket.isClosed()) {
443 | mSocket = mServerSocket.accept();
444 | CommunicationThread commThread = new CommunicationThread(mSocket);
445 | new Thread(commThread).start();
446 | }
447 | } catch (IOException e) {
448 | Log.e(TAG, "Failed to create server socket or server socket error");
449 | e.printStackTrace();
450 | }
451 | }
452 | });
453 | th.start();
454 | return true;
455 | }
456 |
457 | class CommunicationThread implements Runnable {
458 | private Socket mClientSocket;
459 |
460 | public CommunicationThread(Socket clientSocket) {
461 | mClientSocket = clientSocket;
462 | }
463 |
464 | public void run() {
465 | while (!Thread.currentThread().isInterrupted()) {
466 | try {
467 | BufferedReader input = new BufferedReader(new InputStreamReader(mClientSocket.getInputStream()));
468 | String data = input.readLine();
469 | Log.d(TAG, "Got data from socket: " + data);
470 | if (data == null || !data.equalsIgnoreCase("mirror")) {
471 | mClientSocket.close();
472 | return;
473 | }
474 | mSocketOutputStream = mClientSocket.getOutputStream();
475 | OutputStreamWriter osw = new OutputStreamWriter(mSocketOutputStream);
476 | osw.write(String.format(HTTP_MESSAGE_TEMPLATE, mSelectedWidth, mSelectedHeight));
477 | osw.flush();
478 | mSocketOutputStream.flush();
479 | if (mSelectedFormat.equals(MediaFormat.MIMETYPE_VIDEO_AVC)) {
480 | if (mSelectedWidth == 1280 && mSelectedHeight == 720) {
481 | mSocketOutputStream.write(H264_PREDEFINED_HEADER_1280x720);
482 | } else if (mSelectedWidth == 800 && mSelectedHeight == 480) {
483 | mSocketOutputStream.write(H264_PREDEFINED_HEADER_800x480);
484 | } else {
485 | Log.e(TAG, "Unknown width: " + mSelectedWidth + ", height: " + mSelectedHeight);
486 | mSocketOutputStream.close();
487 | mClientSocket.close();
488 | mClientSocket = null;
489 | mSocketOutputStream = null;
490 | }
491 | } else if (mSelectedFormat.equals(MediaFormat.MIMETYPE_VIDEO_VP8)) {
492 | mIvfWriter = new IvfWriter(mSocketOutputStream, mSelectedWidth, mSelectedHeight);
493 | mIvfWriter.writeHeader();
494 | } else {
495 | Log.e(TAG, "Unknown format: " + mSelectedFormat);
496 | mSocketOutputStream.close();
497 | mClientSocket.close();
498 | mClientSocket = null;
499 | mSocketOutputStream = null;
500 | }
501 | if (mSocketOutputStream != null) {
502 | mHandler.post(mStartEncodingRunnable);
503 | }
504 | return;
505 | } catch (UnknownHostException e) {
506 | e.printStackTrace();
507 | } catch (IOException e) {
508 | e.printStackTrace();
509 | }
510 | mClientSocket = null;
511 | mSocketOutputStream = null;
512 | }
513 | }
514 | }
515 |
516 | private boolean createSocket() {
517 | Thread th = new Thread(new Runnable() {
518 | @Override
519 | public void run() {
520 | try {
521 | InetAddress serverAddr = InetAddress.getByName(mReceiverIp);
522 | mSocket = new Socket(serverAddr, Common.VIEWER_PORT);
523 | mSocketOutputStream = mSocket.getOutputStream();
524 | OutputStreamWriter osw = new OutputStreamWriter(mSocketOutputStream);
525 | osw.write(String.format(HTTP_MESSAGE_TEMPLATE, mSelectedWidth, mSelectedHeight));
526 | osw.flush();
527 | mSocketOutputStream.flush();
528 | if (mSelectedFormat.equals(MediaFormat.MIMETYPE_VIDEO_AVC)) {
529 | if (mSelectedWidth == 1280 && mSelectedHeight == 720) {
530 | mSocketOutputStream.write(H264_PREDEFINED_HEADER_1280x720);
531 | } else if (mSelectedWidth == 800 && mSelectedHeight == 480) {
532 | mSocketOutputStream.write(H264_PREDEFINED_HEADER_800x480);
533 | } else {
534 | Log.e(TAG, "Unknown width: " + mSelectedWidth + ", height: " + mSelectedHeight);
535 | mSocketOutputStream.close();
536 | mSocket.close();
537 | mSocket = null;
538 | mSocketOutputStream = null;
539 | }
540 | } else if (mSelectedFormat.equals(MediaFormat.MIMETYPE_VIDEO_VP8)) {
541 | mIvfWriter = new IvfWriter(mSocketOutputStream, mSelectedWidth, mSelectedHeight);
542 | mIvfWriter.writeHeader();
543 | } else {
544 | Log.e(TAG, "Unknown format: " + mSelectedFormat);
545 | mSocketOutputStream.close();
546 | mSocket.close();
547 | mSocket = null;
548 | mSocketOutputStream = null;
549 | }
550 | return;
551 | } catch (UnknownHostException e) {
552 | e.printStackTrace();
553 | } catch (IOException e) {
554 | e.printStackTrace();
555 | }
556 | mSocket = null;
557 | mSocketOutputStream = null;
558 | }
559 | });
560 | th.start();
561 | try {
562 | th.join();
563 | if (mSocket != null && mSocketOutputStream != null) {
564 | return true;
565 | }
566 | } catch (InterruptedException e) {
567 | e.printStackTrace();
568 | }
569 | return false;
570 | }
571 |
572 | private void closeSocket() {
573 | closeSocket(false);
574 | }
575 |
576 | private void closeSocket(boolean closeServerSocket) {
577 | if (mSocket != null) {
578 | try {
579 | mSocket.close();
580 | } catch (IOException e) {
581 | e.printStackTrace();
582 | }
583 | }
584 | if (closeServerSocket) {
585 | if (mServerSocket != null) {
586 | try {
587 | mServerSocket.close();
588 | } catch (IOException e) {
589 | e.printStackTrace();
590 | }
591 | }
592 | mServerSocket = null;
593 | }
594 | mSocket = null;
595 | mSocketOutputStream = null;
596 | }
597 | }
598 |
--------------------------------------------------------------------------------
/app/src/main/java/com/yschi/castscreen/Common.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (C) 2016 Jones Chi
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.yschi.castscreen;
18 |
19 | import android.media.MediaFormat;
20 |
21 | /**
22 | * Created by yschi on 2015/5/28.
23 | */
24 | public class Common {
25 | public static final int VIEWER_PORT = 53515;
26 |
27 | public static final int DISCOVER_PORT = 53515;
28 | public static final String DISCOVER_MESSAGE = "hello";
29 |
30 | public static final int DEFAULT_SCREEN_WIDTH = 1280;
31 | public static final int DEFAULT_SCREEN_HEIGHT = 720;
32 | public static final int DEFAULT_SCREEN_DPI = 320;
33 | public static final int DEFAULT_VIDEO_BITRATE = 6144000;
34 | public static final int DEFAULT_VIDEO_FPS = 25;
35 | public static final String DEFAULT_VIDEO_MIME_TYPE = MediaFormat.MIMETYPE_VIDEO_AVC;
36 |
37 | // Activity to service
38 | public static final int MSG_REGISTER_CLIENT = 200;
39 | public static final int MSG_UNREGISTER_CLIENT = 201;
40 | public static final int MSG_STOP_CAST = 301;
41 |
42 | public static final String EXTRA_RESULT_CODE = "result_code";
43 | public static final String EXTRA_RESULT_DATA = "result_data";
44 | public static final String EXTRA_RECEIVER_IP = "receiver_ip";
45 |
46 | public static final String EXTRA_SCREEN_WIDTH = "screen_width";
47 | public static final String EXTRA_SCREEN_HEIGHT = "screen_height";
48 | public static final String EXTRA_SCREEN_DPI = "screen_dpi";
49 | public static final String EXTRA_VIDEO_FORMAT = "video_format";
50 | public static final String EXTRA_VIDEO_BITRATE = "video_bitrate";
51 |
52 | public static final String ACTION_STOP_CAST = "com.yschi.castscreen.ACTION_STOP_CAST";
53 | }
54 |
--------------------------------------------------------------------------------
/app/src/main/java/com/yschi/castscreen/IvfWriter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (C) 2013 The Android Open Source Project
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.yschi.castscreen;
18 |
19 | import java.io.IOException;
20 | import java.io.OutputStream;
21 | import java.io.RandomAccessFile;
22 |
23 | /**
24 | * Writes an IVF file.
25 | *
26 | * IVF format is a simple container format for VP8 encoded frames defined at
27 | * http://wiki.multimedia.cx/index.php?title=IVF.
28 | */
29 |
30 | public class IvfWriter {
31 | private static final byte HEADER_END = 32;
32 | //private RandomAccessFile mOutputFile;
33 | private OutputStream mOutputStream;
34 | private int mWidth;
35 | private int mHeight;
36 | private int mScale;
37 | private int mRate;
38 | private int mFrameCount;
39 |
40 | /**
41 | * Initializes the IVF file writer.
42 | *
43 | * Timebase fraction is in format scale/rate, e.g. 1/1000
44 | * Timestamp values supplied while writing frames should be in accordance
45 | * with this timebase value.
46 | *
47 | * @param outputStream stream to write the IVF data to
48 | * @param width frame width
49 | * @param height frame height
50 | * @param scale timebase scale (or numerator of the timebase fraction)
51 | * @param rate timebase rate (or denominator of the timebase fraction)
52 | */
53 | public IvfWriter(OutputStream outputStream,
54 | int width, int height,
55 | int scale, int rate) throws IOException {
56 | //mOutputFile = new RandomAccessFile(filename, "rw");
57 | mOutputStream = outputStream;
58 | mWidth = width;
59 | mHeight = height;
60 | mScale = scale;
61 | mRate = rate;
62 | mFrameCount = 0;
63 | //mOutputFile.setLength(0);
64 | //mOutputFile.seek(HEADER_END); // Skip the header for now, as framecount is unknown
65 | }
66 |
67 | /**
68 | * Initializes the IVF file writer with a microsecond timebase.
69 | *
70 | * Microsecond timebase is default for OMX thus stagefright.
71 | *
72 | * @param outputStream stream to write the IVF data to
73 | * @param width frame width
74 | * @param height frame height
75 | */
76 | public IvfWriter(OutputStream outputStream, int width, int height) throws IOException {
77 | this(outputStream, width, height, 1, 1000000);
78 | }
79 |
80 | /**
81 | * Finalizes the IVF header and closes the file.
82 | */
83 | public void close() throws IOException{
84 | // Write header now
85 | //mOutputFile.seek(0);
86 | //mOutputFile.write(makeIvfHeader(mFrameCount, mWidth, mHeight, mScale, mRate));
87 | //mOutputFile.close();
88 | mOutputStream.close();
89 | }
90 |
91 |
92 | public void writeHeader() throws IOException {
93 | mOutputStream.write(makeIvfHeader(mFrameCount, mWidth, mHeight, mScale, mRate));
94 | }
95 |
96 | /**
97 | * Writes a single encoded VP8 frame with its frame header.
98 | *
99 | * @param frame actual contents of the encoded frame data
100 | * @param timeStamp timestamp of the frame (in accordance to specified timebase)
101 | */
102 | public void writeFrame(byte[] frame, long timeStamp) throws IOException {
103 | mOutputStream.write(makeIvfFrameHeader(frame.length, timeStamp));
104 | mOutputStream.write(frame);
105 | mFrameCount++;
106 | }
107 |
108 | /**
109 | * Makes a 32 byte file header for IVF format.
110 | *
111 | * Timebase fraction is in format scale/rate, e.g. 1/1000
112 | *
113 | * @param frameCount total number of frames file contains
114 | * @param width frame width
115 | * @param height frame height
116 | * @param scale timebase scale (or numerator of the timebase fraction)
117 | * @param rate timebase rate (or denominator of the timebase fraction)
118 | */
119 | public static byte[] makeIvfHeader(int frameCount, int width, int height, int scale, int rate){
120 | byte[] ivfHeader = new byte[32];
121 | ivfHeader[0] = 'D';
122 | ivfHeader[1] = 'K';
123 | ivfHeader[2] = 'I';
124 | ivfHeader[3] = 'F';
125 | lay16Bits(ivfHeader, 4, 0); // version
126 | lay16Bits(ivfHeader, 6, 32); // header size
127 | ivfHeader[8] = 'V'; // fourcc
128 | ivfHeader[9] = 'P';
129 | ivfHeader[10] = '8';
130 | ivfHeader[11] = '0';
131 | lay16Bits(ivfHeader, 12, width);
132 | lay16Bits(ivfHeader, 14, height);
133 | lay32Bits(ivfHeader, 16, rate); // scale/rate
134 | lay32Bits(ivfHeader, 20, scale);
135 | lay32Bits(ivfHeader, 24, frameCount);
136 | lay32Bits(ivfHeader, 28, 0); // unused
137 | return ivfHeader;
138 | }
139 |
140 | /**
141 | * Makes a 12 byte header for an encoded frame.
142 | *
143 | * @param size frame size
144 | * @param timestamp presentation timestamp of the frame
145 | */
146 | private static byte[] makeIvfFrameHeader(int size, long timestamp){
147 | byte[] frameHeader = new byte[12];
148 | lay32Bits(frameHeader, 0, size);
149 | lay64bits(frameHeader, 4, timestamp);
150 | return frameHeader;
151 | }
152 |
153 |
154 | /**
155 | * Lays least significant 16 bits of an int into 2 items of a byte array.
156 | *
157 | * Note that ordering is little-endian.
158 | *
159 | * @param array the array to be modified
160 | * @param index index of the array to start laying down
161 | * @param value the integer to use least significant 16 bits
162 | */
163 | private static void lay16Bits(byte[] array, int index, int value){
164 | array[index] = (byte) (value);
165 | array[index + 1] = (byte) (value >> 8);
166 | }
167 |
168 | /**
169 | * Lays an int into 4 items of a byte array.
170 | *
171 | * Note that ordering is little-endian.
172 | *
173 | * @param array the array to be modified
174 | * @param index index of the array to start laying down
175 | * @param value the integer to use
176 | */
177 | private static void lay32Bits(byte[] array, int index, int value){
178 | for (int i = 0; i < 4; i++){
179 | array[index + i] = (byte) (value >> (i * 8));
180 | }
181 | }
182 |
183 | /**
184 | * Lays a long int into 8 items of a byte array.
185 | *
186 | * Note that ordering is little-endian.
187 | *
188 | * @param array the array to be modified
189 | * @param index index of the array to start laying down
190 | * @param value the integer to use
191 | */
192 | private static void lay64bits(byte[] array, int index, long value){
193 | for (int i = 0; i < 8; i++){
194 | array[index + i] = (byte) (value >> (i * 8));
195 | }
196 | }
197 | }
198 |
--------------------------------------------------------------------------------
/app/src/main/java/com/yschi/castscreen/MainActivity.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (C) 2016 Jones Chi
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.yschi.castscreen;
18 |
19 | import android.app.Activity;
20 | import android.content.ComponentName;
21 | import android.content.Context;
22 | import android.content.Intent;
23 | import android.content.ServiceConnection;
24 | import android.media.MediaFormat;
25 | import android.media.projection.MediaProjectionManager;
26 | import android.os.AsyncTask;
27 | import android.os.Bundle;
28 | import android.os.Handler;
29 | import android.os.IBinder;
30 | import android.os.Message;
31 | import android.os.Messenger;
32 | import android.os.RemoteException;
33 | import android.util.Log;
34 | import android.view.Menu;
35 | import android.view.MenuItem;
36 | import android.view.View;
37 | import android.widget.AdapterView;
38 | import android.widget.ArrayAdapter;
39 | import android.widget.Button;
40 | import android.widget.EditText;
41 | import android.widget.ListView;
42 | import android.widget.Spinner;
43 | import android.widget.TextView;
44 | import android.widget.Toast;
45 |
46 | import org.json.JSONException;
47 | import org.json.JSONObject;
48 |
49 | import java.io.IOException;
50 | import java.net.DatagramPacket;
51 | import java.net.DatagramSocket;
52 | import java.net.SocketException;
53 | import java.net.SocketTimeoutException;
54 | import java.util.Arrays;
55 | import java.util.HashMap;
56 |
57 |
58 | public class MainActivity extends Activity {
59 | private static final String TAG = "MainActivity";
60 |
61 | private static final String PREF_COMMON = "common";
62 | private static final String PREF_KEY_INPUT_RECEIVER = "input_receiver";
63 | private static final String PREF_KEY_FORMAT = "format";
64 | private static final String PREF_KEY_RECEIVER = "receiver";
65 | private static final String PREF_KEY_RESOLUTION = "resolution";
66 | private static final String PREF_KEY_BITRATE = "bitrate";
67 |
68 | private static final String[] FORMAT_OPTIONS = {
69 | MediaFormat.MIMETYPE_VIDEO_AVC,
70 | MediaFormat.MIMETYPE_VIDEO_VP8
71 | };
72 |
73 | private static final int[][] RESOLUTION_OPTIONS = {
74 | {1280, 720, 320},
75 | {800, 480, 160}
76 | };
77 |
78 | private static final int[] BITRATE_OPTIONS = {
79 | 6144000, // 6 Mbps
80 | 4096000, // 4 Mbps
81 | 2048000, // 2 Mbps
82 | 1024000 // 1 Mbps
83 | };
84 |
85 | private static final int REQUEST_MEDIA_PROJECTION = 100;
86 | private static final String STATE_RESULT_CODE = "result_code";
87 | private static final String STATE_RESULT_DATA = "result_data";
88 |
89 | private Context mContext;
90 | private MediaProjectionManager mMediaProjectionManager;
91 | private Handler mHandler = new Handler(new HandlerCallback());
92 | private Messenger mMessenger = new Messenger(mHandler);
93 | private Messenger mServiceMessenger = null;
94 | private TextView mReceiverTextView;
95 | private ListView mDiscoverListView;
96 | private ArrayAdapter<String> mDiscoverAdapter;
97 | private HashMap<String, String> mDiscoverdMap;
98 | private String mSelectedFormat = FORMAT_OPTIONS[0];
99 | private int mSelectedWidth = RESOLUTION_OPTIONS[0][0];
100 | private int mSelectedHeight = RESOLUTION_OPTIONS[0][1];
101 | private int mSelectedDpi = RESOLUTION_OPTIONS[0][2];
102 | private int mSelectedBitrate = BITRATE_OPTIONS[0];
103 | private String mReceiverIp = "";
104 | private DiscoveryTask mDiscoveryTask;
105 | private int mResultCode;
106 | private Intent mResultData;
107 |
108 | private class HandlerCallback implements Handler.Callback {
109 | public boolean handleMessage(Message msg) {
110 | Log.d(TAG, "Handler got event, what: " + msg.what);
111 | return false;
112 | }
113 | }
114 |
115 | private ServiceConnection mServiceConnection = new ServiceConnection() {
116 | @Override
117 | public void onServiceConnected(ComponentName name, IBinder service) {
118 | Log.d(TAG, "Service connected, name: " + name);
119 | mServiceMessenger = new Messenger(service);
120 | try {
121 | Message msg = Message.obtain(null, Common.MSG_REGISTER_CLIENT);
122 | msg.replyTo = mMessenger;
123 | mServiceMessenger.send(msg);
124 | Log.d(TAG, "Connected to service, send register client back");
125 | } catch (RemoteException e) {
126 | Log.d(TAG, "Failed to send message back to service, e: " + e.toString());
127 | e.printStackTrace();
128 | }
129 | }
130 |
131 | @Override
132 | public void onServiceDisconnected(ComponentName name) {
133 | Log.d(TAG, "Service disconnected, name: " + name);
134 | mServiceMessenger = null;
135 | }
136 | };
137 |
138 |
139 | @Override
140 | protected void onCreate(Bundle savedInstanceState) {
141 | super.onCreate(savedInstanceState);
142 | setContentView(R.layout.activity_main);
143 |
144 | if (savedInstanceState != null) {
145 | mResultCode = savedInstanceState.getInt(STATE_RESULT_CODE);
146 | mResultData = savedInstanceState.getParcelable(STATE_RESULT_DATA);
147 | }
148 |
149 | mContext = this;
150 | mMediaProjectionManager = (MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
151 |
152 | mDiscoverdMap = new HashMap<>();
153 | mDiscoverListView = (ListView) findViewById(R.id.discover_listview);
154 | mDiscoverAdapter = new ArrayAdapter<>(this,
155 | android.R.layout.simple_list_item_1);
156 | mDiscoverAdapter.addAll(mDiscoverdMap.keySet());
157 | mDiscoverListView.setAdapter(mDiscoverAdapter);
158 | mDiscoverListView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
159 | @Override
160 | public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) {
161 | String name = mDiscoverAdapter.getItem(i);
162 | String ip = mDiscoverdMap.get(name);
163 | Log.d(TAG, "Select receiver name: " + name + ", ip: " + ip);
164 | mReceiverIp = ip;
165 | updateReceiverStatus();
166 | mContext.getSharedPreferences(PREF_COMMON, 0).edit().putString(PREF_KEY_RECEIVER, mReceiverIp).commit();
167 | }
168 | });
169 |
170 | // add server mode option
171 | mDiscoverAdapter.add(mContext.getString(R.string.server_mode));
172 | mDiscoverdMap.put(mContext.getString(R.string.server_mode), "");
173 |
174 | mReceiverTextView = (TextView) findViewById(R.id.receiver_textview);
175 | final EditText ipEditText = (EditText) findViewById(R.id.ip_edittext);
176 | final Button selectButton = (Button) findViewById(R.id.select_button);
177 | selectButton.setOnClickListener(new View.OnClickListener() {
178 | @Override
179 | public void onClick(View view) {
180 | if (ipEditText.getText().length() > 0) {
181 | mReceiverIp = ipEditText.getText().toString();
182 | Log.d(TAG, "Using ip: " + mReceiverIp);
183 | updateReceiverStatus();
184 | mContext.getSharedPreferences(PREF_COMMON, 0).edit().putString(PREF_KEY_INPUT_RECEIVER, mReceiverIp).commit();
185 | mContext.getSharedPreferences(PREF_COMMON, 0).edit().putString(PREF_KEY_RECEIVER, mReceiverIp).commit();
186 | }
187 | }
188 | });
189 | ipEditText.setText(mContext.getSharedPreferences(PREF_COMMON, 0).getString(PREF_KEY_INPUT_RECEIVER, ""));
190 |
191 | Spinner formatSpinner = (Spinner) findViewById(R.id.format_spinner);
192 | ArrayAdapter<CharSequence> formatAdapter = ArrayAdapter.createFromResource(this,
193 | R.array.format_options, android.R.layout.simple_spinner_item);
194 | formatAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
195 | formatSpinner.setAdapter(formatAdapter);
196 | formatSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
197 | @Override
198 | public void onItemSelected(AdapterView<?> adapterView, View view, int i, long l) {
199 | mSelectedFormat = FORMAT_OPTIONS[i];
200 | mContext.getSharedPreferences(PREF_COMMON, 0).edit().putInt(PREF_KEY_FORMAT, i).commit();
201 | }
202 |
203 | @Override
204 | public void onNothingSelected(AdapterView<?> adapterView) {
205 | mSelectedFormat = FORMAT_OPTIONS[0];
206 | mContext.getSharedPreferences(PREF_COMMON, 0).edit().putInt(PREF_KEY_FORMAT, 0).commit();
207 | }
208 | });
209 | formatSpinner.setSelection(mContext.getSharedPreferences(PREF_COMMON, 0).getInt(PREF_KEY_FORMAT, 0));
210 |
211 | Spinner resolutionSpinner = (Spinner) findViewById(R.id.resolution_spinner);
212 | ArrayAdapter<CharSequence> resolutionAdapter = ArrayAdapter.createFromResource(this,
213 | R.array.resolution_options, android.R.layout.simple_spinner_item);
214 | resolutionAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
215 | resolutionSpinner.setAdapter(resolutionAdapter);
216 | resolutionSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
217 | @Override
218 | public void onItemSelected(AdapterView<?> adapterView, View view, int i, long l) {
219 | mSelectedWidth = RESOLUTION_OPTIONS[i][0];
220 | mSelectedHeight = RESOLUTION_OPTIONS[i][1];
221 | mSelectedDpi = RESOLUTION_OPTIONS[i][2];
222 | mContext.getSharedPreferences(PREF_COMMON, 0).edit().putInt(PREF_KEY_RESOLUTION, i).commit();
223 | }
224 |
225 | @Override
226 | public void onNothingSelected(AdapterView<?> adapterView) {
227 | mSelectedWidth = RESOLUTION_OPTIONS[0][0];
228 | mSelectedHeight = RESOLUTION_OPTIONS[0][1];
229 | mSelectedDpi = RESOLUTION_OPTIONS[0][2];
230 | mContext.getSharedPreferences(PREF_COMMON, 0).edit().putInt(PREF_KEY_RESOLUTION, 0).commit();
231 | }
232 | });
233 | resolutionSpinner.setSelection(mContext.getSharedPreferences(PREF_COMMON, 0).getInt(PREF_KEY_RESOLUTION, 0));
234 |
235 | Spinner bitrateSpinner = (Spinner) findViewById(R.id.bitrate_spinner);
236 | ArrayAdapter<CharSequence> bitrateAdapter = ArrayAdapter.createFromResource(this,
237 | R.array.bitrate_options, android.R.layout.simple_spinner_item);
238 | bitrateAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
239 | bitrateSpinner.setAdapter(bitrateAdapter);
240 | bitrateSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
241 | @Override
242 | public void onItemSelected(AdapterView<?> adapterView, View view, int i, long l) {
243 | mSelectedBitrate = BITRATE_OPTIONS[i];
244 | mContext.getSharedPreferences(PREF_COMMON, 0).edit().putInt(PREF_KEY_BITRATE, i).commit();
245 | }
246 |
247 | @Override
248 | public void onNothingSelected(AdapterView<?> adapterView) {
249 | mSelectedBitrate = BITRATE_OPTIONS[0];
250 | mContext.getSharedPreferences(PREF_COMMON, 0).edit().putInt(PREF_KEY_BITRATE, 0).commit();
251 | }
252 | });
253 | bitrateSpinner.setSelection(mContext.getSharedPreferences(PREF_COMMON, 0).getInt(PREF_KEY_BITRATE, 0));
254 |
255 | mReceiverIp = mContext.getSharedPreferences(PREF_COMMON, 0).getString(PREF_KEY_RECEIVER, "");
256 | updateReceiverStatus();
257 | startService();
258 | }
259 |
260 | @Override
261 | public void onResume() {
262 | super.onResume();
263 |
264 | // start discovery task
265 | mDiscoveryTask = new DiscoveryTask();
266 | mDiscoveryTask.execute();
267 | }
268 |
269 | @Override
270 | public void onPause() {
271 | super.onPause();
272 | mDiscoveryTask.cancel(true);
273 | }
274 | @Override
275 | protected void onDestroy() {
276 | super.onDestroy();
277 | doUnbindService();
278 | }
279 |
280 | @Override
281 | public boolean onCreateOptionsMenu(Menu menu) {
282 | // Inflate the menu; this adds items to the action bar if it is present.
283 | getMenuInflater().inflate(R.menu.menu_main, menu);
284 | //if (mInputSurface != null) {
285 | // menu.findItem(R.id.action_start).setVisible(false);
286 | // menu.findItem(R.id.action_stop).setVisible(true);
287 | //} else {
288 | // menu.findItem(R.id.action_start).setVisible(true);
289 | // menu.findItem(R.id.action_stop).setVisible(false);
290 | //}
291 | return true;
292 | }
293 |
294 | @Override
295 | public boolean onOptionsItemSelected(MenuItem item) {
296 | // Handle action bar item clicks here. The action bar will
297 | // automatically handle clicks on the Home/Up button, so long
298 | // as you specify a parent activity in AndroidManifest.xml.
299 | int id = item.getItemId();
300 |
301 | //noinspection SimplifiableIfStatement
302 | if (id == R.id.action_start) {
303 | Log.d(TAG, "==== start ====");
304 | if (mReceiverIp != null) {
305 | startCaptureScreen();
306 | //invalidateOptionsMenu();
307 | } else {
308 | Toast.makeText(mContext, R.string.no_receiver, Toast.LENGTH_SHORT).show();
309 | }
310 | return true;
311 | } else if (id == R.id.action_stop) {
312 | Log.d(TAG, "==== stop ====");
313 | stopScreenCapture();
314 | //invalidateOptionsMenu();
315 | return true;
316 | }
317 |
318 | return super.onOptionsItemSelected(item);
319 | }
320 |
321 | @Override
322 | public void onActivityResult(int requestCode, int resultCode, Intent data) {
323 | if (requestCode == REQUEST_MEDIA_PROJECTION) {
324 | if (resultCode != Activity.RESULT_OK) {
325 | Log.d(TAG, "User cancelled");
326 | Toast.makeText(mContext, R.string.user_cancelled, Toast.LENGTH_SHORT).show();
327 | return;
328 | }
329 | Log.d(TAG, "Starting screen capture");
330 | mResultCode = resultCode;
331 | mResultData = data;
332 | startCaptureScreen();
333 | }
334 | }
335 |
336 | @Override
337 | public void onSaveInstanceState(Bundle outState) {
338 | super.onSaveInstanceState(outState);
339 | if (mResultData != null) {
340 | outState.putInt(STATE_RESULT_CODE, mResultCode);
341 | outState.putParcelable(STATE_RESULT_DATA, mResultData);
342 | }
343 | }
344 |
345 | private void updateReceiverStatus() {
346 | if (mReceiverIp.length() > 0) {
347 | mReceiverTextView.setText(String.format(mContext.getString(R.string.receiver), mReceiverIp));
348 | } else {
349 | mReceiverTextView.setText(R.string.no_receiver);
350 | }
351 | }
352 |
353 | private void startCaptureScreen() {
354 | if (mResultCode != 0 && mResultData != null) {
355 | startService();
356 | } else {
357 | Log.d(TAG, "Requesting confirmation");
358 | // This initiates a prompt dialog for the user to confirm screen projection.
359 | startActivityForResult(
360 | mMediaProjectionManager.createScreenCaptureIntent(),
361 | REQUEST_MEDIA_PROJECTION);
362 | }
363 | }
364 |
365 | private void stopScreenCapture() {
366 | if (mServiceMessenger == null) {
367 | return;
368 | }
369 | final Intent stopCastIntent = new Intent(Common.ACTION_STOP_CAST);
370 | sendBroadcast(stopCastIntent);
371 | /*
372 | try {
373 | Message msg = Message.obtain(null, Common.MSG_STOP_CAST);
374 | mServiceMessenger.send(msg);
375 | } catch (RemoteException e) {
376 | Log.e(TAG, "Failed to send stop message to service");
377 | e.printStackTrace();
378 | }*/
379 | }
380 |
381 | private void startService() {
382 | if (mResultCode != 0 && mResultData != null && mReceiverIp != null) {
383 | Intent intent = new Intent(this, CastService.class);
384 | intent.putExtra(Common.EXTRA_RESULT_CODE, mResultCode);
385 | intent.putExtra(Common.EXTRA_RESULT_DATA, mResultData);
386 | intent.putExtra(Common.EXTRA_RECEIVER_IP, mReceiverIp);
387 | intent.putExtra(Common.EXTRA_VIDEO_FORMAT, mSelectedFormat);
388 | intent.putExtra(Common.EXTRA_SCREEN_WIDTH, mSelectedWidth);
389 | intent.putExtra(Common.EXTRA_SCREEN_HEIGHT, mSelectedHeight);
390 | intent.putExtra(Common.EXTRA_SCREEN_DPI, mSelectedDpi);
391 | intent.putExtra(Common.EXTRA_VIDEO_BITRATE, mSelectedBitrate);
392 | Log.d(TAG, "===== start service =====");
393 | startService(intent);
394 | bindService(intent, mServiceConnection, Context.BIND_AUTO_CREATE);
395 | } else {
396 | Intent intent = new Intent(this, CastService.class);
397 | startService(intent);
398 | bindService(intent, mServiceConnection, Context.BIND_AUTO_CREATE);
399 | }
400 | }
401 |
402 | private void doUnbindService() {
403 | if (mServiceMessenger != null) {
404 | try {
405 | Message msg = Message.obtain(null, Common.MSG_UNREGISTER_CLIENT);
406 | msg.replyTo = mMessenger;
407 | mServiceMessenger.send(msg);
408 | } catch (RemoteException e) {
409 | Log.d(TAG, "Failed to send unregister message to service, e: " + e.toString());
410 | e.printStackTrace();
411 | }
412 | unbindService(mServiceConnection);
413 | }
414 | }
415 |
416 | private class DiscoveryTask extends AsyncTask<Void, Void, Void> {
417 | @Override
418 | protected Void doInBackground(Void... voids) {
419 | try {
420 | DatagramSocket discoverUdpSocket = new DatagramSocket();
421 | Log.d(TAG, "Bind local port: " + discoverUdpSocket.getLocalPort());
422 | discoverUdpSocket.setSoTimeout(3000);
423 | byte[] buf = new byte[1024];
424 | while (true) {
425 | if (!Utils.sendBroadcastMessage(mContext, discoverUdpSocket, Common.DISCOVER_PORT, Common.DISCOVER_MESSAGE)) {
426 | Log.w(TAG, "Failed to send discovery message");
427 | }
428 | Arrays.fill(buf, (byte)0);
429 | DatagramPacket receivePacket = new DatagramPacket(buf, buf.length);
430 | try {
431 | discoverUdpSocket.receive(receivePacket);
432 | String ip = receivePacket.getAddress().getHostAddress();
433 | Log.d(TAG, "Receive discover response from " + ip + ", length: " + receivePacket.getLength());
434 | if (receivePacket.getLength() > 9) {
435 | String respMsg = new String(receivePacket.getData());
436 | Log.d(TAG, "Discover response message: " + respMsg);
437 | try {
438 | JSONObject json = new JSONObject(respMsg);
439 | String name = json.getString("name");
440 | //String id = json.getString("id");
441 | String width = json.getString("width");
442 | String height = json.getString("height");
443 | mDiscoverdMap.put(name, ip);
444 | mHandler.post(new Runnable() {
445 | @Override
446 | public void run() {
447 | mDiscoverAdapter.clear();
448 | mDiscoverAdapter.addAll(mDiscoverdMap.keySet());
449 | }
450 | });
451 | Log.d(TAG, "Got receiver name: " + name + ", ip: " + ip + ", width: " + width + ", height: " + height);
452 | } catch (JSONException e) {
453 | e.printStackTrace();
454 | }
455 | }
456 |                 } catch (SocketTimeoutException e) { // no receiver replied within the 3-second socket timeout; loop and retry after the sleep below
457 | }
458 |
459 | Thread.sleep(3000);
460 | }
461 | } catch (SocketException e) {
462 | Log.d(TAG, "Failed to create socket for discovery");
463 | e.printStackTrace();
464 | } catch (IOException e) {
465 | e.printStackTrace();
466 | } catch (InterruptedException e) {
467 | e.printStackTrace();
468 | }
469 | return null;
470 | }
471 | }
472 | }
473 |
--------------------------------------------------------------------------------
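A note on the discovery handshake implemented by DiscoveryTask above: the app broadcasts a short UDP probe (the bundled receivers under /receiver listen on port 53515 and expect the string "hello") and parses the JSON reply for the "name", "width" and "height" fields. The sketch below is a hypothetical stand-alone responder written against that observed behaviour, not code from this project; the class name, reply values and the hard-coded 53515 port are assumptions taken from the receiver sources.

import java.net.DatagramPacket;
import java.net.DatagramSocket;

// Hypothetical discovery responder: answers the app's UDP "hello" probe with the
// JSON fields DiscoveryTask reads (name, width, height). Port taken from /receiver.
public class DiscoveryResponder {
    public static void main(String[] args) throws Exception {
        DatagramSocket sock = new DatagramSocket(53515);
        byte[] buf = new byte[1024];
        while (true) {
            DatagramPacket probe = new DatagramPacket(buf, buf.length);
            sock.receive(probe);
            String msg = new String(probe.getData(), 0, probe.getLength());
            if (!msg.startsWith("hello")) {
                continue; // not a CastScreen discovery probe
            }
            String reply = "{\"name\":\"JavaReceiver\",\"width\":\"1280\",\"height\":\"720\"}";
            byte[] data = reply.getBytes();
            sock.send(new DatagramPacket(data, data.length, probe.getAddress(), probe.getPort()));
        }
    }
}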
/app/src/main/java/com/yschi/castscreen/Utils.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (C) 2016 Jones Chi
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.yschi.castscreen;
18 |
19 | import android.content.Context;
20 | import android.net.DhcpInfo;
21 | import android.net.wifi.WifiManager;
22 |
23 | import java.io.IOException;
24 | import java.net.DatagramPacket;
25 | import java.net.DatagramSocket;
26 | import java.net.InetAddress;
27 |
28 | /**
29 | * Created by yschi on 2015/5/27.
30 | */
31 | public class Utils {
32 | static public InetAddress getBroadcastAddress(Context context) throws IOException {
33 | WifiManager wifi = (WifiManager) context.getSystemService(Context.WIFI_SERVICE);
34 | DhcpInfo dhcp = wifi.getDhcpInfo();
35 | if (dhcp == null) {
36 | return null;
37 | }
38 |
39 | int broadcast = (dhcp.ipAddress & dhcp.netmask) | ~dhcp.netmask;
40 | byte[] quads = new byte[4];
41 | for (int k = 0; k < 4; k++) {
42 | quads[k] = (byte) ((broadcast >> k * 8) & 0xFF);
43 | }
44 | return InetAddress.getByAddress(quads);
45 | }
46 |
47 | static public boolean sendBroadcastMessage(Context context, DatagramSocket socket, int port, String message) {
48 |
49 | try {
50 | InetAddress broadcastAddr = getBroadcastAddress(context);
51 | if (broadcastAddr == null) {
52 | return false;
53 | }
54 | socket.setBroadcast(true);
55 |             byte[] payload = message.getBytes();
56 |             DatagramPacket packet = new DatagramPacket(payload, payload.length, broadcastAddr, port);
57 | socket.send(packet);
58 | return true;
59 | } catch (IOException e) {
60 | e.printStackTrace();
61 | }
62 | return false;
63 | }
64 |
65 | }
66 |
--------------------------------------------------------------------------------
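The broadcast address computed above is (ipAddress & netmask) | ~netmask, and the byte loop emits the low-order byte first because DhcpInfo on (little-endian) Android devices stores the first octet of the dotted address in the least significant byte of the int. Below is a stand-alone check of that arithmetic with hypothetical sample values (192.168.0.11 / 255.255.255.0); it is not code from this project.

import java.net.InetAddress;

// Stand-alone check of the Utils.getBroadcastAddress() arithmetic with sample values,
// packed first-octet-in-low-byte to match the DhcpInfo convention used above.
public class BroadcastMath {
    public static void main(String[] args) throws Exception {
        int ip = pack(192, 168, 0, 11);
        int netmask = pack(255, 255, 255, 0);
        int broadcast = (ip & netmask) | ~netmask;
        byte[] quads = new byte[4];
        for (int k = 0; k < 4; k++) {
            quads[k] = (byte) ((broadcast >> k * 8) & 0xFF);
        }
        // Prints /192.168.0.255
        System.out.println(InetAddress.getByAddress(quads));
    }

    // First octet of the dotted address goes into the least significant byte.
    private static int pack(int a, int b, int c, int d) {
        return (d << 24) | (c << 16) | (b << 8) | a;
    }
}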
/app/src/main/res/layout/activity_main.xml:
--------------------------------------------------------------------------------
1 |
8 |
9 |
13 |
14 |
18 |
19 |
23 |
24 |
29 |
35 |
41 |
42 |
43 |
48 |
49 |
55 |
56 |
57 |
--------------------------------------------------------------------------------
/app/src/main/res/menu/menu_main.xml:
--------------------------------------------------------------------------------
1 |
3 |
5 |
7 |
8 |
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-hdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/JonesChi/CastScreen/28dfc7cd61f5495bd29fcb91a676979ca3287356/app/src/main/res/mipmap-hdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-mdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/JonesChi/CastScreen/28dfc7cd61f5495bd29fcb91a676979ca3287356/app/src/main/res/mipmap-mdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/JonesChi/CastScreen/28dfc7cd61f5495bd29fcb91a676979ca3287356/app/src/main/res/mipmap-xhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/JonesChi/CastScreen/28dfc7cd61f5495bd29fcb91a676979ca3287356/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/JonesChi/CastScreen/28dfc7cd61f5495bd29fcb91a676979ca3287356/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
--------------------------------------------------------------------------------
/app/src/main/res/values-v21/styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
5 |
6 |
--------------------------------------------------------------------------------
/app/src/main/res/values-w820dp/dimens.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 | 64dp
6 |
7 |
--------------------------------------------------------------------------------
/app/src/main/res/values/dimens.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | 16dp
4 | 16dp
5 |
6 |
--------------------------------------------------------------------------------
/app/src/main/res/values/strings.xml:
--------------------------------------------------------------------------------
1 |
2 | CastScreen
3 | Input Receiver
4 | Start
5 | Stop
6 | Failed to connect to %1$s
7 | Receiver %1$s
8 | Using server mode
9 | Casting screen
10 | User cancelled
11 | Server mode
12 |
13 |
14 | - H264
15 | - IVF
16 |
17 |
18 |
19 | - 1280x720 @ 320
20 | - 800x480 @ 160
21 |
22 |
23 |
24 | - 6 Mbps
25 | - 4 Mbps
26 | - 2 Mbps
27 | - 1 Mbps
28 |
29 |
30 |
--------------------------------------------------------------------------------
/app/src/main/res/values/styles.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
1 | // Top-level build file where you can add configuration options common to all sub-projects/modules.
2 |
3 | buildscript {
4 | repositories {
5 | jcenter()
6 | maven {
7 | url 'https://maven.google.com/'
8 | name 'Google'
9 | }
10 | }
11 | dependencies {
12 | classpath 'com.android.tools.build:gradle:3.1.2'
13 |
14 | // NOTE: Do not place your application dependencies here; they belong
15 | // in the individual module build.gradle files
16 | }
17 | }
18 |
19 | allprojects {
20 | repositories {
21 | jcenter()
22 | maven {
23 | url 'https://maven.google.com/'
24 | name 'Google'
25 | }
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/castscreen.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | # Project-wide Gradle settings.
2 |
3 | # IDE (e.g. Android Studio) users:
4 | # Gradle settings configured through the IDE *will override*
5 | # any settings specified in this file.
6 |
7 | # For more details on how to configure your build environment visit
8 | # http://www.gradle.org/docs/current/userguide/build_environment.html
9 |
10 | # Specifies the JVM arguments used for the daemon process.
11 | # The setting is particularly useful for tweaking memory settings.
12 | # Default value: -Xmx10248m -XX:MaxPermSize=256m
13 | # org.gradle.jvmargs=-Xmx2048m -XX:MaxPermSize=512m -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8
14 |
15 | # When configured, Gradle will run in incubating parallel mode.
16 | # This option should only be used with decoupled projects. More details, visit
17 | # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
18 | # org.gradle.parallel=true
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/JonesChi/CastScreen/28dfc7cd61f5495bd29fcb91a676979ca3287356/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | distributionBase=GRADLE_USER_HOME
2 | distributionPath=wrapper/dists
3 | zipStoreBase=GRADLE_USER_HOME
4 | zipStorePath=wrapper/dists
5 | distributionUrl=https\://services.gradle.org/distributions/gradle-4.4-all.zip
6 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | ##############################################################################
4 | ##
5 | ## Gradle start up script for UN*X
6 | ##
7 | ##############################################################################
8 |
9 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
10 | DEFAULT_JVM_OPTS=""
11 |
12 | APP_NAME="Gradle"
13 | APP_BASE_NAME=`basename "$0"`
14 |
15 | # Use the maximum available, or set MAX_FD != -1 to use that value.
16 | MAX_FD="maximum"
17 |
18 | warn ( ) {
19 | echo "$*"
20 | }
21 |
22 | die ( ) {
23 | echo
24 | echo "$*"
25 | echo
26 | exit 1
27 | }
28 |
29 | # OS specific support (must be 'true' or 'false').
30 | cygwin=false
31 | msys=false
32 | darwin=false
33 | case "`uname`" in
34 | CYGWIN* )
35 | cygwin=true
36 | ;;
37 | Darwin* )
38 | darwin=true
39 | ;;
40 | MINGW* )
41 | msys=true
42 | ;;
43 | esac
44 |
45 | # Attempt to set APP_HOME
46 | # Resolve links: $0 may be a link
47 | PRG="$0"
48 | # Need this for relative symlinks.
49 | while [ -h "$PRG" ] ; do
50 | ls=`ls -ld "$PRG"`
51 | link=`expr "$ls" : '.*-> \(.*\)$'`
52 | if expr "$link" : '/.*' > /dev/null; then
53 | PRG="$link"
54 | else
55 | PRG=`dirname "$PRG"`"/$link"
56 | fi
57 | done
58 | SAVED="`pwd`"
59 | cd "`dirname \"$PRG\"`/" >/dev/null
60 | APP_HOME="`pwd -P`"
61 | cd "$SAVED" >/dev/null
62 |
63 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
64 |
65 | # Determine the Java command to use to start the JVM.
66 | if [ -n "$JAVA_HOME" ] ; then
67 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
68 | # IBM's JDK on AIX uses strange locations for the executables
69 | JAVACMD="$JAVA_HOME/jre/sh/java"
70 | else
71 | JAVACMD="$JAVA_HOME/bin/java"
72 | fi
73 | if [ ! -x "$JAVACMD" ] ; then
74 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
75 |
76 | Please set the JAVA_HOME variable in your environment to match the
77 | location of your Java installation."
78 | fi
79 | else
80 | JAVACMD="java"
81 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
82 |
83 | Please set the JAVA_HOME variable in your environment to match the
84 | location of your Java installation."
85 | fi
86 |
87 | # Increase the maximum file descriptors if we can.
88 | if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
89 | MAX_FD_LIMIT=`ulimit -H -n`
90 | if [ $? -eq 0 ] ; then
91 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
92 | MAX_FD="$MAX_FD_LIMIT"
93 | fi
94 | ulimit -n $MAX_FD
95 | if [ $? -ne 0 ] ; then
96 | warn "Could not set maximum file descriptor limit: $MAX_FD"
97 | fi
98 | else
99 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
100 | fi
101 | fi
102 |
103 | # For Darwin, add options to specify how the application appears in the dock
104 | if $darwin; then
105 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
106 | fi
107 |
108 | # For Cygwin, switch paths to Windows format before running java
109 | if $cygwin ; then
110 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
111 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
112 | JAVACMD=`cygpath --unix "$JAVACMD"`
113 |
114 | # We build the pattern for arguments to be converted via cygpath
115 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
116 | SEP=""
117 | for dir in $ROOTDIRSRAW ; do
118 | ROOTDIRS="$ROOTDIRS$SEP$dir"
119 | SEP="|"
120 | done
121 | OURCYGPATTERN="(^($ROOTDIRS))"
122 | # Add a user-defined pattern to the cygpath arguments
123 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
124 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
125 | fi
126 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
127 | i=0
128 | for arg in "$@" ; do
129 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
130 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
131 |
132 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
133 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
134 | else
135 | eval `echo args$i`="\"$arg\""
136 | fi
137 | i=$((i+1))
138 | done
139 | case $i in
140 | (0) set -- ;;
141 | (1) set -- "$args0" ;;
142 | (2) set -- "$args0" "$args1" ;;
143 | (3) set -- "$args0" "$args1" "$args2" ;;
144 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
145 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
146 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
147 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
148 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
149 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
150 | esac
151 | fi
152 |
153 | # Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
154 | function splitJvmOpts() {
155 | JVM_OPTS=("$@")
156 | }
157 | eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
158 | JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
159 |
160 | exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
161 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
12 | set DEFAULT_JVM_OPTS=
13 |
14 | set DIRNAME=%~dp0
15 | if "%DIRNAME%" == "" set DIRNAME=.
16 | set APP_BASE_NAME=%~n0
17 | set APP_HOME=%DIRNAME%
18 |
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
49 | @rem Get command-line arguments, handling Windowz variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 | if "%@eval[2+2]" == "4" goto 4NT_args
53 |
54 | :win9xME_args
55 | @rem Slurp the command line arguments.
56 | set CMD_LINE_ARGS=
57 | set _SKIP=2
58 |
59 | :win9xME_args_slurp
60 | if "x%~1" == "x" goto execute
61 |
62 | set CMD_LINE_ARGS=%*
63 | goto execute
64 |
65 | :4NT_args
66 | @rem Get arguments from the 4NT Shell from JP Software
67 | set CMD_LINE_ARGS=%$
68 |
69 | :execute
70 | @rem Setup the command line
71 |
72 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
73 |
74 | @rem Execute Gradle
75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
76 |
77 | :end
78 | @rem End local scope for the variables with windows NT shell
79 | if "%ERRORLEVEL%"=="0" goto mainEnd
80 |
81 | :fail
82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
83 | rem the _cmd.exe /c_ return code!
84 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
85 | exit /b 1
86 |
87 | :mainEnd
88 | if "%OS%"=="Windows_NT" endlocal
89 |
90 | :omega
91 |
--------------------------------------------------------------------------------
/receiver/Makefile:
--------------------------------------------------------------------------------
1 | all: cs_receiver cs_receiver_conn cs_receiver_arm cs_receiver_conn_arm
2 |
3 | cs_receiver: cs_receiver.c
4 | gcc -Wall -o $@ $< $(LIBS)
5 |
6 | cs_receiver_conn: cs_receiver.c
7 | gcc -Wall -DCLIENT_MODE -o $@ $< $(LIBS)
8 |
9 | cs_receiver_arm: cs_receiver.c
10 | arm-cortexa9_neon-linux-gnueabi-gcc -Wall -DVPUDEC -o $@ $< $(LIBS)
11 |
12 | cs_receiver_conn_arm: cs_receiver.c
13 | arm-cortexa9_neon-linux-gnueabi-gcc -Wall -DCLIENT_MODE -DVPUDEC -o $@ $< $(LIBS)
14 |
15 | clean:
16 | rm -f cs_receiver
17 | rm -f cs_receiver_conn
18 | rm -f cs_receiver_arm
19 | rm -f cs_receiver_conn_arm
20 |
--------------------------------------------------------------------------------
/receiver/cs_receiver.c:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (C) 2016 Jones Chi
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | #include <stdio.h>
18 | #include <stdlib.h>
19 | #include <string.h>
20 | #include <unistd.h>
21 | #include <errno.h>
22 | #include <fcntl.h>
23 | #include <signal.h>
24 | #include <sys/types.h>
25 | #include <sys/stat.h>
26 | #include <sys/wait.h>
27 | #include <sys/socket.h>
28 | #include <netinet/in.h>
29 | #include <arpa/inet.h>
30 |
31 | #define USE_FIFO 0
32 |
33 | #define READ 0
34 | #define WRITE 1
35 |
36 | #define DISCOVER_PORT 53515
37 | #define DISCOVER_MSG "hello"
38 | #define LOCAL_SERVER_PORT 53516
39 |
40 | #define DISCOVER_MSG_TEMPLATE "{\"port\":%d,\"name\":\"CsReceiver @ %s\",\"id\":\"%s\",\"width\":1280,\"height\":960,\"mirror\":\"h264\",\"audio\":\"pcm\",\"subtitles\":\"text/vtt\",\"proxyHeaders\":true,\"hls\":false,\"upsell\":true}"
41 |
42 | #define FIFO_PATH "/tmp/cast_fifo"
43 |
44 | pid_t popen2(const char **command, int *infp, int *outfp)
45 | {
46 | int p_stdin[2], p_stdout[2];
47 | pid_t pid;
48 |
49 | if (pipe(p_stdin) != 0 || pipe(p_stdout) != 0)
50 | return -1;
51 |
52 | pid = fork();
53 |
54 | if (pid < 0)
55 | return pid;
56 | else if (pid == 0)
57 | {
58 | close(p_stdin[WRITE]);
59 | dup2(p_stdin[READ], READ);
60 | close(p_stdout[READ]);
61 | dup2(p_stdout[WRITE], WRITE);
62 |
63 | execvp((const char *)*command,(char* const*) command);
64 | perror("execvp");
65 | exit(1);
66 | }
67 |
68 | if (infp == NULL)
69 | close(p_stdin[WRITE]);
70 | else
71 | *infp = p_stdin[WRITE];
72 |
73 | if (outfp == NULL)
74 | close(p_stdout[READ]);
75 | else
76 | *outfp = p_stdout[READ];
77 |
78 | return pid;
79 | }
80 |
81 | int setup_udp_socket() {
82 | int udp_sock = -1;
83 | int so_reuseaddr = 1;
84 | int pktinfo = 1;
85 | struct sockaddr_in broadcast_addr;
86 |
87 | if ((udp_sock = socket(AF_INET, SOCK_DGRAM, 0)) < 0) {
88 | perror("Error when creating udp socket");
89 | return -1;
90 | }
91 |
92 | if (setsockopt(udp_sock, SOL_SOCKET, SO_REUSEADDR, &so_reuseaddr, sizeof(so_reuseaddr)) < 0) {
93 | perror("Error when setting reuseaddr for udp socket");
94 | return -1;
95 | }
96 |
97 | if (setsockopt(udp_sock, IPPROTO_IP, IP_PKTINFO, &pktinfo, sizeof(pktinfo)) < 0) {
98 | perror("Error when setting pktinfo for udp socket");
99 | return -1;
100 | }
101 |
102 | memset((char *)&broadcast_addr, 0, sizeof(broadcast_addr));
103 | broadcast_addr.sin_family = AF_INET;
104 | broadcast_addr.sin_addr.s_addr = htonl(INADDR_ANY);
105 | broadcast_addr.sin_port = htons(DISCOVER_PORT);
106 |
107 | if (bind(udp_sock, (struct sockaddr *)&broadcast_addr, sizeof(broadcast_addr)) < 0) {
108 | perror("Error when binding broadcast port for udp socket");
109 | return -1;
110 | }
111 | return udp_sock;
112 | }
113 |
114 | int main(int argc, char* argv[])
115 | {
116 | int fifo_fp = -1;
117 | int udp_sock = -1;
118 | int tcp_sock = -1;
119 | int tcp_client_sock = -1;
120 | int max_sock = -1;
121 | #ifndef CLIENT_MODE
122 | struct sockaddr_in my_addr;
123 | #endif
124 | struct sockaddr_in peer_addr;
125 | unsigned int addr_len;
126 | char resp_msg_buf[512];
127 | char data_msg_buf[1024];
128 | int len;
129 | fd_set fd_r;
130 | int gst_in_fp = -1;
131 | int gst_out_fp = -1;
132 | pid_t gst_pid = -1;
133 | int just_connect = 0;
134 | char *gst_sink;
135 | int no_data_count = 0;
136 |
137 | if (argc != 2 || strlen(argv[1]) <= 0) {
138 | perror("Missing sink setting");
139 | return -1;
140 | }
141 |
142 | gst_sink = argv[1];
143 | printf("Using sink: %s\n", gst_sink);
144 |
145 | #if USE_FIFO
146 | unlink(FIFO_PATH);
147 | if (mkfifo(FIFO_PATH, 0666) < 0) {
148 | perror("Error when creating fifo");
149 | return 0;
150 | }
151 | #endif
152 |
153 | #ifdef CLIENT_MODE
154 | if ((tcp_client_sock = socket(AF_INET, SOCK_STREAM, 0)) < 0) {
155 | perror("Error when creating tcp socket");
156 | return 0;
157 | }
158 |
159 | memset((char *)&peer_addr, 0, sizeof(peer_addr));
160 | peer_addr.sin_family = AF_INET;
161 | peer_addr.sin_addr.s_addr = inet_addr("127.0.0.1");
162 | peer_addr.sin_port = htons(LOCAL_SERVER_PORT);
163 |
164 | if (connect(tcp_client_sock, (const struct sockaddr *)&peer_addr, sizeof(peer_addr)) < 0) {
165 | perror("Error when connecting to remote");
166 | return 0;
167 | }
168 | if (send(tcp_client_sock, "mirror\n", 7, 0) < 0) {
169 | perror("Error when sending mirror command");
170 | return 0;
171 | }
172 | just_connect = 1;
173 |
174 | #else
175 | udp_sock = setup_udp_socket();
176 | if ((tcp_sock = socket(AF_INET, SOCK_STREAM, 0)) < 0) {
177 | perror("Error when creating tcp socket");
178 | return 0;
179 | }
180 |
181 | memset((char *)&my_addr, 0, sizeof(my_addr));
182 | my_addr.sin_family = AF_INET;
183 | my_addr.sin_addr.s_addr = htonl(INADDR_ANY);
184 | my_addr.sin_port = htons(DISCOVER_PORT);
185 |
186 | if (bind(tcp_sock, (struct sockaddr *)&my_addr, sizeof(my_addr)) < 0) {
187 | perror("Error when binding tcp socket");
188 | return 0;
189 | }
190 |
191 | if (listen(tcp_sock, 3) < 0) {
192 | perror("Error when listening tcp socket");
193 | return 0;
194 | }
195 | #endif
196 |
197 | for (;;) {
198 | int timeout = 0;
199 | struct timeval tv;
200 | // set connect timeout
201 | tv.tv_sec = 3;
202 | tv.tv_usec = 0;
203 | FD_ZERO(&fd_r);
204 | FD_SET(udp_sock, &fd_r);
205 | FD_SET(tcp_sock, &fd_r);
206 | if (tcp_sock > udp_sock) {
207 | max_sock = tcp_sock;
208 | } else {
209 | max_sock = udp_sock;
210 | }
211 | if (tcp_client_sock > 0) {
212 | FD_SET(tcp_client_sock, &fd_r);
213 | if (tcp_client_sock > max_sock) {
214 | max_sock = tcp_client_sock;
215 | }
216 | }
217 | switch (select(max_sock + 1, &fd_r, NULL, NULL, &tv)) {
218 | case -1:
219 | printf("error occur, %s\n", strerror(errno));
220 | break;
221 | case 0:
222 |                 timeout = 1; /* intentional fall-through: the default case handles the idle-timeout bookkeeping below */
223 | default: {
224 | if (FD_ISSET(udp_sock, &fd_r)) {
225 | size_t aux[128 / sizeof(size_t)];
226 | char broadcast_msg_buf[128];
227 | struct iovec io;
228 | struct msghdr msg;
229 | struct cmsghdr *cmsg;
230 | io.iov_base = broadcast_msg_buf;
231 | io.iov_len = sizeof(broadcast_msg_buf);
232 | memset(&msg, 0, sizeof(msg));
233 | msg.msg_iov = &io;
234 | msg.msg_iovlen = 1;
235 | msg.msg_control = aux;
236 | msg.msg_controllen = sizeof(aux);
237 | msg.msg_flags = 0;
238 | msg.msg_name = &peer_addr;
239 | msg.msg_namelen = sizeof(peer_addr);
240 | len = recvmsg(udp_sock, &msg, 0);
241 | if (len < 0) {
242 | printf("Error when receiving data from discover socket, errno: %s\n", strerror(errno));
243 | close(udp_sock);
244 | udp_sock = setup_udp_socket();
245 | break;
246 | }
247 | printf("Receive broadcast msg: %s from: %s:%d\n", broadcast_msg_buf, inet_ntoa(peer_addr.sin_addr), ntohs(peer_addr.sin_port));
248 | if (!strncmp(broadcast_msg_buf, DISCOVER_MSG, 5)) {
249 | printf("Receive discover msg: %s, from: %s\n", broadcast_msg_buf, inet_ntoa(peer_addr.sin_addr));
250 | for (cmsg = CMSG_FIRSTHDR(&msg); cmsg != NULL; cmsg = CMSG_NXTHDR(&msg, cmsg)) {
251 | if (cmsg->cmsg_level == IPPROTO_IP) {
252 | struct in_pktinfo *i = (struct in_pktinfo*) CMSG_DATA(cmsg);
253 | printf("Response discover msg with local ip: %s\n", inet_ntoa(i->ipi_spec_dst));
254 | memset(resp_msg_buf, 0, sizeof(resp_msg_buf));
255 | snprintf(resp_msg_buf, sizeof(resp_msg_buf), DISCOVER_MSG_TEMPLATE, DISCOVER_PORT, inet_ntoa(i->ipi_spec_dst), inet_ntoa(i->ipi_spec_dst));
256 | if (sendto(udp_sock, resp_msg_buf, strlen(resp_msg_buf), 0, (struct sockaddr *)&peer_addr, sizeof(peer_addr)) < 0) {
257 | printf("Error when send discover response to peer\n");
258 | }
259 | }
260 | }
261 | }
262 | } else if (FD_ISSET(tcp_sock, &fd_r)) {
263 | if (tcp_client_sock < 0) {
264 | addr_len = sizeof(peer_addr);
265 | tcp_client_sock = accept(tcp_sock, (struct sockaddr *)&peer_addr, &addr_len);
266 | if (tcp_client_sock < 0) {
267 | printf("Error when accepting client\n");
268 | } else {
269 | just_connect = 1;
270 | printf("Accept peer addr: %s:%d\n", inet_ntoa(peer_addr.sin_addr), ntohs(peer_addr.sin_port));
271 | if (!strncmp(gst_sink, "ffplay", 6)) {
272 | #if USE_FIFO
273 | const char *command[] = {"ffplay", "-framerate", "50", "-infbuf", "-framedrop", "-analyzeduration", "1", FIFO_PATH, NULL};
274 | #else
275 | const char *command[] = {"ffplay", "-framerate", "50", "-infbuf", "-framedrop", "-analyzeduration", "1", "-", NULL};
276 | #endif
277 | gst_pid = popen2(command, &gst_in_fp, &gst_out_fp);
278 | } else {
279 | #if USE_FIFO
280 | char location_buf[32] = {0};
281 | strcat(location_buf, "location=");
282 | strcat(location_buf, FIFO_PATH);
283 | #ifdef VPUDEC
284 | const char *command[] = {"gst-launch-0.10", "filesrc", location_buf, "do-timestamp=true", "!", "video\/x-h264,width=800,height=480,framerate=30\/1", "!", "vpudec", "framedrop=true", "frame-plus=1", "low-latency=true", "!", gst_sink, NULL};
285 | #else
286 | const char *command[] = {"gst-launch-1.0", "filesrc", location_buf, "do-timestamp=true", "!", "h264parse", "!", "decodebin", "!", gst_sink, NULL};
287 | #endif
288 | #else
289 | #ifdef VPUDEC
290 | const char *command[] = {"gst-launch-0.10", "fdsrc", "do-timestamp=true", "!", "video\/x-h264,width=800,height=480,framerate=30\/1", "!", "vpudec", "framedrop=true", "frame-plus=1", "low-latency=true", "!", gst_sink, NULL};
291 | #else
292 | const char *command[] = {"gst-launch-1.0", "fdsrc", "do-timestamp=true", "!", "h264parse", "!", "decodebin", "!", gst_sink, NULL};
293 | //const char *command[] = {"gst-launch-1.0", "fdsrc", "!", "video\/x-h264,width=800,height=480,framerate=0\/1,stream-format=avc", "!", "avdec_h264", "!", gst_sink, NULL};
294 | #endif
295 | #endif
296 | gst_pid = popen2(command, &gst_in_fp, &gst_out_fp);
297 | }
298 | printf("gst pid: %d\n", gst_pid);
299 | #if USE_FIFO
300 | fifo_fp = open(FIFO_PATH, O_WRONLY);
301 | printf("fifo_fp: %d\n", fifo_fp);
302 | #endif
303 | }
304 | } else {
305 | printf("Could not accept client, another connection still exist\n");
306 | }
307 | } else if (tcp_client_sock > 0 && FD_ISSET(tcp_client_sock, &fd_r)) {
308 | memset(data_msg_buf, 0, sizeof(data_msg_buf));
309 | len = read(tcp_client_sock, data_msg_buf, sizeof(data_msg_buf));
310 | //printf("Receive data len: %d\n", len);
311 | if (len > 0) {
312 | no_data_count = 0;
313 | } else {
314 | no_data_count++;
315 | }
316 | if (len < 0 || no_data_count > 2) {
317 | printf("Failed to receive from tcp client socket, close the socket\n");
318 | close(tcp_client_sock);
319 | tcp_client_sock = -1;
320 | if (gst_pid > 0) {
321 | kill(gst_pid, SIGKILL);
322 | waitpid(gst_pid, NULL, 0);
323 | gst_pid = -1;
324 | gst_in_fp = -1;
325 | gst_out_fp = -1;
326 | }
327 | if (fifo_fp > 0) {
328 | close(fifo_fp);
329 | fifo_fp = -1;
330 | }
331 | #ifdef CLIENT_MODE
332 | return 0;
333 | #endif
334 | } else {
335 | if (just_connect && strstr(data_msg_buf, "\r\n")) {
336 | int width = 800;
337 | int height = 480;
338 | printf("Receive control data(%u): %s\n", len, data_msg_buf);
339 | char *control_end = strstr(data_msg_buf, "\r\n\r\n");
340 | int bdata_len = 0;
341 | if (control_end + 4 - data_msg_buf > 0) {
342 | bdata_len = len - (control_end + 4 - data_msg_buf);
343 | control_end = control_end + 4;
344 | }
345 | char *info = strtok(data_msg_buf, "\r\n");
346 | while (info) {
347 | //printf("info: %s\n", info);
348 | if (strstr(info, "X-WIDTH:")) {
349 | width = atoi(strstr(info, " "));
350 | printf("width: %d\n", width);
351 | }
352 | if (strstr(info, "X-HEIGHT:")) {
353 | height = atoi(strstr(info, " "));
354 | printf("height: %d\n", height);
355 | }
356 | info = strtok(NULL, "\r\n");
357 | }
358 |
359 | if (!strncmp(gst_sink, "ffplay", 6)) {
360 | #if USE_FIFO
361 | const char *command[] = {"ffplay", "-framerate", "50", "-infbuf", "-framedrop", "-analyzeduration", "1", FIFO_PATH, NULL};
362 | #else
363 | const char *command[] = {"ffplay", "-framerate", "50", "-infbuf", "-framedrop", "-analyzeduration", "1", "-", NULL};
364 | #endif
365 | gst_pid = popen2(command, &gst_in_fp, &gst_out_fp);
366 | } else {
367 | #if USE_FIFO
368 | char location_buf[32] = {0};
369 | strcat(location_buf, "location=");
370 | strcat(location_buf, FIFO_PATH);
371 | #ifdef VPUDEC
372 | char mime_buf[70] = {0};
373 | snprintf(mime_buf, 70, "video\/x-h264,width=%d,height=%d,framerate=30\/1", width, height);
374 | //snprintf(mime_buf, 70, "video\/x-h264,width=%d,height=%d,framerate=30\/1,stream-format=avc", width, height);
375 | printf("Using cap: %s\n", mime_buf);
376 | const char *command[] = {"gst-launch-0.10", "filesrc", location_buf, "do-timestamp=true", "!", mime_buf, "!", "vpudec", "framedrop=true", "frame-plus=1", "low-latency=true", "!", gst_sink, NULL};
377 | #else
378 | const char *command[] = {"gst-launch-1.0", "filesrc", location_buf, "do-timestamp=true", "!", "h264parse", "!", "decodebin", "!", gst_sink, NULL};
379 | #endif
380 | #else
381 | #ifdef VPUDEC
382 | char mime_buf[70] = {0};
383 | snprintf(mime_buf, 70, "video\/x-h264,width=%d,height=%d,framerate=30\/1", width, height);
384 | //snprintf(mime_buf, 70, "video\/x-h264,width=%d,height=%d,framerate=30\/1,stream-format=avc", width, height);
385 | printf("Using cap: %s\n", mime_buf);
386 | const char *command[] = {"gst-launch-0.10", "fdsrc", "do-timestamp=true", "!", mime_buf, "!", "vpudec", "framedrop=false", "frame-plus=1", "low-latency=true", "!", gst_sink, NULL};
387 | #else
388 | const char *command[] = {"gst-launch-1.0", "fdsrc", "do-timestamp=true", "!", "h264parse", "!", "decodebin", "!", gst_sink, NULL};
389 | #endif
390 | #endif
391 |
392 | gst_pid = popen2(command, &gst_in_fp, &gst_out_fp);
393 | }
394 | printf("gst pid: %d\n", gst_pid);
395 | printf("gst in fp: %d\n", gst_in_fp);
396 | #if USE_FIFO
397 | fifo_fp = open(FIFO_PATH, O_WRONLY);
398 | printf("fifo_fp: %d\n", fifo_fp);
399 | #endif
400 |
401 | just_connect = 0;
402 | if (bdata_len > 0) {
403 | #if USE_FIFO
404 | if (fifo_fp > 0) {
405 | len = write(fifo_fp, control_end, bdata_len);
406 | printf("Write non control data len: %d\n", len);
407 | }
408 | #else
409 | if (gst_in_fp > 0) {
410 | len = write(gst_in_fp, control_end, bdata_len);
411 | printf("Write non control data len: %d\n", len);
412 | }
413 | #endif
414 | }
415 | } else {
416 | #if USE_FIFO
417 | if (fifo_fp > 0) {
418 | len = write(fifo_fp, data_msg_buf, len);
419 | //printf("Write to fifo len: %d\n", len);
420 | if (len < 0) {
421 | printf("Pipe input error: %s\n", strerror(errno));
422 | }
423 | }
424 | #else
425 | if (gst_in_fp > 0) {
426 | len = write(gst_in_fp, data_msg_buf, len);
427 | //printf("Piped len: %d\n", len);
428 | if (len < 0) {
429 | printf("Pipe input error: %s\n", strerror(errno));
430 | }
431 | }
432 | #endif
433 | }
434 | }
435 | } else {
436 | if (timeout) {
437 | if (gst_pid > 0) {
438 | no_data_count++;
439 | // 3 * 10 = 30 seconds
440 | if (no_data_count > 10) {
441 | printf("No data for casting after 30 seconds, close the socket and receiver\n");
442 | if (tcp_client_sock > 0) {
443 | close(tcp_client_sock);
444 | tcp_client_sock = -1;
445 | }
446 | if (gst_pid > 0) {
447 | kill(gst_pid, SIGKILL);
448 | waitpid(gst_pid, NULL, 0);
449 | gst_pid = -1;
450 | gst_in_fp = -1;
451 | gst_out_fp = -1;
452 | }
453 | if (fifo_fp > 0) {
454 | close(fifo_fp);
455 | fifo_fp = -1;
456 | }
457 | }
458 | }
459 | }
460 | }
461 | }
462 | }
463 | }
464 |
465 | return 0;
466 | }
467 |
--------------------------------------------------------------------------------
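As the parsing above shows, a casting session begins with an ASCII control block terminated by a blank line ("\r\n\r\n") and carrying fields such as X-WIDTH: and X-HEIGHT:, followed by the raw H.264 elementary stream on the same TCP connection (port 53515 in server mode). The sketch below is a hypothetical minimal sink written against that framing, not code from this project: it strips the control block and dumps the payload to video.h264, which can then be played back with a tool such as ffplay.

import java.io.BufferedInputStream;
import java.io.ByteArrayOutputStream;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.ServerSocket;
import java.net.Socket;

// Hypothetical minimal sink: accept one sender on TCP 53515, read the
// "\r\n\r\n"-terminated control block, then dump the raw H.264 stream to a file.
public class DumpReceiver {
    public static void main(String[] args) throws Exception {
        try (ServerSocket server = new ServerSocket(53515);
             Socket client = server.accept();
             InputStream in = new BufferedInputStream(client.getInputStream());
             OutputStream out = new FileOutputStream("video.h264")) {
            // Read the ASCII control block one byte at a time until the blank line.
            ByteArrayOutputStream header = new ByteArrayOutputStream();
            int c;
            while ((c = in.read()) != -1) {
                header.write(c);
                String h = header.toString("US-ASCII");
                if (h.endsWith("\r\n\r\n")) {
                    System.out.println("Control data:\n" + h); // includes X-WIDTH: / X-HEIGHT:
                    break;
                }
            }
            // Everything after the control block is the raw H.264 elementary stream.
            byte[] buf = new byte[4096];
            int n;
            while ((n = in.read(buf)) > 0) {
                out.write(buf, 0, n);
            }
        }
    }
}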
/receiver/cs_receiver.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | """
4 | /*
5 | * Copyright (C) 2016 Jones Chi
6 | *
7 | * Licensed under the Apache License, Version 2.0 (the "License");
8 | * you may not use this file except in compliance with the License.
9 | * You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 | """
20 |
21 | from threading import Thread
22 | from subprocess import Popen, PIPE, STDOUT
23 | import select, socket
24 | import SocketServer
25 |
26 | HOST = ''
27 | PORT = 53515
28 | IP = '192.168.0.11'
29 |
30 | bufferSize = 1024
31 | meta_data = '{"port":%d,"name":"PyReceiver @ %s","id":"%s","width":1280,"height":960,"mirror":"h264","audio":"pcm","subtitles":"text/vtt","proxyHeaders":true,"hls":false,"upsell":true}' % (PORT, IP, IP)
32 |
33 | SAVE_TO_FILE = False
34 | class MyTCPHandler(SocketServer.BaseRequestHandler):
35 | def handle(self):
36 | if SAVE_TO_FILE:
37 | f = open('video.raw', 'wb')
38 | p = Popen(['ffplay', '-framerate', '30', '-'], stdin=PIPE, stdout=PIPE)
39 | #p = Popen(['gst-launch-1.0', 'fdsrc', '!', 'h264parse', '!', 'avdec_h264', '!', 'autovideosink'], stdin=PIPE, stdout=PIPE)
40 | skiped_metadata = False
41 | while True:
42 | data = self.request.recv(bufferSize)
43 | if data == None or len(data) <= 0:
44 | break
45 | if not skiped_metadata:
46 | print "Client connected, addr: ", self.client_address[0]
47 | if data.find('\r\n\r\n') > 0:
48 | last_ctrl = data.find('\r\n\r\n') + 4
49 | print 'Recv control data: ', data[0:last_ctrl]
50 | if len(data) > last_ctrl:
51 | p.stdin.write(data[last_ctrl:])
52 | if SAVE_TO_FILE:
53 | f.write(data[last_ctrl:])
54 | skiped_metadata = True
55 | else:
56 | p.stdin.write(data)
57 | if SAVE_TO_FILE:
58 | f.write(data)
59 | p.kill()
60 | if SAVE_TO_FILE:
61 | f.close()
62 |
63 | def resp_hello(ip, port):
64 | send_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
65 | send_sock.sendto(meta_data, (ip, port))
66 |
67 | def handle_discovery():
68 | s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
69 | s.bind(('', PORT))
70 | s.setblocking(0)
71 | while True:
72 | result = select.select([s],[],[])
73 | if len(result[0]) <= 0:
74 | continue
75 | msg, address = result[0][0].recvfrom(bufferSize)
76 | print 'Receive broadcast msg: ', msg
77 | if msg == 'hello':
78 | print 'Got discover msg, src ip: %s, port: %d' % (address[0], address[1])
79 | resp_hello(address[0], address[1])
80 |
81 |
82 | if __name__ == "__main__":
83 | server = SocketServer.TCPServer((HOST, PORT), MyTCPHandler)
84 | server_thread = Thread(target=server.serve_forever)
85 | server_thread.daemon = True
86 | server_thread.start()
87 |
88 | handle_discovery()
89 | server.shutdown()
90 |
--------------------------------------------------------------------------------
/receiver/cs_receiver_conn.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | """
4 | /*
5 | * Copyright (C) 2016 Jones Chi
6 | *
7 | * Licensed under the Apache License, Version 2.0 (the "License");
8 | * you may not use this file except in compliance with the License.
9 | * You may obtain a copy of the License at
10 | *
11 | * http://www.apache.org/licenses/LICENSE-2.0
12 | *
13 | * Unless required by applicable law or agreed to in writing, software
14 | * distributed under the License is distributed on an "AS IS" BASIS,
15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | * See the License for the specific language governing permissions and
17 | * limitations under the License.
18 | */
19 | """
20 |
21 | from subprocess import Popen, PIPE, STDOUT
22 | import socket
23 |
24 | PORT = 53516
25 | bufferSize = 1024
26 |
27 | SAVE_TO_FILE = False
28 | def connect_to_server():
29 | sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
30 | server_address = ('localhost', PORT)
31 | print 'Connecting to %s port %s' % server_address
32 | sock.connect(server_address)
33 | try:
34 | # Send data
35 | message = 'mirror\n'
36 | print 'Sending mirror cmd'
37 | sock.sendall(message)
38 |
39 | if SAVE_TO_FILE:
40 | f = open('video_client.raw', 'wb')
41 | p = Popen(['ffplay', '-framerate', '30', '-'], stdin=PIPE, stdout=PIPE)
42 | #p = Popen(['gst-launch-1.0', 'fdsrc', '!', 'h264parse', '!', 'avdec_h264', '!', 'autovideosink'], stdin=PIPE, stdout=PIPE)
43 | skiped_metadata = False
44 | while True:
45 | data = sock.recv(bufferSize)
46 | if data == None or len(data) <= 0:
47 | break
48 | if not skiped_metadata:
49 | if data.find('\r\n\r\n') > 0:
50 | last_ctrl = data.find('\r\n\r\n') + 4
51 | print 'Recv control data: ', data[0:last_ctrl]
52 | if len(data) > last_ctrl:
53 | p.stdin.write(data[last_ctrl:])
54 | if SAVE_TO_FILE:
55 | f.write(data[last_ctrl:])
56 | skiped_metadata = True
57 | else:
58 | p.stdin.write(data)
59 | if SAVE_TO_FILE:
60 | f.write(data)
61 | p.kill()
62 | if SAVE_TO_FILE:
63 | f.close()
64 |
65 | finally:
66 | sock.close()
67 |
68 | if __name__ == "__main__":
69 | connect_to_server()
70 |
--------------------------------------------------------------------------------
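In this pull ("conn") mode the receiver side opens the connection itself: wait_adb_conn.sh maps host port 53516 to port 53515 on the device with adb forward, the client connects to localhost:53516, sends the "mirror\n" command, and then reads the same control-block-plus-H.264 stream as in server mode. Below is a hypothetical Java equivalent of this script, assuming the adb forward is already in place (the class name is a placeholder); it is not code from this project.

import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.Socket;

// Hypothetical pull-mode client: via "adb forward tcp:53516 tcp:53515" ask the
// phone to start mirroring, then dump everything it sends (control block + H.264).
public class ConnReceiver {
    public static void main(String[] args) throws Exception {
        try (Socket sock = new Socket("localhost", 53516);
             OutputStream req = sock.getOutputStream();
             InputStream in = sock.getInputStream();
             OutputStream out = new FileOutputStream("video_client.raw")) {
            req.write("mirror\n".getBytes("US-ASCII")); // same command cs_receiver_conn.py sends
            req.flush();
            byte[] buf = new byte[4096];
            int n;
            while ((n = in.read(buf)) > 0) {
                out.write(buf, 0, n);
            }
        }
    }
}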
/receiver/wait_adb.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | killall cs_receiver
4 | ./cs_receiver autovideosink &
5 |
6 | while [ true ];
7 | do
8 | echo "wait adb device"
9 | adb wait-for-device
10 | echo "adb device presents, reverse port"
11 | adb reverse tcp:53515 tcp:53515
12 | while [ true ];
13 | do
14 | if [ "`adb get-state`" = "device" ]; then
15 | echo "got dev"
16 | sleep 3
17 | else
18 | break
19 | fi
20 | done
21 | done
22 |
--------------------------------------------------------------------------------
/receiver/wait_adb_arm.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | killall cs_receiver_arm
4 | ./cs_receiver_arm mfw_v4lsink &
5 |
6 | while [ true ];
7 | do
8 | echo "wait adb device"
9 | ./adb wait-for-device
10 | echo "adb device presents, reverse port"
11 | ./adb reverse tcp:53515 tcp:53515
12 | ./adb reverse tcp:2222 tcp:22
13 | while [ true ];
14 | do
15 | if [ "`./adb get-state`" = "device" ]; then
16 | sleep 3
17 | else
18 | break
19 | fi
20 | done
21 | done
22 |
--------------------------------------------------------------------------------
/receiver/wait_adb_conn.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | while [ true ];
4 | do
5 | echo "wait adb device"
6 | adb wait-for-device
7 | echo "adb device presents, try to send mirror cmd"
8 | adb forward tcp:53516 tcp:53515
9 | #python cs_receiver_conn.py
10 | #./cs_receiver_conn ffplay
11 | ./cs_receiver_conn autovideosink
12 | echo "connect failed, sleep 3 seconds"
13 | sleep 3
14 | done
15 |
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
1 | include ':app'
2 |
--------------------------------------------------------------------------------