├── README.md
├── controller.py
├── controller.service
├── provision.sh
├── requirements.txt
├── server
│   ├── detection.js
│   ├── index.html
│   ├── main.js
│   └── signalling.js
└── uv4l-raspicam.conf
/README.md:
--------------------------------------------------------------------------------
1 | # vierjavibot
2 |
3 | This project was originally developed in **48 hours** during the 2018 edition of the Tryolabs Hackathon.
4 |
5 | Please take into account that the code quality was heavily influenced by the limited time available to complete the project.
6 | We are sharing it in the hope that it may be useful to those interested in these topics.
7 |
8 | If you want to know more about it, please check out [this blog post](https://tryolabs.com/blog/hackathon-robot-remote-work-iot-computer-vision/).
9 |
10 | ---
11 |
12 | This readme is a **work in progress**; we are working on adding technical details and instructions so that you can build your own remotely controlled robot.
13 |
--------------------------------------------------------------------------------
/controller.py:
--------------------------------------------------------------------------------
1 | """
2 | This script is called by the controller.service
3 | """
4 | import json
5 | import socket
6 | import os
7 | import RPi.GPIO as GPIO
8 | import pigpio
9 |
10 | socket_path = '/tmp/uv4l.socket'
11 |
12 | try:
13 | os.unlink(socket_path)
14 | except OSError:
15 | if os.path.exists(socket_path):
16 | raise
17 |
18 | s = socket.socket(socket.AF_UNIX, socket.SOCK_SEQPACKET)
19 |
20 |
21 | # print('socket_path: %s' % socket_path)
22 | s.bind(socket_path)
23 | s.listen(1)
24 |
25 |
26 | def cleanup():
27 |     pass  # placeholder: GPIO.cleanup() could be called here to release the pins
28 |
29 |
30 | class Wheels(object):
31 |
32 | def __init__(
33 | self, r_wheel_forward=6, r_wheel_backward=13, l_wheel_forward=19, l_wheel_backward=26):
34 | self.r_wheel_forward = r_wheel_forward
35 | self.r_wheel_backward = r_wheel_backward
36 | self.l_wheel_forward = l_wheel_forward
37 | self.l_wheel_backward = l_wheel_backward
38 |
39 | # Setup motors
40 | GPIO.setmode(GPIO.BCM)
41 | GPIO.setup(r_wheel_forward, GPIO.OUT)
42 | GPIO.setup(r_wheel_backward, GPIO.OUT)
43 | GPIO.setup(l_wheel_forward, GPIO.OUT)
44 | GPIO.setup(l_wheel_backward, GPIO.OUT)
45 |
46 | # Turn all motors off
47 | GPIO.output(r_wheel_forward, GPIO.LOW)
48 | GPIO.output(r_wheel_backward, GPIO.LOW)
49 | GPIO.output(l_wheel_forward, GPIO.LOW)
50 | GPIO.output(l_wheel_backward, GPIO.LOW)
51 |
52 | def _spin_right_wheel_forward(self):
53 | GPIO.output(self.r_wheel_forward, GPIO.HIGH)
54 | GPIO.output(self.r_wheel_backward, GPIO.LOW)
55 |
56 | def _spin_right_wheel_backward(self):
57 | GPIO.output(self.r_wheel_backward, GPIO.HIGH)
58 | GPIO.output(self.r_wheel_forward, GPIO.LOW)
59 |
60 | def _stop_right_wheel(self):
61 | GPIO.output(self.r_wheel_backward, GPIO.LOW)
62 | GPIO.output(self.r_wheel_forward, GPIO.LOW)
63 |
64 | def _spin_left_wheel_forward(self):
65 | GPIO.output(self.l_wheel_forward, GPIO.HIGH)
66 | GPIO.output(self.l_wheel_backward, GPIO.LOW)
67 |
68 | def _spin_left_wheel_backward(self):
69 | GPIO.output(self.l_wheel_backward, GPIO.HIGH)
70 | GPIO.output(self.l_wheel_forward, GPIO.LOW)
71 |
72 | def _stop_left_wheel(self):
73 | GPIO.output(self.l_wheel_backward, GPIO.LOW)
74 | GPIO.output(self.l_wheel_forward, GPIO.LOW)
75 |
76 | def go_fw(self):
77 | self._spin_left_wheel_forward()
78 | self._spin_right_wheel_forward()
79 |
80 | def go_fw_left(self):
81 | self._stop_left_wheel()
82 | self._spin_right_wheel_forward()
83 |
84 | def go_fw_right(self):
85 | self._spin_left_wheel_forward()
86 | self._stop_right_wheel()
87 |
88 | def go_bw(self):
89 | self._spin_left_wheel_backward()
90 | self._spin_right_wheel_backward()
91 |
92 | def go_bw_right(self):
93 | self._spin_left_wheel_backward()
94 | self._stop_right_wheel()
95 |
96 | def go_bw_left(self):
97 | self._stop_left_wheel()
98 | self._spin_right_wheel_backward()
99 |
100 | def stop(self):
101 | self._stop_left_wheel()
102 | self._stop_right_wheel()
103 |
104 | def turn_right(self):
105 | self._spin_left_wheel_forward()
106 | self._spin_right_wheel_backward()
107 |
108 | def turn_left(self):
109 | self._spin_left_wheel_backward()
110 | self._spin_right_wheel_forward()
111 |
112 |
113 | class Camera:
114 | CENTER = 40000
115 | UP_LIMIT = 80000
116 | DOWN_LIMIT = 30000
117 | STEP = 5000
118 |
119 | def __init__(self, servo=18, freq=50):
120 | self.servo = servo
121 | self.freq = freq
122 | self.pi = pigpio.pi()
123 |
124 | self.angle = self.CENTER
125 | self._set_angle()
126 |
127 | def _set_angle(self):
128 | self.pi.hardware_PWM(self.servo, self.freq, self.angle)
129 |
130 | def up(self):
131 | if self.angle + self.STEP < self.UP_LIMIT:
132 | self.angle += self.STEP
133 | self._set_angle()
134 |
135 | def down(self):
136 | if self.angle - self.STEP > self.DOWN_LIMIT:
137 | self.angle -= self.STEP
138 | self._set_angle()
139 |
140 |
141 | MAX_MESSAGE_SIZE = 4096
142 |
143 | if __name__ == "__main__":
144 | while True:
145 | wheels = Wheels()
146 | camera = Camera()
147 | print('awaiting connection...')
148 | connection, client_address = s.accept()
149 | print('client_address %s' % client_address)
150 | try:
151 | print('established connection with', client_address)
152 |
153 | while True:
154 | message = connection.recv(MAX_MESSAGE_SIZE)
155 | # print('message: {}'.format(message))
156 | if not message:
157 | break
158 | data = json.loads(message.decode('utf-8'))
159 |
160 | if 'commands' in data:
161 | if 'FORDWARD' in data['commands']:
162 | if 'RIGHT' in data['commands']:
163 | wheels.go_fw_right()
164 | elif 'LEFT' in data['commands']:
165 | wheels.go_fw_left()
166 | else:
167 | wheels.go_fw()
168 | elif 'BACKWARD' in data['commands']:
169 | if 'RIGHT' in data['commands']:
170 | wheels.go_bw_right()
171 | elif 'LEFT' in data['commands']:
172 | wheels.go_bw_left()
173 | else:
174 | wheels.go_bw()
175 | else:
176 | if 'RIGHT' in data['commands']:
177 | wheels.turn_right()
178 | elif 'LEFT' in data['commands']:
179 | wheels.turn_left()
180 | else:
181 | wheels.stop()
182 |
183 | if 'UP' in data['commands']:
184 | camera.up()
185 | elif 'DOWN' in data['commands']:
186 | camera.down()
187 |
188 | print('connection closed')
189 |
190 | finally:
191 | # Clean up the connection
192 | cleanup()
193 | connection.close()
194 |
--------------------------------------------------------------------------------
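A quick way to exercise the controller without the browser front end is to talk to its Unix socket directly. The sketch below is not part of the project: it assumes `controller.py` is already running (and therefore listening on `/tmp/uv4l.socket`) and simply reuses the JSON message format that the parsing loop above expects; the `send_commands` helper and the one-second pauses are illustrative only. In the deployed setup the browser never opens this socket itself; the data-channel messages produced by `server/signalling.js` are presumably relayed to it by the UV4L streaming server (see the commented-out `--enable-webrtc-datachannels` / `--webrtc-datachannel-socket` options in `uv4l-raspicam.conf`).

```python
import json
import socket
import time

SOCKET_PATH = '/tmp/uv4l.socket'  # same path controller.py binds to

# controller.py uses a SOCK_SEQPACKET Unix socket, so the client must match it
client = socket.socket(socket.AF_UNIX, socket.SOCK_SEQPACKET)
client.connect(SOCKET_PATH)


def send_commands(commands):
    # controller.py expects one JSON object per packet with a "commands" list,
    # e.g. {"commands": ["FORDWARD", "RIGHT"]} (spelling as in the original code)
    client.send(json.dumps({'commands': commands}).encode('utf-8'))


send_commands(['FORDWARD'])           # both wheels forward
time.sleep(1)                         # illustrative pause
send_commands(['FORDWARD', 'RIGHT'])  # veer right (left wheel only)
time.sleep(1)
send_commands([])                     # an empty command list stops the wheels
client.close()                        # controller.py then waits for a new connection
```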
/controller.service:
--------------------------------------------------------------------------------
1 | [Unit]
2 | Description=Bot controller
3 | After=uv4l_raspicam.service
4 |
5 | [Service]
6 | User=pi
7 | Group=pi
8 | WorkingDirectory=/home/pi/repo
9 | ExecStart=/usr/bin/python3 /home/pi/repo/controller.py
10 | ExecReload=/bin/kill -s HUP $MAINPID
11 | ExecStop=/bin/kill -s TERM $MAINPID
12 |
13 | [Install]
14 | WantedBy=multi-user.target
15 |
--------------------------------------------------------------------------------
/provision.sh:
--------------------------------------------------------------------------------
1 | # basic
2 | sudo apt-get update
3 | sudo apt-get install -y vim git python3-pip
4 |
5 | # uv4l installation
6 | curl http://www.linux-projects.org/listing/uv4l_repo/lpkey.asc | sudo apt-key add -
7 | echo "deb http://www.linux-projects.org/listing/uv4l_repo/raspbian/stretch stretch main" >>/etc/apt/sources.list
8 | sudo apt-get install -y uv4l uv4l-raspicam uv4l-raspicam-extras uv4l-dummy
9 |
10 | # install RPi.GPIO
11 | sudo apt-get install -y python3-rpi.gpio
12 |
13 | pip3 install -r requirements.txt
14 |
15 | sudo mv /etc/uv4l/uv4l-raspicam.conf /etc/uv4l/uv4l-raspicam.conf.bak
16 | sudo mv uv4l-raspicam.conf /etc/uv4l/uv4l-raspicam.conf
17 |
18 | sudo cp controller.service /etc/systemd/system/
19 |
20 | sudo systemctl enable controller.service
21 | sudo systemctl start controller.service
22 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | aiohttp==3.3.2
2 | ipdb==0.11
3 | ipython==6.4.0
4 | RPi.GPIO==0.6.3
5 | pigpio  # imported by controller.py; not pinned here, and may already be provided by the OS's python3-pigpio package
6 |
--------------------------------------------------------------------------------
/server/detection.js:
--------------------------------------------------------------------------------
1 | const color = 'aqua';
2 | const lineWidth = 2;
3 |
4 | function toTuple({ y, x }) {
5 | return [y, x];
6 | }
7 |
8 | function drawSegment([ay, ax], [by, bx], color, scale, ctx) {
9 | ctx.beginPath();
10 | ctx.moveTo(ax * scale, ay * scale);
11 | ctx.lineTo(bx * scale, by * scale);
12 | ctx.lineWidth = lineWidth;
13 | ctx.strokeStyle = color;
14 | ctx.stroke();
15 | }
16 |
17 | /**
18 | * Draws a pose skeleton by looking up all adjacent keypoints/joints
19 | */
20 | function drawSkeleton(keypoints, minConfidence, ctx, scale = 1) {
21 | const adjacentKeyPoints = posenet.getAdjacentKeyPoints(keypoints, minConfidence);
22 |
23 | adjacentKeyPoints.forEach(keypoints => {
24 | drawSegment(toTuple(keypoints[0].position), toTuple(keypoints[1].position), color, scale, ctx);
25 | });
26 | }
27 |
28 | /**
29 | * Used by the drawHeatMapValues method to draw heatmap points on to
30 | * the canvas
31 | */
32 | function drawPoints(ctx, points, radius, color) {
33 | const data = points.buffer().values;
34 |
35 | for (let i = 0; i < data.length; i += 2) {
36 | const pointY = data[i];
37 | const pointX = data[i + 1];
38 |
39 | if (pointX !== 0 && pointY !== 0) {
40 | ctx.beginPath();
41 | ctx.arc(pointX, pointY, radius, 0, 2 * Math.PI);
42 | ctx.fillStyle = color;
43 | ctx.fill();
44 | }
45 | }
46 | }
47 |
48 | function drawPoint(ctx, y, x, r, color) {
49 | ctx.beginPath();
50 | ctx.arc(x, y, r, 0, 2 * Math.PI);
51 | ctx.fillStyle = color;
52 | ctx.fill();
53 | }
54 |
55 | /**
56 | * Draw pose keypoints onto a canvas
57 | */
58 | function drawKeypoints(keypoints, minConfidence, ctx, scale = 1) {
59 | for (let i = 0; i < keypoints.length; i++) {
60 | const keypoint = keypoints[i];
61 |
62 | if (keypoint.score < minConfidence) {
63 | continue;
64 | }
65 |
66 | const { y, x } = keypoint.position;
67 | drawPoint(ctx, y * scale, x * scale, 3, color);
68 | }
69 | }
70 |
71 | async function loadNet() {
72 | return await posenet.load(1.0);
73 | }
74 |
75 | async function detectBody(canvas, net) {
76 | if (net){
77 | var ctx = canvas.getContext('2d');
78 | var imageElement = ctx.getImageData(0, 0, canvas.width, canvas.height);
79 |
80 | var imageScaleFactor = 0.5;
81 | var flipHorizontal = false;
82 | var outputStride = 16;
83 | var maxPoseDetections = 2;
84 | var poses = await net.estimateMultiplePoses(
85 | imageElement,
86 | imageScaleFactor,
87 | flipHorizontal,
88 | outputStride,
89 | maxPoseDetections
90 | )
91 | return poses;
92 | } else {
93 | return net;
94 | }
95 |
96 | }
97 |
--------------------------------------------------------------------------------
/server/index.html:
--------------------------------------------------------------------------------
[NOTE: the HTML markup of index.html was lost when this listing was generated; only stray visible text survived. The recoverable information: the page title and main heading are "Vier[javiber]-bot". Judging from server/main.js and server/detection.js, the page also defines the video element `v`, the canvas `c`, the `address` input, the `start`, `stop`, `load_net` and `effect` buttons and a skeleton toggle, and loads the TensorFlow.js/PoseNet libraries together with signalling.js, detection.js and main.js.]
--------------------------------------------------------------------------------
/server/main.js:
--------------------------------------------------------------------------------
1 | (function () {
2 | var signalObj = null;
3 | var counter = 0;
4 | const minPoseConfidence = 0.10; // 0.20 - 0.15
5 | const minPartConfidence = 0.15; // 0.15 - 0.10
6 | const skip_frame_detection = 3; //
7 | var last_poses = null;
8 | var toggleSkeleton = null;
9 | var net = null;
10 |
11 | window.addEventListener('DOMContentLoaded', function () {
12 | var isStreaming = false;
13 | var start = document.getElementById('start');
14 | var stop = document.getElementById('stop');
15 | var load_net = document.getElementById('load_net');
16 | var video = document.getElementById('v');
17 | var canvas = document.getElementById('c');
18 | var ctx = canvas.getContext('2d');
19 | var effect = document.getElementById('effect');
20 | var isEffectActive = false;
21 |
22 | // Start Streaming
23 | start.addEventListener('click', function (e) {
24 | var address = document.getElementById('address').value;
25 | var protocol = location.protocol === "https:" ? "wss:" : "ws:";
26 | var wsurl = protocol + '//' + address;
27 |
28 | if (!isStreaming) {
29 | signalObj = new signal(wsurl,
30 | function (stream) {
31 | console.log('got a stream!');
32 | //var url = window.URL || window.webkitURL;
33 | //video.src = url ? url.createObjectURL(stream) : stream; // deprecated
34 | video.srcObject = stream;
35 | video.play();
36 | },
37 | function (error) {
38 | alert(error);
39 | },
40 | function () {
41 | console.log('websocket closed. bye bye!');
42 | video.srcObject = null;
43 | //video.src = ''; // deprecated
44 | ctx.clearRect(0, 0, canvas.width, canvas.height);
45 | isStreaming = false;
46 | },
47 | function (message) {
48 | alert(message);
49 | }
50 | );
51 | }
52 | }, false);
53 |
54 | // Stop Streaming
55 | stop.addEventListener('click', function (e) {
56 | if (signalObj) {
57 | signalObj.hangup();
58 | signalObj = null;
59 | }
60 | }, false);
61 |
62 | // Load Net
63 | load_net.addEventListener('click', function (e) {
64 | if (!net) {
65 | console.log("Creating the Body detector");
66 | net = loadNet().then(function (result) {
67 | console.log("LoadNet:", result);
68 | net = result;
69 | load_net.disabled = true;
70 | load_net.style = "color: gainsboro; background-color: gray";
71 | });
72 | } else {
73 | alert("The Net was already loaded");
74 | }
75 | }, false);
76 |
77 | // toggleSkeleton
78 | toggleSkeletonButton.addEventListener('click', function () {
79 | toggleSkeleton = !toggleSkeleton;
80 | console.log("toggleSkeleton:", toggleSkeleton);
81 | }, false);
82 |
83 | // Wait until the video stream can play
84 | video.addEventListener('canplay', function (e) {
85 | if (!isStreaming) {
86 | canvas.setAttribute('width', video.videoWidth);
87 | canvas.setAttribute('height', video.videoHeight);
88 | isStreaming = true;
89 | }
90 | }, false);
91 |
92 |
93 | // Wait for the video to start to play
94 | video.addEventListener('play', function () {
95 |
96 | // Every 33 milliseconds copy the video image to the canvas
97 | setInterval(function () {
98 |
99 | if (video.paused || video.ended) {
100 | return;
101 | }
102 | var w = canvas.getAttribute('width');
103 | var h = canvas.getAttribute('height');
104 | ctx.fillRect(0, 0, w, h);
105 | ctx.drawImage(video, 0, 0, w, h);
106 |
107 | // run detector and draw on canvas
108 | if (isEffectActive) {
109 | if ((counter % skip_frame_detection) == 0) {
110 | counter = 0;
111 | const poses = detectBody(canvas, net).then(function (inner_poses) {
112 | console.log(inner_poses);
113 | last_poses = inner_poses;
114 | });
115 | }
116 | if (last_poses) {
117 | last_poses.forEach(({ score, keypoints }) => {
118 | if (score >= minPoseConfidence) {
119 | if (toggleSkeleton){
120 | drawSkeleton(keypoints, minPartConfidence, ctx);
121 | } else {
122 | drawKeypoints(keypoints, minPartConfidence, ctx);
123 | }
124 | } else {
125 | console.log("discarded due to low confidence")
126 | }
127 | });
128 | }
129 | }
130 | counter += 1;
131 |
132 | }, 33);
133 | }, false);
134 |
135 | // Detection
136 | effect.addEventListener('click', function () {
137 | isEffectActive = !isEffectActive;
138 | console.log("isEffectActive:", isEffectActive);
139 | }, false);
140 | });
141 | })();
142 |
--------------------------------------------------------------------------------
/server/signalling.js:
--------------------------------------------------------------------------------
1 | /*
2 | * window.mozRTCPeerConnection, window.mozRTCSessionDescription, window.mozRTCIceCandidate are now deprecated
3 | */
4 |
5 | RTCPeerConnection = window.RTCPeerConnection || /*window.mozRTCPeerConnection ||*/ window.webkitRTCPeerConnection;
6 | RTCSessionDescription = /*window.mozRTCSessionDescription ||*/ window.RTCSessionDescription;
7 | RTCIceCandidate = /*window.mozRTCIceCandidate ||*/ window.RTCIceCandidate;
8 | navigator.getUserMedia = navigator.getUserMedia || navigator.mozGetUserMedia || navigator.webkitGetUserMedia || navigator.msGetUserMedia;
9 |
10 | function signal(url, onStream, onError, onClose, onMessage) {
11 | if ("WebSocket" in window) {
12 | console.log("opening web socket: " + url);
13 | var ws = new WebSocket(url);
14 | var pc;
15 | var iceCandidates = [];
16 | var hasRemoteDesc = false;
17 | var isFirefox = typeof InstallTrigger !== 'undefined';// Firefox 1.0+
18 |
19 | function addIceCandidates() {
20 | if (hasRemoteDesc) {
21 | iceCandidates.forEach(function (candidate) {
22 | pc.addIceCandidate(candidate,
23 | function () {
24 | console.log("IceCandidate added: " + JSON.stringify(candidate));
25 | },
26 | function (error) {
27 | console.error("addIceCandidate error: " + error);
28 | }
29 | );
30 | });
31 | iceCandidates = [];
32 | }
33 | }
34 |
35 | ws.onopen = function () {
36 | /* First we create a peer connection */
37 | var config = {"iceServers": [{"urls": ["stun:stun.l.google.com:19302"]}]};
38 | var options = {optional: []};
39 | pc = new RTCPeerConnection(config, options);
40 | iceCandidates = [];
41 | hasRemoteDesc = false;
42 |
43 | pc.onicecandidate = function (event) {
44 | if (event.candidate) {
45 | var candidate = {
46 | sdpMLineIndex: event.candidate.sdpMLineIndex,
47 | sdpMid: event.candidate.sdpMid,
48 | candidate: event.candidate.candidate
49 | };
50 | var request = {
51 | what: "addIceCandidate",
52 | data: JSON.stringify(candidate)
53 | };
54 | ws.send(JSON.stringify(request));
55 | } else {
56 | console.log("end of candidates.");
57 | }
58 | };
59 |
60 | if ('ontrack' in pc) {
61 | pc.ontrack = function (event) {
62 | onStream(event.streams[0]);
63 | };
64 | } else { // onaddstream() deprecated
65 | pc.onaddstream = function (event) {
66 | onStream(event.stream);
67 | };
68 | }
69 |
70 | pc.onremovestream = function (event) {
71 | console.log("the stream has been removed: do your stuff now");
72 | };
73 |
74 | pc.ondatachannel = onDataChannel
75 |
76 | /* kindly signal the remote peer that we would like to initiate a call */
77 | var request = {
78 | what: "call",
79 | options: {
80 | // If forced, the hardware codec depends on the arch.
81 | // (e.g. it's H264 on the Raspberry Pi)
82 | // Make sure the browser supports the codec too.
83 | force_hw_vcodec: true,
84 | vformat: 30, /* 30=640x480, 30 fps */
85 | trickle_ice: true
86 | }
87 | };
88 |
89 | localConstraints = {}
90 | localConstraints['audio'] = { mediaSource: "audioCapture" };
91 | //localConstraints['audio'] = isFirefox ? { echoCancellation: true } : { optional: [{ echoCancellation: true }] };
92 | if (localConstraints.audio) {
93 | if (navigator.getUserMedia) {
94 | navigator.getUserMedia(localConstraints, function (stream) {
95 | if (stream) {
96 | pc.addStream(stream);
97 | }
98 | // localVideoElement.muted = true;
99 | //localVideoElement.src = URL.createObjectURL(stream); // deprecated
100 | // localVideoElement.srcObject = stream;
101 | // localVideoElement.play();
102 | }, function (error) {
103 | stop();
104 | alert("An error has occurred. Check media device, permissions on media and origin.");
105 | console.error(error);
106 | });
107 | } else {
108 | console.log("getUserMedia not supported");
109 | }
110 | }
111 | console.log("send message " + JSON.stringify(request));
112 | ws.send(JSON.stringify(request));
113 | };
114 |
115 | ws.onmessage = function (evt) {
116 | var msg = JSON.parse(evt.data);
117 | var what = msg.what;
118 | var data = msg.data;
119 |
120 | console.log("received message " + JSON.stringify(msg));
121 |
122 | switch (what) {
123 | case "offer":
124 | var mediaConstraints = {
125 | optional: [],
126 | mandatory: {
127 | OfferToReceiveAudio: true,
128 | OfferToReceiveVideo: true
129 | }
130 | };
131 | pc.setRemoteDescription(new RTCSessionDescription(JSON.parse(data)),
132 | function onRemoteSdpSuccess() {
133 | hasRemoteDesc = true;
134 | addIceCandidates();
135 | pc.createAnswer(function (sessionDescription) {
136 | pc.setLocalDescription(sessionDescription);
137 | var request = {
138 | what: "answer",
139 | data: JSON.stringify(sessionDescription)
140 | };
141 | ws.send(JSON.stringify(request));
142 | }, function (error) {
143 | onError("failed to create answer: " + error);
144 | }, mediaConstraints);
145 | },
146 | function onRemoteSdpError(event) {
147 | onError('failed to set the remote description: ' + event);
148 | ws.close();
149 | }
150 | );
151 |
152 | break;
153 |
154 | case "answer":
155 | break;
156 |
157 | case "message":
158 | if (onMessage) {
159 | onMessage(msg.data);
160 | }
161 | break;
162 |
163 | case "iceCandidate": // received when trickle ice is used (see the "call" request)
164 | if (!msg.data) {
165 | console.log("Ice Gathering Complete");
166 | break;
167 | }
168 | var elt = JSON.parse(msg.data);
169 | let candidate = new RTCIceCandidate({sdpMLineIndex: elt.sdpMLineIndex, candidate: elt.candidate});
170 | iceCandidates.push(candidate);
171 | addIceCandidates(); // it internally checks if the remote description has been set
172 | break;
173 |
174 | case "iceCandidates": // received when trickle ice is NOT used (see the "call" request)
175 | var candidates = JSON.parse(msg.data);
176 | for (var i = 0; candidates && i < candidates.length; i++) {
177 | var elt = candidates[i];
178 | let candidate = new RTCIceCandidate({sdpMLineIndex: elt.sdpMLineIndex, candidate: elt.candidate});
179 | iceCandidates.push(candidate);
180 | }
181 | addIceCandidates();
182 | break;
183 | }
184 | };
185 |
186 | ws.onclose = function (event) {
187 | console.log('socket closed with code: ' + event.code);
188 | if (pc) {
189 | pc.close();
190 | pc = null;
191 | ws = null;
192 | }
193 | if (onClose) {
194 | onClose();
195 | }
196 | };
197 |
198 | ws.onerror = function (event) {
199 | onError("An error has occurred on the websocket (make sure the address is correct)!");
200 | };
201 |
202 | this.hangup = function() {
203 | if (ws) {
204 | var request = {
205 | what: "hangup"
206 | };
207 | console.log("send message " + JSON.stringify(request));
208 | ws.send(JSON.stringify(request));
209 | }
210 | };
211 |
212 | } else {
213 | onError("Sorry, this browser does not support Web Sockets. Bye.");
214 | }
215 | }
216 |
217 |
218 | // controller
219 | var datachannel = null;
220 |
221 | function onDataChannel(event) {
222 | console.log("onDataChannel()");
223 | datachannel = event.channel;
224 |
225 | event.channel.onopen = function () {
226 | console.log("Data Channel is open!");
227 | //document.getElementById('datachannels').disabled = false;
228 | };
229 |
230 | event.channel.onerror = function (error) {
231 | console.error("Data Channel Error:", error);
232 | };
233 |
234 | event.channel.onmessage = function (event) {
235 | console.log("Got Data Channel Message:", event.data);
236 | };
237 |
238 | event.channel.onclose = function () {
239 | datachannel = null;
240 | console.log("The Data Channel is Closed");
241 | };
242 | }
243 |
244 | function stop() {
245 | if (datachannel) {
246 | console.log("closing data channels");
247 | datachannel.close();
248 | datachannel = null;
249 | }
250 | }
251 |
252 | function send_message(msg) {
253 | datachannel.send(msg);
254 | console.log("message sent: ", msg);
255 | }
256 |
257 | function keyToCommand(keyCode) {
258 | switch (keyCode) {
259 | case 38:
260 | return "FORDWARD"
261 | case 40:
262 | return "BACKWARD"
263 | case 39:
264 | return "RIGHT"
265 | case 37:
266 | return "LEFT"
267 | case 33:
268 | return "UP"
269 | case 34:
270 | return "DOWN"
271 | default:
272 | return false
273 | }
274 | }
275 |
276 | var commands = []
277 |
278 | function sendCommands(){
279 | console.log(JSON.stringify({commands: commands}))
280 | send_message(JSON.stringify({commands: commands}))
281 | }
282 |
283 | function keydown(e) {
284 | command = keyToCommand(e.keyCode)
285 | if (! command) {
286 | return;
287 | }
288 | e.preventDefault();
289 | e.stopPropagation();
290 | e.stopImmediatePropagation();
291 | if (commands.indexOf(command) === -1){
292 | commands.push(command)
293 | }
294 | sendCommands()
295 | }
296 |
297 | function keyup(e) {
298 | command = keyToCommand(e.keyCode)
299 | if (! command) {
300 | return;
301 | }
302 | e.preventDefault();
303 | e.stopPropagation();
304 | e.stopImmediatePropagation();
305 | var index = commands.indexOf(command);
306 | if (index > -1) {
307 | commands.splice(index, 1);
308 | }
309 | sendCommands()
310 | }
311 |
312 |
313 | // listen to key events
314 | window.addEventListener('keydown', keydown, true);
315 | window.addEventListener('keyup', keyup, true);
316 |
--------------------------------------------------------------------------------
/uv4l-raspicam.conf:
--------------------------------------------------------------------------------
1 | # You can use this file to override the built-in defaults of the options in
2 | # the following modules:
3 | # - UV4L core
4 | # - raspicam driver
5 | # - Streaming Server module.
6 | #
7 | # To get a full list of the available options with their defaults and
8 | # descriptions, please refer to the 'uv4l', 'uv4l-raspicam' and 'uv4l-server'
9 | # manual pages, or alternatively type the following command:
10 | #
11 | # uv4l --driver raspicam --help --driver-help --server-help
12 | #
13 | # This file is parsed by uv4l through the 'uv4l_raspicam' system service script,
14 | # if installed. If you are not using the service facility to load the raspicam
15 | # driver, then to instruct uv4l to parse this file you must explicitly pass its
16 | # path to the '--driver-config-file' driver option and/or to the '--config-file'
17 | # options, for example (in one line):
18 | #
19 | # uv4l [...] --driver raspicam --config-file=/path/to/uv4l-raspicam.conf \
20 | # --driver-config-file=/path/to/uv4l-raspicam.conf
21 | #
22 | # NOTE: multi argument options must be specified with one argument per line,
23 | # e.g. the command line option '--min-object-size 80 120', in this file becomes:
24 | # min-object-size = 80
25 | # min-object-size = 120
26 | # another example:
27 | # server-option = --webrtc-stun-urls=stun:stun.l.google.com:19302
28 | # server-option = --webrtc-stun-urls=stun1.l.google.com:19302
29 | #
30 | # NOTE: to comment out an option override put a # at the beginning of the
31 | # corresponding line. Remember that any commented out option that may appear
32 | # on a standard installation of this file is *not* necessarily specified with
33 | # its default built-in value.
34 |
35 | ##################################
36 | # uv4l core options
37 | ##################################
38 |
39 | driver = raspicam
40 | # video_nr = 0
41 | auto-video_nr = yes
42 | # verbosity = 6
43 | # log-color = false
44 | # syslog-host = localhost
45 | # syslog-port = 514
46 | # frame-timeout = 5000
47 | frame-buffers = 4
48 | # zero_copy = yes
49 | # drop-bad-frames = yes
50 | # relaxed-ownership = yes
51 | # extension-presence = no
52 |
53 | ##################################
54 | # raspicam driver options
55 | ##################################
56 |
57 | encoding = mjpeg
58 | # width = 640
59 | # height = 480
60 | framerate = 30
61 | #custom-sensor-config = 2
62 |
63 | ### dual camera options:
64 | # stereoscopic-mode = side_by_side
65 | # camera-number = 1
66 | # decimate = yes
67 | # swap-eyes = yes
68 |
69 | ### still and/or video options:
70 | # quality = 85
71 | # stills-denoise = yes
72 | video-denoise = no
73 | # raw = no
74 |
75 | ### h264 options:
76 | # profile = high
77 | # level = 4.2
78 | # bitrate = 8000000
79 | # intra-refresh-mode = dummy
80 | # intra-period = #arg
81 | # inline-headers = yes
82 | # quantisation-parameter #arg
83 |
84 | ### video overlay options:
85 | nopreview = no
86 | fullscreen = no
87 | # osd-layer = 2
88 | # opacity = 255
89 | ### preview window :
90 | preview = 480
91 | preview = 240
92 | preview = 320
93 | preview = 240
94 |
95 | ### post-processing options:
96 | # text-overlay = yes
97 | # text-filename = /usr/share/uv4l/raspicam/text.json
98 | # object-detection = yes
99 | # object-detection-mode = accurate_tracking
100 | # min-object-size = 80
101 | # min-object-size = 80
102 | # main-classifier = /usr/share/uv4l/raspicam/lbpcascade_frontalface.xml
103 | # secondary-classifier =/usr/share/uv4l/raspicam/lbpcascade_frontalface.xml
104 |
105 | ### image settings options:
106 | # sharpness = 0
107 | # contrast = 0
108 | # brightness = 50
109 | # saturation = 0
110 | # iso = 400
111 | # vstab = yes
112 | # ev = 0
113 | # exposure = auto
114 | # flicker = off
115 | # awb = auto
116 | # imgfx = none
117 | # metering = average
118 | # rotation = 0
119 | # hflip = no
120 | # vflip = no
121 | # shutter-speed = 0
122 | # drc = off
123 | # red-gain = 100
124 | # blue-gain = 100
125 | # text-annotation = HelloWorld!
126 | # text-annotation-background = yes
127 | ### ROI normalized to [0, 1]
128 | # roi = 0
129 | # roi = 0
130 | # roi = 1
131 | # roi = 1
132 | ### ISP blocks
133 | # black-level-compensation = yes
134 | # lens-shading = yes
135 | # automatic-defective-pixel-correlation = yes
136 | # white-balance-gain = yes
137 | # crosstalk = yes
138 | # gamma = yes
139 | # sharpening = yes
140 |
141 | ### TC358743 HDMI to MIPI converter options:
142 | # tc358743 = no
143 | # tc358743-i2c-dev = /dev/i2c-1
144 | # tc358743-init-command = /usr/share/uv4l/raspicam/tc358743_init.sh
145 | # tc358743-no-signal-fallthrough = no
146 | # tc358743-edid-file = #path
147 | # record = no
148 | # recording-dir = /usr/share/uv4l/recordings
149 | # recording-bitrate = 800000
150 |
151 | ### advanced options:
152 | # statistics = yes
153 | # output-buffers = 3
154 |
155 | ### License Key associated to serial number (from --serial-number):
156 | # license-key = #arg
157 |
158 |
159 | #################################
160 | # streaming server options
161 | #################################
162 |
163 | ### path to a separate config file that will be parsed by the streaming server
164 | ### module directly when it's loaded,
165 | ### in which you are allowed to specify all the streaming server options
166 | ### listed below in the short form "option=value" instead of the longer
167 | ### "--server-option = --option=value" form that you must use
168 | ### in this configuration file.
169 | #server-config-file = #path
170 |
171 | # server-option = --port=8080
172 | # server-option = --bind-host-address=localhost
173 | # server-option = --md5-passwords=no
174 | # server-option = --user-password=myp4ssw0rd
175 | # server-option = --admin-password=myp4ssw0rd
176 | ### To enable 'config' user authentication
177 | # server-option = --config-password=myp4ssw0rd
178 |
179 | ### HTTPS options:
180 |
181 | #server-option = --use-ssl=yes
182 | #server-option = --ssl-private-key-file=/home/pi/.ssl/selfsign.key
183 | #server-option = --ssl-certificate-file=/home/pi/.ssl/selfsign.crt
184 |
185 | # server-option = --use-ssl=yes
186 | # server-option = --ssl-private-key-file=#path
187 | # server-option = --ssl-certificate-file=#path
188 |
189 | ### WebRTC options:
190 | # server-option = --enable-webrtc=yes
191 | # server-option = --enable-webrtc-datachannels=yes
192 | # server-option = --webrtc-datachannel-label=uv4l
193 | # server-option = --webrtc-datachannel-socket=/tmp/uv4l.socket
194 | # server-option = --enable-webrtc-video=yes
195 | # server-option = --enable-webrtc-audio=yes
196 | # server-option = --webrtc-receive-video=yes
197 | # server-option = --webrtc-receive-datachannels=no
198 | # server-option = --webrtc-received-datachannel-socket=/tmp/uv4l.socket
199 | # server-option = --webrtc-receive-audio=yes
200 | # server-option = --webrtc-received-audio-volume=5.0
201 | # server-option = --webrtc-prerenderer-smoothing=yes
202 | # server-option = --webrtc-recdevice-index=0
203 | # server-option = --webrtc-vad=yes
204 | # server-option = --webrtc-echo-cancellation=no
205 | # server-option = --webrtc-preferred-vcodec=0
206 | # server-option = --webrtc-enable-hw-codec=yes
207 | # server-option = --webrtc-video-format=60
208 | # server-option = --webrtc-hw-vcodec-minbitrate=800
209 | # server-option = --webrtc-hw-vcodec-maxbitrate=4000
210 | # server-option = --webrtc-hw-vcodec-startbitrate=1200
211 | # server-option = --webrtc-hw-vcodec-intra-period=1800
212 | # server-option = --webrtc-suspend-below-min-bitrate=no
213 | server-option = --webrtc-max-playout-delay=34
214 | # server-option = --webrtc-cpu-overuse-detection=no
215 | # server-option = --webrtc-combined-audiovideo-bwe=no
216 | # server-option = --webrtc-stun-urls=stun:stun.l.google.com:19302
217 | # server-option = --webrtc-stun-urls # use this for no urls
218 | # server-option = --webrtc-ice-servers=[{"urls": "stun:stun1.example.net"}, {"urls": "turn:turn.example.org", "username": "user", "credential": "myPassword"}]
219 | # server-option = --webrtc-stun-server=yes
220 | # server-option = --webrtc-tcp-candidate-policy=1
221 | # server-option = --webrtc-rtcp-mux-policy=0
222 | # server-option = --webrtc-enable-dscp=no
223 | # server-option = --webrtc-ignore-loopback=yes
224 | # server-option = --webrtc-trickle-ice=yes
225 | # server-option = --webrtc-stats-dir=/usr/share/uv4l/statistics/
226 | ### video rendering window positions and sizes on the display.
227 | ### for each window, default values can be optionally overridden, but if you
228 | ### do this you must specify one line for each of the four x, y, width, height
229 | ### window properties (in that order).
230 | ### If fullscreen is set the image is stretched to the maximum available display
231 | ### resolution from the specified size.
232 | ### window 1
233 | # server-option = --webrtc-renderer-window=0
234 | # server-option = --webrtc-renderer-window=0
235 | # server-option = --webrtc-renderer-window=480
236 | # server-option = --webrtc-renderer-window=352
237 | # server-option = --webrtc-renderer-fullscreen=no
238 | # server-option = --webrtc-renderer-rotation=180
239 | # server-option = --webrtc-renderer-opacity=255
240 | ### window 2
241 | # server-option = --webrtc-renderer2-window=480
242 | # server-option = --webrtc-renderer2-window=0
243 | # server-option = --webrtc-renderer2-window=320
244 | # server-option = --webrtc-renderer2-window=240
245 | ### window 3
246 | # server-option = --webrtc-renderer3-window=0
247 | # server-option = --webrtc-renderer3-window=352
248 | # server-option = --webrtc-renderer3-window=176
249 | # server-option = --webrtc-renderer3-window=128
250 | # if enabled, this overrides the size of the rendering windows:
251 | # server-option = --webrtc-renderer-source-size=no
252 |
253 | ### XMPP options:
254 | # server-option = --xmpp-server=lambada.jitsi.net
255 | # server-option = --xmpp-port=5222
256 | # server-option = --xmpp-muc-domain=meet.jit.si
257 | # server-option = --xmpp-room=room
258 | # server-option = --xmpp-room-password=room_password
259 | # server-option = --xmpp-username=me
260 | # server-option = --xmpp-password=mypassword
261 | # server-option = --xmpp-reconnect=yes
262 | # server-option = --xmpp-bosh-enable
263 | # server-option = --xmpp-bosh-tls
264 | # server-option = --xmpp-bosh-server
265 | # server-option = --xmpp-bosh-port
266 | # server-option = --xmpp-bosh-hostname
267 | # server-option = --xmpp-bosh-path
268 | # server-option = --xmpp-bridge-host=localhost
269 | # server-option = --xmpp-bridge-port=7999
270 |
271 | ### Janus WebRTC Gateway options:
272 | # server-option = --janus-gateway-url=https://janus.conf.meetecho.com
273 | # server-option = --janus-gateway-root=/janus
274 | # server-option = --janus-room=1234
275 | # server-option = --janus-room-pin=#pin
276 | # server-option = --janus-username=test
277 | # server-option = --janus-token=#token
278 | # server-option = --janus-proxy-host=#host
279 | # server-option = --janus-proxy-port=80
280 | # server-option = --janus-proxy-username=#user
281 | # server-option = --janus-proxy-password=#password
282 | # server-option = --janus-proxy-bypass=#regex
283 | # server-option = --janus-force-hw-vcodec=no
284 | # server-option = --janus-video-format=#code
285 | # server-option = --janus-publish=yes
286 | # server-option = --janus-subscribe=no
287 | # server-option = --janus-reconnect=yes
288 |
289 | ### Fine-tuning options:
290 | # server-option = --connection-timeout=15
291 | # server-option = --enable-keepalive=yes
292 | # server-option = --max-keepalive-requests=0
293 | # server-option = --keepalive-timeout=7
294 | # server-option = --max-queued-connections=8
295 | # server-option = --max-streams=3
296 | # server-option = --max-threads=5
297 | # server-option = --thread-idle-time=10
298 | # server-option = --chuncked-transfer-encoding=yes
299 |
300 | ### Advanced options:
301 | # server-option = --frame-timeout=5000
302 | # server-option = --frame-buffers=auto
303 |
304 | ### These options are specific to the HTTP/HTTPS Server
305 | ### serving custom Web pages only:
306 | server-option = --enable-www-server=yes
307 | server-option = --www-root-path=/home/pi/repo/server/
308 | server-option = --www-index-file=index.html
309 | server-option = --www-port=80
310 | # server-option = --www-bind-host-address=#host
311 | # server-option = --www-password=#password
312 |
313 | #server-option = --www-use-ssl=yes
314 | #server-option = --www-ssl-private-key-file=/home/pi/.ssl/selfsign.key
315 | #server-option = --www-ssl-certificate-file=/home/pi/.ssl/selfsign.crt
316 |
317 | # server-option = --www-connection-timeout=15
318 | # server-option = --www-enable-keepalive=no
319 | # server-option = --www-max-keepalive-requests=0
320 | # server-option = --www-keepalive-timeout=7
321 | # server-option = --www-max-queued-connections=8
322 | # server-option = --www-max-threads=4
323 | # server-option = --www-thread-idle-time=10
324 | # server-option = --www-chuncked-transfer-encoding=no
325 | # server-option = --www-set-etag-header=yes
326 | server-option = --www-webrtc-signaling-path=/webrtc
327 |
328 | ### Other options:
329 | # server-option = --editable-config-file=#path
330 | # server-option = --enable-control-panel=yes
331 | # server-option = --enable-rest-api=yes
332 |
--------------------------------------------------------------------------------