6 | First try to establish a live connection with the Connect button. After a successful connection is made you can
7 | disconnect and use the import functionality.
8 |
9 |
10 | {% for message in messages %}
11 |
38 | * 'Calculate W/mAh/mWh/Ω' option will calculate remaining values from voltage and current
39 | since TC66C doesn't record anything else. Calculated values are best estimates,
40 | and they may not be accurate.
41 | WARNING: An incorrect period value will produce bogus mAh/mWh values.
42 |
43 | {% endblock %}
44 |
--------------------------------------------------------------------------------
/webapp/templates/csv-import.html:
--------------------------------------------------------------------------------
1 | {% extends "layout.html" %}
2 | {% block content %}
3 |
6 | This functionality can import CSV files created by this application (via Export CSV feature).
7 | You can also create your own CSV files, but they must follow the header/column naming and data format
8 | that the Export CSV feature creates.
9 |
10 |
11 | {% for message in messages %}
12 |
54 |
80 |
81 |
82 |
Loading... Please wait...
83 |
84 |
85 |
86 | {% endblock %}
87 |
--------------------------------------------------------------------------------
/interfaces/um.py:
--------------------------------------------------------------------------------
1 | import codecs
2 | from collections import OrderedDict
3 | from time import time
4 |
5 | import bluetooth
6 | import serial
7 |
8 | from interfaces.interface import Interface, FatalErrorException
9 |
10 |
class UmInterface(Interface):
    """Serial interface for RDTech UM-series USB meters (UM24C/UM25C/UM34C).

    The meter replies to the 0xF0 command with a 130-byte status frame which
    parse() decodes into an OrderedDict of measurements.
    """

    serial = None
    # UM25C reports voltage/current with one extra decimal place; when enabled
    # the raw readings are divided by an additional factor of 10.
    higher_resolution = False
    # Charging-protocol detection modes reported by the meter.
    modes = {
        0: "Unknown",
        1: "QC2.0",
        2: "QC3.0",
        3: "APP2.4A",
        4: "APP2.1A",
        5: "APP1.0A",
        6: "APP0.5A",
        7: "DCP1.5A",
        8: "SAMSUNG",
        65535: "Unknown"
    }

    def __init__(self, port, timeout):
        # port: serial device path/name; timeout: read timeout in seconds.
        self.port = port
        self.timeout = timeout

    def enable_higher_resolution(self):
        """Enable the extra decimal place used by UM25C readings."""
        self.higher_resolution = True

    def connect(self):
        """Lazily create the serial connection (9600 baud, as the meter expects)."""
        if self.serial is None:
            self.serial = serial.Serial(port=self.port, baudrate=9600, timeout=self.timeout, write_timeout=0)

    def read(self):
        """Request one status frame (command 0xF0) and return the parsed values."""
        self.open()
        self.send("f0")
        data = self.serial.read(130)
        return self.parse(data)

    def send(self, value):
        """Write a hex-encoded command string (e.g. "f0") to the meter."""
        self.open()
        self.serial.write(bytes.fromhex(value))

    def parse(self, data):
        """Decode a 130-byte status frame into an OrderedDict of measurements.

        Returns None when the frame is shorter than expected (read timeout).
        """
        if len(data) < 130:
            return None

        # Work on the hex string representation; field offsets below are
        # hex-character positions (two characters per byte).
        data = codecs.encode(data, "hex").decode("utf-8")

        def field(start, length):
            # Big-endian unsigned integer taken from the hex string.
            return int(data[start:start + length], 16)

        result = OrderedDict()

        multiplier = 10 if self.higher_resolution else 1

        result["timestamp"] = time()
        result["voltage"] = field(4, 4) / (100 * multiplier)
        result["current"] = field(8, 4) / (1000 * multiplier)
        result["power"] = field(12, 8) / 1000
        result["temperature"] = field(20, 4)
        result["data_plus"] = field(192, 4) / 100
        result["data_minus"] = field(196, 4) / 100
        result["mode_id"] = field(200, 4)
        # None when the meter reports a mode id we do not know about.
        result["mode_name"] = self.modes.get(result["mode_id"])
        result["accumulated_current"] = field(204, 8)
        result["accumulated_power"] = field(212, 8)
        result["accumulated_time"] = field(224, 8)
        result["resistance"] = field(244, 8) / 10

        return result

    def open(self):
        """Open the underlying serial port if it is not open yet."""
        # is_open is the pyserial 3.x property (isOpen() is deprecated).
        if not self.serial.is_open:
            self.serial.open()

    def disconnect(self):
        """Close the serial port if a connection was created."""
        if self.serial:
            self.serial.close()
89 |
90 |
class UmRfcommInterface(UmInterface):
    """UM-series meter reached over Bluetooth RFCOMM instead of a serial port."""

    socket = None

    def __init__(self, address):
        # No serial port/timeout is used; the Bluetooth socket replaces them.
        super().__init__(None, None)
        self.address = address

    def connect(self):
        """Locate the RFCOMM service on the device and open a socket to it."""
        if self.socket is not None:
            return

        matches = bluetooth.find_service(address=self.address)
        service = next((entry for entry in matches if entry["protocol"] == "RFCOMM"), None)

        if service is None:
            raise FatalErrorException("Bluetooth service not found, try to initiate Setup again")

        self.socket = bluetooth.BluetoothSocket(bluetooth.RFCOMM)
        self.socket.connect((service["host"], service["port"]))

    def read(self):
        """Request one status frame and accumulate it from the socket (30s budget)."""
        self.connect()
        self.send("f0")
        received = bytearray()
        give_up_at = time() + 30
        while len(received) < 130 and time() < give_up_at:
            received.extend(self.socket.recv(130))
        return self.parse(received)

    def send(self, value):
        """Send a hex-encoded command over the Bluetooth socket."""
        self.connect()
        self.socket.send(bytes.fromhex(value))

    def disconnect(self):
        """Close and forget the Bluetooth socket, if any."""
        if self.socket:
            self.socket.close()
            self.socket = None
131 |
--------------------------------------------------------------------------------
/web.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import logging
3 | from logging import StreamHandler
4 | from logging.handlers import TimedRotatingFileHandler
5 | import multiprocessing
6 | import random
7 | import string
8 | import sys
9 | from threading import Thread
10 | from time import sleep
11 | from urllib import request
12 | import webbrowser
13 |
14 | from flask import Flask
15 | import socketio
16 | from werkzeug.middleware.dispatcher import DispatcherMiddleware
17 |
18 | from utils.config import Config, static_path, get_data_path, initialize_paths_from_args
19 | from utils.storage import Storage
20 | from webapp.backend import Backend
21 | from webapp.index import Index
22 |
23 |
def url_ok(url):
    """Return True when *url* answers an HTTP request, False on any failure.

    Used to poll the local web server until it is ready to serve.
    """
    try:
        # Close the response explicitly; the original leaked the socket.
        with request.urlopen(url=url):
            return True
    except Exception:
        # Any failure (connection refused, bad URL, HTTP error) means "not ready".
        return False
30 |
31 |
def parse_cli(open_browser=True, webview=False):
    """Build the argument parser and return the parsed CLI namespace.

    webview builds get a --disable-gpu switch; regular builds get --daemon
    (defaulting to the inverse of open_browser) and --prefix instead.
    """
    cli = argparse.ArgumentParser()
    cli.add_argument("port", nargs="?", type=int, default=5000, help="Port for web server to listen on")
    cli.add_argument("--listen", type=str, default="0.0.0.0",
                     help="Listen on address of specific interface (defaults to all interfaces)")
    cli.add_argument("--on-receive", help="Call this program/script when new measurements are received")
    cli.add_argument("--on-receive-interval", type=int, default=60, help="Interval for --on-receive (in seconds)")
    cli.add_argument("--data-dir", type=str, help="Where to store configuration and user data files")
    if webview:
        cli.add_argument("--disable-gpu", action="store_true", default=False, help="Disable GPU rendering")
    else:
        cli.add_argument("--daemon", action="store_true", default=not open_browser, help="Do not launch web browser")
        cli.add_argument("--prefix", default="/", help="If you want to reverse-proxy from path, like /rd-usb")

    return cli.parse_args()
47 |
48 |
def run(args=None, embedded=False):
    """Configure logging, storage and the Flask/socketio app, then serve it.

    args: parsed CLI namespace (parsed here when omitted).
    embedded: True when hosted inside the webview wrapper; skips path
    initialization, the browser launcher, and lets the wrapper own startup.
    """
    if not args:
        args = parse_cli()

    if not embedded:
        initialize_paths_from_args(args)

    port = args.port
    listen = args.listen
    # --daemon only exists in non-webview builds, hence the membership test.
    daemon = "daemon" in args and args.daemon

    if "prefix" in args:
        prefix = args.prefix
    else:
        prefix = "/"
    # Normalize to "/name" form without a trailing slash.
    if not prefix.startswith("/"):
        prefix = "/" + prefix
    if len(prefix) > 1 and prefix.endswith("/"):
        prefix = prefix[0:-1]

    app = Flask(__name__, static_folder=static_path)
    app.config["embedded"] = embedded
    app.config["app_prefix"] = prefix
    app.register_blueprint(Index().register())

    if prefix != "/":
        def fallback(env, resp):
            # PEP 3333: the status and the header names/values must be native
            # str, not bytes - the original bytes made werkzeug reject them.
            resp("200 OK", [("Content-Type", "text/plain; charset=UTF-8")])
            # The response body itself is correctly bytes.
            return [b"use '%s' instead" % prefix.encode("utf-8")]

        app.config["APPLICATION_ROOT"] = prefix
        app.wsgi_app = DispatcherMiddleware(fallback, {prefix: app.wsgi_app})

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    formatter = logging.Formatter("%(asctime)s - %(levelname)s - %(message)s")

    console = StreamHandler()
    console.setLevel(logging.DEBUG)
    console.setFormatter(formatter)
    logger.addHandler(console)

    if not app.debug:
        # Weekly-rotated error log, two weeks of backups.
        file = TimedRotatingFileHandler(get_data_path() + "/error.log", when="w0", backupCount=14)
        file.setLevel(logging.ERROR)
        file.setFormatter(formatter)
        logger.addHandler(file)

    try:
        config = Config()
        # Persist a random session secret across restarts.
        secret_key = config.read("secret_key")
        if not secret_key:
            secret_key = "".join(random.choice(string.ascii_uppercase + string.digits) for _ in range(16))
            config.write("secret_key", secret_key)
        app.secret_key = secret_key

        Storage().init()

        sockets = socketio.Server(async_mode="threading", cors_allowed_origins="*")
        socketio_path = "socket.io"
        if len(prefix) > 1:
            socketio_path = prefix[1:] + "/" + socketio_path
        app.wsgi_app = socketio.Middleware(sockets, app.wsgi_app, socketio_path=socketio_path)
        sockets.register_namespace(Backend(args.on_receive, args.on_receive_interval))

        if not embedded:
            def open_in_browser():
                logging.info("Application is starting...")

                # Poll until the server answers, then optionally open a browser.
                url = "http://127.0.0.1:%s" % port
                while not url_ok(url):
                    sleep(0.5)

                logging.info("Application is available at " + url)

                if not app.debug and not daemon:
                    webbrowser.open(url)

            Thread(target=open_in_browser, daemon=True).start()

        app.run(host=listen, port=port, threaded=True, use_reloader=False)

    except (KeyboardInterrupt, SystemExit):
        raise
    except Exception:
        # Narrowed from a bare except; log any startup/serve failure.
        logging.exception(sys.exc_info()[0])
135 |
136 |
if __name__ == "__main__":
    # Frozen Windows builds re-launch the executable with a "fork"-style
    # argument for multiprocessing bootstrap; serve that call and exit.
    if len(sys.argv) > 1 and "fork" in sys.argv[1]:
        multiprocessing.freeze_support()
        exit(0)

    # Required on Windows even outside the fork path when frozen.
    if sys.platform.startswith("win"):
        multiprocessing.freeze_support()

    run()
146 |
--------------------------------------------------------------------------------
/app.py:
--------------------------------------------------------------------------------
1 | from contextlib import redirect_stdout
2 | import io
3 | import multiprocessing
4 | import os
5 | import sys
6 | from threading import Thread
7 | from time import sleep
8 | from urllib import request
9 |
10 | from screeninfo import screeninfo
11 | import webview
12 |
13 | from utils.config import Config, get_data_path, get_cache_path, initialize_paths_from_args
14 | from utils.version import version
15 | from web import run, parse_cli
16 |
17 | debug = "FLASK_DEBUG" in os.environ
18 |
19 |
20 | class Webview:
21 | title = None
22 | width = None
23 | height = None
24 | x = None
25 | y = None
26 |
27 | callback = None
28 | window = None
29 | loaded = False
30 | sleep = 0.5
31 |
32 | loading_html = """
33 |
39 |
40 |
126 | {% with messages = get_flashed_messages(with_categories=true) %}
127 | {% if messages %}
128 |
129 | {% for category, message in messages %}
130 |
{{ message }}
131 | {% endfor %}
132 |
133 | {% endif %}
134 | {% endwith %}
135 |
136 | {% block content %}{% endblock %}
137 |
138 |
139 |
144 |
145 |
146 |
147 |
148 |
149 |
150 |
151 |
152 |
153 |
154 |
155 |
--------------------------------------------------------------------------------
/interfaces/tc.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import logging
3 | import struct
4 | from time import time, sleep
5 |
6 | from Crypto.Cipher import AES
7 |
8 | try:
9 | from bleak import BleakClient, BleakError, BleakScanner
10 | supported = True
11 | except Exception as e:
12 | message = str(e)
13 | if "Only Windows 10 is supported" in message or "Requires at least Windows 10" in message:
14 | unsupported_reason = message
15 | supported = False
16 | else:
17 | raise
18 |
19 | import serial
20 |
21 | from interfaces.interface import Interface
22 |
# GATT characteristic UUIDs used by the TC66C. Firmware variants expose either
# the ffe9/ffe4 pair or the ffe2/ffe1 pair; both lists are tried in order
# (see TcBleInterface.addresses_index).
SERVER_RX_DATA = ["0000ffe9-0000-1000-8000-00805f9b34fb", "0000ffe2-0000-1000-8000-00805f9b34fb"]
SERVER_TX_DATA = ["0000ffe4-0000-1000-8000-00805f9b34fb", "0000ffe1-0000-1000-8000-00805f9b34fb"]
# Command asking the meter to publish one encrypted measurement frame.
ASK_FOR_VALUES_COMMAND = "bgetva"
26 |
27 |
class TcBleInterface(Interface):
    """RDTech TC66C meter connected over Bluetooth Low Energy (via bleak).

    All bleak calls are coroutines, so a private asyncio event loop is kept
    in self.loop and every public method drives it with run_until_complete().
    """

    # Seconds disconnect() waits for a still-running loop to settle.
    timeout = 30
    client = None
    loop = None
    # True once start_notify() has been registered for the TX characteristic.
    bound = False
    # Index into SERVER_RX_DATA/SERVER_TX_DATA; advanced when a firmware
    # variant does not expose the first characteristic pair.
    addresses_index = 0

    def __init__(self, address):
        # address: BLE address of the meter (platform-specific format).
        self.address = address
        self.response = Response()

    def scan(self):
        """Discover nearby BLE devices; returns dicts with 'address' and 'name'."""
        async def run():
            devices = await BleakScanner().discover()
            formatted = []
            for device in devices:
                formatted.append({
                    "address": device.address,
                    "name": device.name,
                })
            return formatted

        return self.get_loop().run_until_complete(run())

    def connect(self):
        """Open the BLE connection to the configured address."""
        self.get_loop().run_until_complete(self._connect_run(self.address))

    async def _connect_run(self, address):
        # Fail loudly on platforms where bleak could not even be imported.
        if not supported:
            raise NotSupportedException("TC66C over BLE is NOT SUPPORTED, reason: %s" % unsupported_reason)
        self.client = BleakClient(address, loop=self.get_loop())
        self.addresses_index = 0
        await self.client.connect()

    def disconnect(self):
        """Tear down notifications and the BLE connection, best effort."""
        # Wait (bounded by self.timeout) for in-flight loop work to finish.
        expiration = time() + self.timeout
        while self.loop and self.loop.is_running() and time() <= expiration:
            sleep(0.1)

        sleep(1)

        try:
            self.get_loop().run_until_complete(self._close_run())
        except RuntimeError as e:
            # Tolerate closing while the loop is busy; re-raise anything else.
            if "loop is already running" not in str(e):
                raise e

        self.bound = False

    async def _close_run(self):
        # Both steps are best-effort: the device may already be gone.
        try:
            await self.client.stop_notify(SERVER_TX_DATA[self.addresses_index])
        except Exception:
            pass

        try:
            await self.client.disconnect()
        except Exception:
            pass

    def read(self):
        """Request one measurement frame; returns the dict from Response.decode()."""
        return self.get_loop().run_until_complete(self._read_run())

    async def _read_run(self):
        self.response.reset()

        # Up to three attempts, also walking the known characteristic pairs.
        for retry in range(0, 3):
            address = SERVER_RX_DATA[self.addresses_index]
            try:
                await self.client.write_gatt_char(address, self.encode_command(ASK_FOR_VALUES_COMMAND), True)

                if not self.bound:
                    self.bound = True
                    await self.client.start_notify(SERVER_TX_DATA[self.addresses_index], self.response.callback)

            except BleakError as e:
                message = str(e).lower()
                # Characteristic missing: switch to the alternate UUID pair.
                if "not found" in message and "characteristic" in message:
                    self.addresses_index += 1
                    if self.addresses_index >= len(SERVER_RX_DATA):
                        raise
                else:
                    raise

            # Give the notification callback up to 5 seconds to fill the buffer.
            expiration = time() + 5
            while not self.response.is_complete() and time() <= expiration:
                await asyncio.sleep(0.1)

            if not self.response.is_complete():
                continue

            try:
                return self.response.decode()
            except CorruptedResponseException as e:
                # Bad frame: log and retry within the attempt budget.
                logging.exception(e)
                continue

        if not self.response.is_complete():
            raise NoResponseException

        return self.response.decode()

    def encode_command(self, command):
        # Commands are ASCII with CRLF termination, sent as a mutable bytearray.
        string = command + "\r\n"
        encoded = string.encode("ascii")
        encoded = bytearray(encoded)
        return encoded

    def get_loop(self):
        # Lazily create the dedicated event loop used for all bleak calls.
        if not self.loop:
            self.loop = asyncio.new_event_loop()
        return self.loop
140 |
141 |
class TcSerialInterface(Interface):
    """TC66/TC66C meter attached over USB serial (115200 baud)."""

    serial = None

    def __init__(self, port, timeout):
        self.port = port
        self.response = Response()
        self.timeout = timeout

    def connect(self):
        """Create the serial connection on first use."""
        if self.serial is not None:
            return
        self.serial = serial.Serial(port=self.port, baudrate=115200, timeout=self.timeout, write_timeout=0)

    def read(self):
        """Ask for one encrypted 192-byte frame and return the decoded dict."""
        self.open()
        self.send("getva")
        payload = self.serial.read(192)
        self.response.reset()
        self.response.callback(None, payload)
        return self.response.decode()

    def read_records(self):
        """Download the recorded measurement history (8 bytes per record)."""
        self.send("gtrec")

        records = []
        pending = bytearray()
        while True:
            chunk = self.serial.read(8)
            if not chunk:
                # Read timeout: the device has no more records to send.
                break

            pending.extend(chunk)
            if len(pending) >= 8:
                # Each record is two little-endian unsigned 32-bit integers.
                raw_voltage, raw_current = struct.unpack("<2I", pending[0:8])
                pending = pending[8:]

                records.append({
                    "voltage": float(raw_voltage) / 1000 / 10,
                    "current": float(raw_current) / 1000 / 100,
                })

        return records

    def send(self, value):
        """Send a plain ASCII command to the meter."""
        self.open()
        self.serial.write(value.encode("ascii"))

    def open(self):
        """Open the serial port if it is not open yet."""
        if not self.serial.isOpen():
            self.serial.open()

    def disconnect(self):
        """Close the serial port if a connection exists."""
        if self.serial:
            self.serial.close()
195 |
196 |
class Response:
    """Accumulates the meter's AES-ECB encrypted 192-byte reply and decodes it."""

    # Decryption key as signed byte values (ported from the vendor's Java app);
    # normalized to unsigned bytes in decrypt().
    key = [
        88, 33, -6, 86, 1, -78, -16, 38,
        -121, -1, 18, 4, 98, 42, 79, -80,
        -122, -12, 2, 96, -127, 111, -102, 11,
        -89, -15, 6, 97, -102, -72, 114, -120
    ]

    def __init__(self):
        # buffer/index were previously class attributes, so every instance
        # shared one bytearray until reset() was first called. Keep them
        # per-instance instead.
        self.buffer = bytearray()
        self.index = 0

    def append(self, data):
        """Add received bytes to the accumulation buffer."""
        try:
            self.buffer.extend(data)
            self.index += len(data)
        except BufferError:
            # Deliberate best-effort: a concurrent resize is simply dropped.
            pass

    def callback(self, sender, data):
        """Notification callback signature used by bleak (sender is unused)."""
        self.append(data)

    def is_complete(self):
        """True once the full 192-byte frame has been received."""
        return self.index >= 192

    def decrypt(self):
        """Decrypt the accumulated frame with AES-256-ECB; raises CorruptedResponseException."""
        # Normalize the signed Java-style key values to unsigned bytes.
        key = bytes(value & 255 for value in self.key)

        aes = AES.new(key, AES.MODE_ECB)
        try:
            return aes.decrypt(self.buffer)
        except ValueError:
            # Buffer length is not a multiple of the AES block size.
            raise CorruptedResponseException

    def decode(self, data=None):
        """Decrypt and unpack the frame into a measurement dict.

        data: optional extra bytes appended before decoding.
        """
        if data is not None:
            self.append(data)

        data = self.decrypt()

        # Byte 88 carries the temperature sign flag.
        if self.decode_integer(data, 88) == 1:
            temperature_multiplier = -1
        else:
            temperature_multiplier = 1

        return {
            "timestamp": time(),
            "voltage": self.decode_integer(data, 48, 10000),
            "current": self.decode_integer(data, 52, 100000),
            "power": self.decode_integer(data, 56, 10000),
            "resistance": self.decode_integer(data, 68, 10),
            "accumulated_current": self.decode_integer(data, 72),
            "accumulated_power": self.decode_integer(data, 76),
            "accumulated_time": None,
            "temperature": self.decode_integer(data, 92) * temperature_multiplier,
            "data_plus": self.decode_integer(data, 96, 100),
            "data_minus": self.decode_integer(data, 100, 100),
            "mode_id": None,
            "mode_name": None
        }

    def decode_integer(self, data, first_byte, divider=1):
        """Read a little-endian unsigned 32-bit integer at first_byte, scaled by divider."""
        temp4 = data[first_byte] & 255
        temp3 = data[first_byte + 1] & 255
        temp2 = data[first_byte + 2] & 255
        temp1 = data[first_byte + 3] & 255
        return ((((temp1 << 24) | (temp2 << 16)) | (temp3 << 8)) | temp4) / float(divider)

    def reset(self):
        """Discard any accumulated bytes before a new frame is collected."""
        self.buffer = bytearray()
        self.index = 0
268 |
269 |
class NoResponseException(Exception):
    """Raised when the meter does not deliver a complete frame in time."""
    pass


class CorruptedResponseException(Exception):
    """Raised when a received frame cannot be decrypted/decoded."""
    pass


class NotSupportedException(Exception):
    """Raised when BLE support is unavailable on this platform."""
    pass
280 |
--------------------------------------------------------------------------------
/pyinstaller/gui-only/hook-cefpython3.py:
--------------------------------------------------------------------------------
1 | """
2 | This is PyInstaller hook file for CEF Python. This file
3 | helps PyInstaller find CEF Python dependencies that are
4 | required to run final executable.
5 |
6 | See PyInstaller docs for hooks:
7 | https://pyinstaller.readthedocs.io/en/stable/hooks.html
8 | """
9 |
10 | import glob
11 | import os
12 | import platform
13 | import re
14 | import sys
15 |
16 | import PyInstaller
17 | from PyInstaller import log as logging
18 | from PyInstaller.compat import is_win, is_darwin, is_linux
19 | from PyInstaller.utils.hooks import is_module_satisfies, get_package_paths
20 | try:
21 | # PyInstaller >= 4.0 doesn't support Python 2.7
22 | from PyInstaller.compat import is_py2
23 | except ImportError:
24 | is_py2 = None
25 |
26 | # Constants
27 | CEFPYTHON_MIN_VERSION = "57.0"
28 | PYINSTALLER_MIN_VERSION = "3.2.1"
29 |
30 | # Makes assumption that using "python.exe" and not "pyinstaller.exe"
31 | # TODO: use this code to work cross-platform:
32 | # > from PyInstaller.utils.hooks import get_package_paths
33 | # > get_package_paths("cefpython3")
34 |
35 | CEFPYTHON3_DIR = get_package_paths("cefpython3")[1]
36 |
37 | CYTHON_MODULE_EXT = ".pyd" if is_win else ".so"
38 |
39 | # Globals
40 | logger = logging.getLogger(__name__)
41 |
42 |
43 | # Functions
def check_platforms():
    """Abort when running on an OS that cefpython3 does not support."""
    platform_ok = is_win or is_darwin or is_linux
    if not platform_ok:
        raise SystemExit("Error: Currently only Windows, Linux and Darwin "
                         "platforms are supported, see Issue #135.")
48 |
49 |
def check_pyinstaller_version():
    """Abort unless PyInstaller is at least PYINSTALLER_MIN_VERSION.

    Using is_module_satisfies() for pyinstaller fails when
    installed using 'pip install develop.zip' command
    (PyInstaller Issue #2802).
    """
    # Example version string for dev version of pyinstaller:
    # > 3.3.dev0+g5dc9557c
    version = PyInstaller.__version__
    match = re.search(r"^\d+\.\d+(\.\d+)?", version)

    def as_tuple(value):
        # Numeric comparison: the original string comparison broke for
        # two-digit majors (e.g. "10.0" < "3.2.1" lexicographically).
        return tuple(int(part) for part in value.split("."))

    if not match or as_tuple(match.group(0)) < as_tuple(PYINSTALLER_MIN_VERSION):
        raise SystemExit("Error: pyinstaller %s or higher is required"
                         % PYINSTALLER_MIN_VERSION)
61 |
62 |
def check_cefpython3_version():
    """Abort unless the installed cefpython3 meets the minimum version."""
    requirement = "cefpython3 >= %s" % CEFPYTHON_MIN_VERSION
    if is_module_satisfies(requirement):
        return
    raise SystemExit("Error: cefpython3 %s or higher is required"
                     % CEFPYTHON_MIN_VERSION)
67 |
68 |
def get_cefpython_modules():
    """Get all cefpython Cython modules in the cefpython3 package.
    It returns a list of names without file extension. Eg.
    'cefpython_py27'. """
    pattern = os.path.join(CEFPYTHON3_DIR, "cefpython_py*" + CYTHON_MODULE_EXT)
    pyds = glob.glob(pattern)
    assert len(pyds) > 1, "Missing cefpython3 Cython modules"
    # Strip directory and extension to get bare module names.
    return [os.path.basename(path).replace(CYTHON_MODULE_EXT, "")
            for path in pyds]
82 |
83 |
def get_excluded_cefpython_modules():
    """CEF Python package includes Cython modules for various Python
    versions. When using Python 2.7 pyinstaller should not
    bundle modules for eg. Python 3.6, otherwise it will
    cause to include Python 3 dll dependencies. Returns a list
    of fully qualified names eg. 'cefpython3.cefpython_py27'."""
    pyver_string = "py%s" % "".join(map(str, sys.version_info[:2]))
    excluded = []
    for mod in get_cefpython_modules():
        # Keep the module matching the running interpreter version.
        if pyver_string in mod:
            continue
        qualified = "cefpython3.%s" % mod
        logger.info("Exclude cefpython3 module: %s" % qualified)
        excluded.append(qualified)
    return excluded
100 |
101 |
def get_cefpython3_datas():
    """Returning almost all of cefpython binaries as DATAS (see exception
    below), because pyinstaller does strange things and fails if these are
    returned as BINARIES. It first updates manifest in .dll files:
    >> Updating manifest in chrome_elf.dll

    And then because of that it fails to load the library:
    >> hsrc = win32api.LoadLibraryEx(filename, 0, LOAD_LIBRARY_AS_DATAFILE)
    >> pywintypes.error: (5, 'LoadLibraryEx', 'Access is denied.')

    It is not required for pyinstaller to modify in any way
    CEF binaries or to look for its dependencies. CEF binaries
    does not have any external dependencies like MSVCR or similar.

    The .pak .dat and .bin files cannot be marked as BINARIES
    as pyinstaller would fail to find binary depdendencies on
    these files.

    One exception is subprocess (subprocess.exe on Windows) executable
    file, which is passed to pyinstaller as BINARIES in order to collect
    its dependecies.

    DATAS are in format: tuple(full_path, dest_subdir).
    """
    ret = list()

    # All platforms currently bundle into the app root directory.
    if is_win:
        cefdatadir = "."
    elif is_darwin or is_linux:
        cefdatadir = "."
    else:
        assert False, "Unsupported system {}".format(platform.system())

    # Binaries, licenses and readmes in the cefpython3/ directory
    for filename in os.listdir(CEFPYTHON3_DIR):
        # Ignore Cython modules which are already handled by
        # pyinstaller automatically.
        if filename[:-len(CYTHON_MODULE_EXT)] in get_cefpython_modules():
            continue

        # CEF binaries and datas
        extension = os.path.splitext(filename)[1]
        if extension in \
                [".exe", ".dll", ".pak", ".dat", ".bin", ".txt", ".so", ".plist"] \
                or filename.lower().startswith("license"):
            logger.info("Include cefpython3 data: {}".format(filename))
            ret.append((os.path.join(CEFPYTHON3_DIR, filename), cefdatadir))

    if is_darwin:
        # "Chromium Embedded Framework.framework/Resources" with subdirectories
        # is required. Contain .pak files and locales (each locale in separate
        # subdirectory).
        resources_subdir = \
            os.path.join("Chromium Embedded Framework.framework", "Resources")
        base_path = os.path.join(CEFPYTHON3_DIR, resources_subdir)
        assert os.path.exists(base_path), \
            "{} dir not found in cefpython3".format(resources_subdir)
        for path, dirs, files in os.walk(base_path):
            for file in files:
                absolute_file_path = os.path.join(path, file)
                # Preserve the relative directory layout inside the bundle.
                dest_path = os.path.relpath(path, CEFPYTHON3_DIR)
                ret.append((absolute_file_path, dest_path))
                logger.info("Include cefpython3 data: {}".format(dest_path))
    elif is_win or is_linux:
        # The .pak files in cefpython3/locales/ directory
        locales_dir = os.path.join(CEFPYTHON3_DIR, "locales")
        assert os.path.exists(locales_dir), \
            "locales/ dir not found in cefpython3"
        for filename in os.listdir(locales_dir):
            logger.info("Include cefpython3 data: {}/{}".format(
                os.path.basename(locales_dir), filename))
            ret.append((os.path.join(locales_dir, filename),
                        os.path.join(cefdatadir, "locales")))

        # Optional .so/.dll files in cefpython3/swiftshader/ directory
        swiftshader_dir = os.path.join(CEFPYTHON3_DIR, "swiftshader")
        if os.path.isdir(swiftshader_dir):
            for filename in os.listdir(swiftshader_dir):
                logger.info("Include cefpython3 data: {}/{}".format(
                    os.path.basename(swiftshader_dir), filename))
                ret.append((os.path.join(swiftshader_dir, filename),
                            os.path.join(cefdatadir, "swiftshader")))
    return ret
185 |
186 |
# ----------------------------------------------------------------------------
# Main
# ----------------------------------------------------------------------------

# Checks — each raises SystemExit with a descriptive message on failure.
check_platforms()
check_pyinstaller_version()
check_cefpython3_version()

# Info
logger.info("CEF Python package directory: %s" % CEFPYTHON3_DIR)

# Hidden imports.
# PyInstaller has no way on detecting imports made by Cython
# modules, so all pure Python imports made in cefpython .pyx
# files need to be manually entered here.
# TODO: Write a tool script that would find such imports in
# .pyx files automatically.
hiddenimports = [
    "codecs",
    "copy",
    "datetime",
    "inspect",
    "json",
    "os",
    "platform",
    "random",
    "re",
    "sys",
    "time",
    "traceback",
    "types",
    "urllib",
    "weakref",
]
# is_py2 is None on PyInstaller >= 4.0 (no Python 2 support), falsy either way.
if is_py2:
    hiddenimports += [
        "urlparse",
    ]

# Excluded modules
excludedimports = get_excluded_cefpython_modules()

# Include binaries requiring to collect its dependencies
if is_darwin or is_linux:
    binaries = [(os.path.join(CEFPYTHON3_DIR, "subprocess"), ".")]
elif is_win:
    binaries = [(os.path.join(CEFPYTHON3_DIR, "subprocess.exe"), ".")]
else:
    binaries = []

# Include datas
datas = get_cefpython3_datas()

# Notify pyinstaller.spec code that this hook was executed
# and that it succeeded.
os.environ["PYINSTALLER_CEFPYTHON3_HOOK_SUCCEEDED"] = "1"
244 |
--------------------------------------------------------------------------------
/utils/storage.py:
--------------------------------------------------------------------------------
1 | from contextlib import closing
2 | import logging
3 | import os
4 | import shutil
5 | import sqlite3
6 | from time import time
7 |
8 | import pendulum
9 |
10 | from utils.config import get_data_path
11 | from utils.converter import Converter
12 |
13 |
14 | class Storage:
15 | sqlite = None
16 | schema_version = 2
17 |
    def __init__(self):
        """Prepare sqlite3.connect() keyword arguments; no connection is opened here."""
        self.parameters = {
            # Database file lives in the user data directory.
            "database": os.path.join(get_data_path(), "data.db"),
            # Autocommit mode; transactions are managed explicitly where needed.
            "isolation_level": None,
        }
        self.converter = Converter()
24 |
25 | def connect(self, extra_parameters=None):
26 | parameters = self.parameters
27 | if extra_parameters:
28 | parameters.update(extra_parameters)
29 | connection = sqlite3.connect(**parameters)
30 | connection.row_factory = self.row_factory
31 | return connection
32 |
33 | def row_factory(self, cursor, row):
34 | dictionary = {}
35 | for index, column in enumerate(cursor.description):
36 | dictionary[column[0]] = row[index]
37 | return dictionary
38 |
    def init(self):
        """Create missing tables and run schema migrations.

        Called once at application startup. Schema version 1 -> 2 migration
        groups legacy per-name measurements into the sessions table.
        """
        with closing(self.connect()) as sqlite:
            cursor = sqlite.cursor()
            # Discover which tables already exist.
            cursor.execute("SELECT name FROM sqlite_master WHERE type = 'table'")
            tables = []
            for row in cursor.fetchall():
                tables.append(row["name"])

            schema_version = self.schema_version
            if "version" not in tables:
                # Fresh database: record the current schema version directly.
                cursor.execute("CREATE TABLE version (version INTEGER)")
                cursor.execute("INSERT INTO version VALUES (%s)" % self.schema_version)
            else:
                schema_version = int(cursor.execute("SELECT version FROM version").fetchone()["version"])

            if "status" not in tables:
                cursor.execute("CREATE TABLE status (status TEXT)")
                cursor.execute("INSERT INTO status VALUES ('disconnected')")

            if "logs" not in tables:
                cursor.execute((
                    "CREATE TABLE logs ("
                    "id INTEGER PRIMARY KEY,"
                    "message TEXT"
                    ")"
                ))

            if "measurements" not in tables:
                cursor.execute((
                    "CREATE TABLE measurements ("
                    "id INTEGER PRIMARY KEY,"
                    "name TEXT,"
                    "timestamp INTEGER,"
                    "voltage REAL,"
                    "current REAL,"
                    "power REAL,"
                    "temperature REAL,"
                    "data_plus REAL,"
                    "data_minus REAL,"
                    "mode_id INTEGER,"
                    "mode_name TEXT,"
                    "accumulated_current INTEGER,"
                    "accumulated_power INTEGER,"
                    "accumulated_time INTEGER,"
                    "resistance REAL,"
                    "session_id INTEGER"
                    ")"
                ))

            if "sessions" not in tables:
                cursor.execute((
                    "CREATE TABLE sessions ("
                    "id INTEGER PRIMARY KEY,"
                    "version TEXT,"
                    "name TEXT,"
                    "timestamp INTEGER"
                    ")"
                ))

            if schema_version == 1:
                logging.info("migrating database to new version, this may take a while...")

                # Snapshot the database file before the destructive migration.
                self.backup()

                cursor.execute((
                    "ALTER TABLE measurements ADD session_id INTEGER"
                ))

                cursor.execute("DELETE FROM measurements WHERE name = '' OR name IS NULL")

                # Create one session per distinct legacy name, then link its rows.
                query = cursor.execute(
                    "SELECT name, MIN(timestamp) AS timestamp FROM measurements WHERE session_id IS NULL GROUP BY name ORDER BY MIN(id)")
                rows = query.fetchall()
                for row in rows:
                    session_name = row["name"]
                    cursor.execute("INSERT INTO sessions (name, timestamp) VALUES (:name, :timestamp)", (
                        session_name, row["timestamp"]
                    ))
                    session_id = cursor.lastrowid
                    cursor.execute("UPDATE measurements SET session_id = :session_id WHERE name = :name", (
                        session_id, session_name
                    ))

                cursor.execute("UPDATE version SET version = 2")
123 |
124 | def store_measurement(self, data):
125 | with closing(self.connect()) as sqlite:
126 | self._insert_measurement(sqlite, data)
127 |
128 | def store_measurements(self, items):
129 | with closing(self.connect({"isolation_level": "DEFERRED"})) as sqlite:
130 | for data in items:
131 | self._insert_measurement(sqlite, data)
132 | sqlite.commit()
133 |
134 | def _insert_measurement(self, sqlite, data):
135 | if data is None:
136 | return
137 |
138 | columns = []
139 | placeholders = []
140 | values = []
141 | for name, value in data.items():
142 | columns.append(name)
143 | placeholders.append(":" + name)
144 | values.append(value)
145 |
146 | columns = ", ".join(columns)
147 | placeholders = ", ".join(placeholders)
148 | values = tuple(values)
149 |
150 | cursor = sqlite.cursor()
151 | cursor.execute("INSERT INTO measurements (" + columns + ") VALUES (" + placeholders + ")", values)
152 |
153 | def destroy_measurements(self, session):
154 | with closing(self.connect()) as sqlite:
155 | cursor = sqlite.cursor()
156 | cursor.execute("DELETE FROM measurements WHERE session_id = ?", (session,))
157 | cursor.execute("DELETE FROM sessions WHERE id = ?", (session,))
158 |
159 | def fetch_sessions(self):
160 | with closing(self.connect()) as sqlite:
161 | cursor = sqlite.cursor()
162 | return cursor.execute("SELECT * FROM sessions ORDER BY timestamp DESC").fetchall()
163 |
164 | def fetch_measurements_count(self, session):
165 | with closing(self.connect()) as sqlite:
166 | cursor = sqlite.cursor()
167 | cursor.execute("SELECT COUNT(id) AS count FROM measurements WHERE session_id = ?", (session,))
168 | return int(cursor.fetchone()["count"])
169 |
170 | def fetch_measurements(self, session, limit=None, offset=None, zeroed=False):
171 | with closing(self.connect()) as sqlite:
172 | cursor = sqlite.cursor()
173 | sql = "SELECT * FROM measurements WHERE session_id = ? ORDER BY timestamp ASC"
174 | if limit is None or offset is None:
175 | cursor.execute(sql, (session,))
176 | else:
177 | cursor.execute(sql + " LIMIT ?, ?", (session, offset, limit))
178 | items = cursor.fetchall()
179 |
180 | for index, item in enumerate(items):
181 | items[index] = self.converter.convert(item)
182 |
183 | if zeroed:
184 | self.fill_zeroed_accumulated_fields_for_measurements(session, items)
185 |
186 | return items
187 |
188 | def fill_zeroed_accumulated_fields_for_measurements(self, session, measurements):
189 | first_measurements = self.fetch_measurements(session, 1, 0)
190 | first_measurement = first_measurements[0] if len(first_measurements) else None
191 | for item in measurements:
192 | for name, value in list(item.items()):
193 | if name.startswith("accumulated_"):
194 | zeroed_name = "zeroed_%s" % name
195 | if name not in first_measurement or first_measurement[name] is None:
196 | item[zeroed_name] = None
197 | else:
198 | item[zeroed_name] = value - first_measurement[name]
199 |
200 | def fetch_last_measurement_by_name(self, name):
201 | with closing(self.connect()) as sqlite:
202 | cursor = sqlite.cursor()
203 | cursor.execute("SELECT * FROM measurements WHERE name = ? ORDER BY timestamp DESC LIMIT 1", (name,))
204 | return cursor.fetchone()
205 |
206 | def fetch_last_measurement(self):
207 | with closing(self.connect()) as sqlite:
208 | cursor = sqlite.cursor()
209 | cursor.execute("SELECT * FROM measurements ORDER BY timestamp DESC LIMIT 1")
210 | return cursor.fetchone()
211 |
212 | def get_selected_session(self, selected):
213 | with closing(self.connect()) as sqlite:
214 | cursor = sqlite.cursor()
215 | if selected == "":
216 | session = cursor.execute("SELECT * FROM sessions ORDER BY timestamp DESC LIMIT 1").fetchone()
217 | else:
218 | session = cursor.execute("SELECT * FROM sessions WHERE id = ?", (selected,)).fetchone()
219 |
220 | return session
221 |
222 | def log(self, message):
223 | with closing(self.connect()) as sqlite:
224 | cursor = sqlite.cursor()
225 | cursor.execute("INSERT INTO logs (message) VALUES (?)", (message,))
226 |
227 | def fetch_log(self):
228 | with closing(self.connect()) as sqlite:
229 | cursor = sqlite.cursor()
230 | cursor.execute("SELECT message FROM logs")
231 |
232 | log = ""
233 | for row in cursor.fetchall():
234 | log += row["message"]
235 |
236 | return log
237 |
238 | def clear_log(self):
239 | with closing(self.connect()) as sqlite:
240 | cursor = sqlite.cursor()
241 | cursor.execute("DELETE FROM logs WHERE id NOT IN (SELECT id FROM logs ORDER BY id DESC LIMIT 250)")
242 |
243 | def update_status(self, status):
244 | with closing(self.connect()) as sqlite:
245 | cursor = sqlite.cursor()
246 | cursor.execute("UPDATE status SET status = ?", (status,))
247 |
248 | def fetch_status(self):
249 | with closing(self.connect()) as sqlite:
250 | cursor = sqlite.cursor()
251 | cursor.execute("SELECT status FROM status")
252 | return cursor.fetchone()["status"]
253 |
254 | def create_session(self, name, version):
255 | with closing(self.connect()) as sqlite:
256 | cursor = sqlite.cursor()
257 | cursor.execute("INSERT INTO sessions (name, version, timestamp) VALUES (?, ?, ?)", (name, version, time()))
258 | return cursor.lastrowid
259 |
260 | def backup(self):
261 | path = self.parameters["database"]
262 | backup_path = "%s.backup-%s" % (path, pendulum.now().format("YYYY-MM-DD_HH-mm-ss"))
263 | if os.path.exists(path):
264 | shutil.copy(path, backup_path)
265 |
--------------------------------------------------------------------------------
/webapp/backend.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import json
3 | import logging
4 | import os
5 | import re
6 | import subprocess
7 | import sys
8 | from threading import Thread
9 | from time import time, sleep
10 | from timeit import default_timer as timer
11 | import traceback
12 |
13 | import bluetooth
14 | import pendulum
15 | from serial.tools.list_ports import comports
16 | from socketio import Namespace
17 |
18 | from interfaces.interface import FatalErrorException
19 | from interfaces.tc import TcBleInterface
20 | from interfaces.wrapper import Wrapper
21 | from utils.config import Config
22 | from utils.converter import Converter
23 | from utils.formatting import Format
24 | from utils.storage import Storage
25 |
26 |
27 | class Backend(Namespace):
28 | config = None
29 |
    def __init__(self, on_receive, on_receive_interval):
        """Set up the socket.io namespace and its background daemon.

        :param on_receive: callback handed to the Daemon
        :param on_receive_interval: interval callback handed to the Daemon
        """
        # NOTE(review): exact callback semantics assumed from names — confirm in Daemon.
        super().__init__()
        self.daemon = Daemon(self, on_receive, on_receive_interval)
        # May immediately open a device connection when auto_connect is configured.
        self.handle_auto_connect()
34 |
35 | def handle_auto_connect(self):
36 | config = Config()
37 | setup = config.read("setup")
38 | auto_connect = self.daemon.parse_setup_option(setup, "auto_connect", str, "no")
39 | if auto_connect == "yes":
40 | self.on_open(None, config.data)
41 |
    def init(self):
        """Reload configuration from disk so each event sees fresh settings."""
        self.config = Config()
44 |
45 | def on_open(self, sid, data):
46 | self.init()
47 |
48 | if isinstance(data, str):
49 | data = json.loads(data)
50 |
51 | self.config.write("version", data["version"])
52 |
53 | if "port" in data:
54 | self.config.write("port", data["port"])
55 |
56 | if "rfcomm_address" in data:
57 | self.config.write("rfcomm_address", data["rfcomm_address"])
58 | else:
59 | data["rfcomm_address"] = self.config.read("rfcomm_address")
60 |
61 | if "ble_address" in data:
62 | self.config.write("ble_address", data["ble_address"])
63 | else:
64 | data["ble_address"] = self.config.read("ble_address")
65 |
66 | storage = Storage()
67 | last = storage.fetch_last_measurement_by_name(data["name"])
68 | if last:
69 | if time() - int(last["timestamp"]) > 3600:
70 | match = re.match(".+( [0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2})$", data["name"])
71 | if match:
72 | data["name"] = data["name"][:-len(match.group(1))]
73 | data["name"] += " " + pendulum.now().format("YYYY-MM-DD HH:mm")
74 |
75 | if not data["name"]:
76 | data["name"] = "My measurement"
77 |
78 | self.config.write("name", data["name"])
79 |
80 | try:
81 | self.config.write("rate", float(data["rate"]))
82 | except ValueError:
83 | pass
84 |
85 | rfcomm = data["version"].startswith("UM") and not data["version"].endswith("Serial")
86 | if rfcomm and ("rfcomm_address" not in data or not data["rfcomm_address"]):
87 | self.daemon.log("Bluetooth address is missing. Select address in Setup")
88 | return
89 |
90 | tc_ble = data["version"].startswith("TC") and not data["version"].endswith("USB")
91 | if tc_ble and ("ble_address" not in data or not data["ble_address"]):
92 | self.daemon.log("BLE address is missing. Select address in Setup")
93 | return
94 |
95 | self.emit("connecting")
96 | self.daemon.start()
97 |
98 | def on_scan_rfcomm(self, sid):
99 | self.init()
100 | try:
101 | result = ["Results:"]
102 |
103 | devices = bluetooth.discover_devices(lookup_names=True)
104 | if len(devices) == 0:
105 | result.append("no device found, try again")
106 |
107 | for address, name in devices:
108 | name = "%s (%s)" % (address, name)
109 | result.append("