├── .gitignore ├── README.md ├── license.md ├── pyproject.toml ├── src └── frame_sdk │ ├── __init__.py │ ├── bluetooth.py │ ├── camera.py │ ├── display.py │ ├── files.py │ ├── frame.py │ ├── library_functions.py │ ├── microphone.py │ └── motion.py └── tests ├── test_bluetooth.py ├── test_camera.py ├── test_display.py ├── test_files.py ├── test_frame.py ├── test_microphone.py └── test_motion.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # pdm 105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 106 | #pdm.lock 107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 108 | # in version control. 109 | # https://pdm.fming.dev/latest/usage/project/#working-with-version-control 110 | .pdm.toml 111 | .pdm-python 112 | .pdm-build/ 113 | 114 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 115 | __pypackages__/ 116 | 117 | # Celery stuff 118 | celerybeat-schedule 119 | celerybeat.pid 120 | 121 | # SageMath parsed files 122 | *.sage.py 123 | 124 | # Environments 125 | .env 126 | .venv 127 | env/ 128 | venv/ 129 | ENV/ 130 | env.bak/ 131 | venv.bak/ 132 | 133 | # Spyder project settings 134 | .spyderproject 135 | .spyproject 136 | 137 | # Rope project settings 138 | .ropeproject 139 | 140 | # mkdocs documentation 141 | /site 142 | 143 | # mypy 144 | .mypy_cache/ 145 | .dmypy.json 146 | dmypy.json 147 | 148 | # Pyre type checker 149 | .pyre/ 150 | 151 | # pytype static type analyzer 152 | .pytype/ 153 | 154 | # Cython debug symbols 155 | cython_debug/ 156 | 157 | # PyCharm 158 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 159 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 160 | # and can be added to the global gitignore or merged into this file. For a more nuclear 161 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 162 | #.idea/ 163 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ### Deprecated (see [Frame SDK docs](https://docs.brilliant.xyz/frame/frame-sdk/) for details) 2 | 3 | # Frame SDK for Python 4 | The Python SDK for the Frame AI glasses from [Brilliant Labs](https://github.com/brilliantlabsAR). View on [PyPI](https://pypi.org/project/frame-sdk/). 5 | 6 | ## Install 7 | 8 | ```sh 9 | pip3 install frame-sdk 10 | ``` 11 | 12 | ## Documentation 13 | 14 | Check out [the docs](https://docs.brilliant.xyz/frame/building-apps/) for complete guidance on everything you can do with the Frame. 15 | 16 | ## Relationship to `frame-utilities-for-python` 17 | 18 | The [`frame-utilities-for-python`](https://github.com/brilliantlabsAR/frame-utilities-for-python) package is for low-level communication with both Frame and Monocle devices and is a thin wrapper around the bluetooth connection, plus some internal tools that are used in the firmware preparation process. This `frame-sdk` package is a higher-level SDK that provides a more convenient way for developers to build apps for Frame. 19 | 20 | It is recommended that you use this package for new projects, unless you have a specific need to use the lower-level `frame-utilities-for-python` package. 21 | 22 | ## Examples 23 | 24 | Here's a simple example of how to use the Frame SDK to display text, take a photo, and more. 25 | 26 | ```python 27 | import asyncio 28 | from frame_sdk import Frame 29 | from frame_sdk.display import Alignment, PaletteColors 30 | from frame_sdk.camera import Quality, AutofocusType 31 | import datetime 32 | 33 | async def main(): 34 | # allow the user to pair with a specific Frame device 35 | pairing_code = input("Enter pairing code displayed on Frame (empty for any): ") 36 | 37 | # the with statement handles the connection and disconnection to Frame 38 | async with Frame(address=pairing_code) as f: 39 | # you can access the lower-level bluetooth connection via f.bluetooth, although you shouldn't need to do this often 40 | print(f"Connected: {f.bluetooth.is_connected()}") 41 | 42 | # let's get the current battery level 43 | print(f"Frame battery: {await f.get_battery_level()}%") 44 | 45 | # let's write (or overwrite) the file greeting.txt with "Hello world". 
46 | # You can provide a bytes object or convert a string with .encode() 47 | await f.files.write_file("greeting.txt", b"Hello world") 48 | 49 | # And now we read that file back. 50 | # Note that we should convert the bytearray to a string via the .decode() method. 51 | print((await f.files.read_file("greeting.txt")).decode()) 52 | 53 | # run_lua will automatically handle scripts that are too long for the MTU, so you don't need to worry about it. 54 | # It will also automatically handle responses that are too long for the MTU. 55 | await f.run_lua("frame.display.text('Hello world', 50, 100);frame.display.show()") 56 | 57 | # evaluate is equivalent to f.run_lua("print(1+2)", await_print=True) 58 | # It also automatically handles responses that are too long for the MTU. 59 | print(await f.evaluate("1+2")) 60 | 61 | print("Tap the Frame to continue...") 62 | await f.display.show_text("Tap the Frame to take a photo", align=Alignment.MIDDLE_CENTER) 63 | await f.motion.wait_for_tap() 64 | 65 | # take a photo and save to disk 66 | await f.display.show_text("Taking photo...", align=Alignment.MIDDLE_CENTER) 67 | await f.camera.save_photo("frame-test-photo.jpg") 68 | await f.display.show_text("Photo saved!", align=Alignment.MIDDLE_CENTER, color=PaletteColors.GREEN) 69 | # or with more control 70 | await f.camera.save_photo("frame-test-photo-2.jpg", autofocus_seconds=3, quality=Quality.HIGH, autofocus_type=AutofocusType.CENTER_WEIGHTED, resolution=720, pan=-100) 71 | # or get the raw bytes 72 | photo_bytes = await f.camera.take_photo(autofocus_seconds=1) 73 | 74 | print("About to record until you stop talking") 75 | await f.display.show_text("Say something...", align=Alignment.MIDDLE_CENTER) 76 | # record audio to a file 77 | length = await f.microphone.save_audio_file("test-audio.wav") 78 | print(f"Recorded {length:01.1f} seconds: \"./test-audio.wav\"") 79 | await f.display.show_text(f"Recorded {length:01.1f} seconds", align=Alignment.MIDDLE_CENTER) 80 | await asyncio.sleep(3) 81 | 82 | # or get the audio directly in memory 83 | await f.display.show_text("Say something else...", align=Alignment.MIDDLE_CENTER) 84 | audio_data = await f.microphone.record_audio(max_length_in_seconds=10) 85 | await f.display.show_text(f"Playing back {len(audio_data) / f.microphone.sample_rate:01.1f} seconds of audio", align=Alignment.MIDDLE_CENTER) 86 | # you can play back the audio on your computer 87 | f.microphone.play_audio_background(audio_data) 88 | # or process it using other audio handling libraries, upload to a speech-to-text service, etc.
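        # For example, a rough loudness check with numpy (already an SDK dependency).
        # This sketch assumes record_audio() returns a numpy array of signed samples;
        # check the SDK docs for the exact dtype and shape.
        import numpy as np
        rms = float(np.sqrt(np.mean(np.square(audio_data.astype(np.float32)))))
        print(f"Approximate RMS level of the recording: {rms:0.1f}")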
89 | 90 | print("Move around to track intensity of your motion") 91 | await f.display.show_text("Move around to track intensity of your motion", align=Alignment.MIDDLE_CENTER) 92 | intensity_of_motion = 0 93 | prev_direction = await f.motion.get_direction() 94 | for _ in range(10): 95 | await asyncio.sleep(0.1) 96 | direction = await f.motion.get_direction() 97 | intensity_of_motion = max(intensity_of_motion, (direction-prev_direction).amplitude()) 98 | prev_direction = direction 99 | print(f"Intensity of motion: {intensity_of_motion:01.2f}") 100 | await f.display.show_text(f"Intensity of motion: {intensity_of_motion:01.2f}", align=Alignment.MIDDLE_CENTER) 101 | print("Tap the Frame to continue...") 102 | await f.motion.wait_for_tap() 103 | 104 | # Show the full palette 105 | width = 640 // 4 106 | height = 400 // 4 107 | for color in range(0, 16): 108 | tile_x = (color % 4) 109 | tile_y = (color // 4) 110 | await f.display.draw_rect(tile_x*width+1, tile_y*height+1, width, height, PaletteColors(color)) 111 | await f.display.write_text(f"{color}", tile_x*width+width//2+1, tile_y*height+height//2+1) 112 | await f.display.show() 113 | 114 | print("Tap the Frame to continue...") 115 | await f.motion.wait_for_tap() 116 | 117 | # scroll some long text 118 | await f.display.scroll_text("Never gonna give you up\nNever gonna let you down\nNever gonna run around and desert you\nNever gonna make you cry\nNever gonna say goodbye\nNever gonna tell a lie and hurt you") 119 | 120 | # display battery indicator and time as a home screen 121 | batteryPercent = await f.get_battery_level() 122 | # select a battery fill color from the default palette based on level 123 | color = PaletteColors.RED if batteryPercent < 20 else PaletteColors.YELLOW if batteryPercent < 50 else PaletteColors.GREEN 124 | # specify the size of the battery indicator in the top-right 125 | batteryWidth = 150 126 | batteryHeight = 75 127 | # draw the endcap of the battery 128 | await f.display.draw_rect(640-32,40 + batteryHeight//2-8, 32, 16, PaletteColors.WHITE) 129 | # draw the battery outline 130 | await f.display.draw_rect_filled(640-16-batteryWidth, 40-8, batteryWidth+16, batteryHeight+16, 1, PaletteColors.WHITE, PaletteColors.YELLOW) 131 | # fill the battery based on level 132 | await f.display.draw_rect(640-8-batteryWidth, 40, int(batteryWidth * 0.01 * batteryPercent), batteryHeight, color) 133 | # write the battery level 134 | await f.display.write_text(f"{batteryPercent}%", 640-8-batteryWidth, 40, batteryWidth, batteryHeight, Alignment.MIDDLE_CENTER) 135 | # write the time and date in the center of the screen 136 | await f.display.write_text(datetime.datetime.now().strftime("%#I:%M %p\n%a, %B %d, %Y").lstrip("0"), align=Alignment.MIDDLE_CENTER) 137 | # now show what we've been drawing to the buffer 138 | await f.display.show() 139 | 140 | # set a wake screen via script, so when you tap to wake the frame, it shows the battery and time 141 | await f.run_on_wake("""frame.display.text('Battery: ' .. frame.battery_level() .. '%', 10, 10); 142 | if frame.time.utc() > 10000 then 143 | local time_now = frame.time.date(); 144 | frame.display.text(time_now['hour'] .. ':' .. time_now['minute'], 300, 160); 145 | frame.display.text(time_now['month'] .. '/' .. time_now['day'] .. '/' .. 
time_now['year'], 300, 220) 146 | end; 147 | frame.display.show(); 148 | frame.sleep(10); 149 | frame.display.text(' ',1,1); 150 | frame.display.show(); 151 | frame.sleep()""") 152 | 153 | # tell frame to sleep after 10 seconds then clear the screen and go to sleep, without blocking for that 154 | await f.run_lua("frame.sleep(10);frame.display.text(' ',1,1);frame.display.show();frame.sleep()") 155 | 156 | # clean disconnection so next connect() succeeds 157 | await f.bluetooth.disconnect() 158 | 159 | 160 | print("disconnected") 161 | 162 | 163 | 164 | asyncio.run(main()) 165 | 166 | ``` 167 | 168 | ## Tests 169 | 170 | To run the unit tests, ensure you have pytest installed: 171 | 172 | ```sh 173 | pip3 install pytest 174 | ``` 175 | 176 | With a Frame device in range, run: 177 | 178 | ```sh 179 | python3 -m pytest tests/* 180 | ``` 181 | 182 | Note that one of the audio playback tests fails on Windows. 183 | -------------------------------------------------------------------------------- /license.md: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 Roger 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["hatchling"] 3 | build-backend = "hatchling.build" 4 | 5 | [project] 6 | name = "frame-sdk" 7 | version = "1.2.4" 8 | authors = [{ name = "Roger Pincombe", email = "pip@betechie.com" },{ name = "Brilliant Labs", email = "info@brilliant.xyz" }] 9 | description = "Python Developer SDK for Brilliant Frame glasses" 10 | readme = "README.md" 11 | requires-python = ">=3.7" 12 | license = { file = "license.md" } 13 | classifiers = [ 14 | "Programming Language :: Python :: 3", 15 | "Operating System :: OS Independent", 16 | ] 17 | dependencies = ["bleak", "exif", "numpy", "simpleaudio"] 18 | 19 | [project.urls] 20 | "Homepage" = "https://github.com/brilliantlabsAR/frame-sdk-python" 21 | "Bug Tracker" = "https://github.com/brilliantlabsAR/frame-sdk-python/issues" 22 | -------------------------------------------------------------------------------- /src/frame_sdk/__init__.py: -------------------------------------------------------------------------------- 1 | __all__ = ["bluetooth", "files", "frame", "display", "camera"] 2 | 3 | from .bluetooth import Bluetooth 4 | from .files import Files 5 | from .frame import Frame 6 | from .display import Display 7 | from .camera import Camera 8 | -------------------------------------------------------------------------------- /src/frame_sdk/bluetooth.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from typing import Optional, Callable, List, Tuple, Dict, Any 3 | from enum import Enum 4 | from bleak import BleakClient, BleakScanner, BleakError 5 | 6 | _FRAME_DATA_PREFIX = 1 7 | 8 | class FrameDataTypePrefixes(Enum): 9 | LONG_DATA = 0x01 10 | LONG_DATA_END = 0x02 11 | WAKE = 0x03 12 | TAP = 0x04 13 | MIC_DATA = 0x05 14 | DEBUG_PRINT = 0x06 15 | LONG_TEXT = 0x0A 16 | LONG_TEXT_END = 0x0B 17 | 18 | @property 19 | def value_as_hex(self): 20 | return f'{self.value:02x}' 21 | 22 | 23 | class Bluetooth: 24 | """ 25 | Frame bluetooth class for managing a connection and transferring data to and 26 | from the device. 
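    Example (a minimal sketch added for illustration, assuming a Frame device is
    powered on and in range; most applications should use the higher-level `Frame`
    class and only drop down to `Bluetooth` when needed):

        import asyncio
        from frame_sdk.bluetooth import Bluetooth

        async def demo():
            b = Bluetooth()
            address = await b.connect(print_debugging=False, default_timeout=10.0)
            print("Connected to " + address)
            # run a short Lua snippet and wait for its printed result
            print(await b.send_lua("print('hello from Frame')", await_print=True))
            await b.disconnect()

        asyncio.run(demo())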
27 | """ 28 | 29 | _SERVICE_UUID: str = "7a230001-5475-a6a4-654c-8431f6ad49c4" 30 | _TX_CHARACTERISTIC_UUID: str = "7a230002-5475-a6a4-654c-8431f6ad49c4" 31 | _RX_CHARACTERISTIC_UUID: str = "7a230003-5475-a6a4-654c-8431f6ad49c4" 32 | 33 | def __init__(self): 34 | self._btle_client: Optional[BleakClient] = None 35 | self._tx_characteristic: Optional[Any] = None 36 | self._user_disconnect_handler: Callable[[], None] = lambda: None 37 | 38 | self._max_receive_buffer: int = 10 * 1024 * 1024 39 | self._print_debugging: bool = False 40 | self._default_timeout: float = 10.0 41 | 42 | self._last_print_response: str = "" 43 | self._ongoing_print_response: Optional[bytearray] = None 44 | self._ongoing_print_response_chunk_count: Optional[int] = None 45 | self._print_response_event: asyncio.Event = asyncio.Event() 46 | self._user_print_response_handler: Callable[[str], None] = lambda _: None 47 | 48 | self._last_data_response: bytes = bytes() 49 | self._ongoing_data_response: Optional[bytearray] = None 50 | self._ongoing_data_response_chunk_count: Optional[int] = None 51 | self._data_response_event: asyncio.Event = asyncio.Event() 52 | self._user_data_response_handlers: Dict[FrameDataTypePrefixes, Callable[[bytes], None]] = {} 53 | 54 | 55 | def _disconnect_handler(self, _: Any) -> None: 56 | """Called internally when the bluetooth connection is lost. To add your own handler, supply a `disconnect_handler` when connecting. 57 | """ 58 | self._user_disconnect_handler() 59 | self.__init__() 60 | 61 | 62 | async def _notification_handler(self, _: Any, data: bytearray) -> None: 63 | """Called internally when a notification is received from the device. To add your own handlers, call `register_data_response_handler()` and/or `register_print_response_handler()` when connecting. 
64 | 65 | Args: 66 | data (bytearray): The data received from the device as raw bytes 67 | """ 68 | if data[0] == FrameDataTypePrefixes.LONG_TEXT.value: 69 | # start of long printed data from prntLng() function 70 | if self._ongoing_print_response is None or self._ongoing_print_response_chunk_count is None: 71 | self._ongoing_print_response = bytearray() 72 | self._ongoing_print_response_chunk_count = 0 73 | if self._print_debugging: 74 | print("Starting receiving new long printed string") 75 | self._ongoing_print_response += data[1:] 76 | self._ongoing_print_response_chunk_count += 1 77 | if self._print_debugging: 78 | print(f"Received chunk #{self._ongoing_print_response_chunk_count}: "+data[1:].decode()) 79 | if len(self._ongoing_print_response) > self._max_receive_buffer: 80 | raise Exception(f"Buffered received long printed string is more than {self._max_receive_buffer} bytes") 81 | 82 | elif data[0] == FrameDataTypePrefixes.LONG_TEXT_END.value: 83 | # end of long printed data from prntLng() function 84 | total_expected_chunk_count_as_string: str = data[1:].decode() 85 | if len(total_expected_chunk_count_as_string) > 0: 86 | total_expected_chunk_count: int = int(total_expected_chunk_count_as_string) 87 | if self._print_debugging: 88 | print(f"Received final string chunk count: {total_expected_chunk_count}") 89 | if self._ongoing_print_response_chunk_count != total_expected_chunk_count: 90 | raise Exception(f"Chunk count mismatch in long received string (expected {total_expected_chunk_count}, got {self._ongoing_print_response_chunk_count})") 91 | self._last_print_response = self._ongoing_print_response.decode() 92 | self._print_response_event.set() 93 | self._ongoing_print_response = None 94 | self._ongoing_print_response_chunk_count = None 95 | if self._print_debugging: 96 | print("Finished receiving long printed string: "+self._last_print_response) 97 | self._user_print_response_handler(self._last_print_response) 98 | 99 | elif data[0] == _FRAME_DATA_PREFIX and data[1] == FrameDataTypePrefixes.LONG_DATA.value: 100 | # start of long raw data from frame.bluetooth.send("\001"..data) 101 | if self._ongoing_data_response is None or self._ongoing_data_response_chunk_count is None: 102 | self._ongoing_data_response = bytearray() 103 | self._ongoing_data_response_chunk_count = 0 104 | self._last_data_response = None 105 | if self._print_debugging: 106 | print("Starting receiving new long raw data") 107 | self._ongoing_data_response += data[2:] 108 | self._ongoing_data_response_chunk_count += 1 109 | if self._print_debugging: 110 | print(f"Received data chunk #{self._ongoing_data_response_chunk_count}: {len(data[2:])} bytes") 111 | if len(self._ongoing_data_response) > self._max_receive_buffer: 112 | raise Exception(f"Buffered received long raw data is more than {self._max_receive_buffer} bytes") 113 | 114 | elif data[0] == _FRAME_DATA_PREFIX and data[1] == FrameDataTypePrefixes.LONG_DATA_END.value: 115 | # end of long raw data from frame.bluetooth.send("\002"..chunkCount) 116 | total_expected_chunk_count_as_string: str = data[2:].decode() 117 | if len(total_expected_chunk_count_as_string) > 0: 118 | total_expected_chunk_count: int = int(total_expected_chunk_count_as_string) 119 | if self._print_debugging: 120 | print(f"Received final data chunk count: {total_expected_chunk_count}") 121 | if self._ongoing_data_response_chunk_count != total_expected_chunk_count: 122 | raise Exception(f"Chunk count mismatch in long received data (expected {total_expected_chunk_count}, got 
{self._ongoing_data_response_chunk_count})") 123 | self._last_data_response = bytes(self._ongoing_data_response) 124 | self._data_response_event.set() 125 | self._ongoing_data_response = None 126 | self._ongoing_data_response_chunk_count = None 127 | if self._print_debugging: 128 | if self._last_data_response is None: 129 | print("Finished receiving long raw data: No data") 130 | else: 131 | print(f"Finished receiving long raw data: {len(self._last_data_response)} bytes") 132 | self.call_data_response_handlers(self._last_data_response) 133 | 134 | elif data[0] == _FRAME_DATA_PREFIX: 135 | # received single chunk raw data from frame.bluetooth.send(data) 136 | if self._print_debugging: 137 | print(f"Received data: {len(data[1:])} bytes") 138 | self._last_data_response = data[1:] 139 | self._data_response_event.set() 140 | self.call_data_response_handlers(data[1:]) 141 | 142 | else: 143 | # received single chunk printed text from print() 144 | self._last_print_response = data.decode() 145 | if self._print_debugging: 146 | print(f"Received printed string: {self._last_print_response}") 147 | self._print_response_event.set() 148 | self._user_print_response_handler(data.decode()) 149 | 150 | def register_data_response_handler(self, prefix: FrameDataTypePrefixes = None, handler: Callable[[bytes], None] = None) -> None: 151 | """Registers a data response handler which will be called when data is received from the device that starts with the specified prefix.""" 152 | if handler is None: 153 | self._user_data_response_handlers.pop(prefix, None) 154 | else: 155 | if handler.__code__.co_argcount == 0: 156 | self._user_data_response_handlers[prefix] = lambda _: handler() 157 | else: 158 | self._user_data_response_handlers[prefix] = handler 159 | 160 | def call_data_response_handlers(self, data: bytes) -> None: 161 | """Calls all data response handlers which match the received data.""" 162 | for prefix, handler in self._user_data_response_handlers.items(): 163 | if prefix is None or (len(data) > 0 and data[0] == prefix.value): 164 | if handler is not None: 165 | handler(data[1:]) 166 | 167 | @property 168 | def print_response_handler(self) -> Callable[[str], None]: 169 | """Gets the print response handler which would be called when a print response is received.""" 170 | return self._user_print_response_handler 171 | 172 | @print_response_handler.setter 173 | def print_response_handler(self, handler: Callable[[str], None]) -> None: 174 | """Sets the print response handler which will be called when a print response is received. This is an alternative to using `wait_for_print()`, to support asynchronous print handling. 175 | 176 | Args: 177 | handler (Callable[[str], None]): The handler function to be called when a print response is received. 178 | """ 179 | if handler is None: 180 | self._user_print_response_handler = lambda _: None 181 | else: 182 | self._user_print_response_handler = handler 183 | 184 | async def connect( 185 | self, 186 | address: Optional[str] = None, 187 | print_debugging: bool = False, 188 | default_timeout: float = 10.0, 189 | ) -> str: 190 | """ 191 | Connects to the nearest Frame device. 192 | `address` can optionally be provided either as the 2 digit ID shown on 193 | Frame, or the device's full address (note that on MacOS, this is a 194 | system generated UUID not the devices real MAC address) in order to only 195 | connect to that specific device. 
The value should be a string, for 196 | example `"4F"` or `"78D97B6B-244B-AC86-047F-BBF72ADEB1F5"` 197 | `print_debugging` will output the raw bytes that are sent and received from Frame if set to True. 198 | `default_timeout` is the default timeout for waiting for a response from Frame, in seconds. Defaults to 10 seconds. 199 | 200 | returns the device address as a string. On MacOS, this is a unique UUID 201 | generated for that specific device. It can be used in the `address` 202 | parameter to only reconnect to that specific device. 203 | """ 204 | self._print_debugging = print_debugging 205 | self._default_timeout = default_timeout 206 | 207 | # returns list of (BLEDevice, AdvertisementData) 208 | devices: Dict[str, Tuple[Any, Any]] = await BleakScanner.discover(3, return_adv=True) 209 | 210 | filtered_list: List[Tuple[Any, Any]] = [] 211 | for d in devices.values(): 212 | if self._SERVICE_UUID in d[1].service_uuids: 213 | if address is None: 214 | filtered_list.append(d) 215 | 216 | # Filter by last two digits in the device name 217 | elif len(address) == 2 and isinstance(address, str): 218 | if d[0].name.lower()[-2:] == address.lower(): 219 | filtered_list.append(d) 220 | 221 | # Filter by full device address 222 | elif isinstance(address, str): 223 | if d[0].address.lower() == address.lower(): 224 | filtered_list.append(d) 225 | 226 | else: 227 | raise Exception("address should be a 2 digit hex string") 228 | 229 | # connect to closest device 230 | filtered_list.sort(key=lambda x: x[1].rssi, reverse=True) 231 | try: 232 | device: Any = filtered_list[0][0] 233 | 234 | except IndexError: 235 | if address is None: 236 | raise Exception("No Frame devices found") 237 | else: 238 | raise Exception("No Frame devices found matching address "+address) 239 | 240 | self._btle_client = BleakClient( 241 | device, 242 | disconnected_callback=self._disconnect_handler, 243 | ) 244 | 245 | try: 246 | await self._btle_client.connect() 247 | # Workaround to acquire MTU size because Bleak doesn't do it automatically when using BlueZ backend 248 | if self._btle_client._backend.__class__.__name__ == "BleakClientBlueZDBus": 249 | await self._btle_client._backend._acquire_mtu() 250 | 251 | await self._btle_client.start_notify( 252 | self._RX_CHARACTERISTIC_UUID, 253 | self._notification_handler, 254 | ) 255 | except BleakError as e: 256 | raise Exception("Device needs to be re-paired: "+str(e)) 257 | 258 | service: Any = self._btle_client.services.get_service( 259 | self._SERVICE_UUID, 260 | ) 261 | 262 | self._tx_characteristic = service.get_characteristic( 263 | self._TX_CHARACTERISTIC_UUID, 264 | ) 265 | 266 | return device.address 267 | 268 | client_name = self._btle_client._backend.__class__.__name__ 269 | if client_name == "BleakClientBlueZDBus": 270 | await self._btle_client._backend._acquire_mtu() 271 | 272 | async def disconnect(self) -> None: 273 | """ 274 | Disconnects from the device. 275 | """ 276 | await self._btle_client.disconnect() 277 | self._disconnect_handler(None) 278 | 279 | def is_connected(self) -> bool: 280 | """ 281 | Returns `True` if the device is connected. `False` otherwise. 282 | """ 283 | try: 284 | return self._btle_client.is_connected 285 | except AttributeError: 286 | return False 287 | 288 | def max_lua_payload(self) -> int: 289 | """ 290 | Returns the maximum length of a Lua string which may be transmitted. This is equal to the MTU - 3. 
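        Example (a minimal sketch, assuming `bluetooth` is an already-connected
        instance of this class):

            script = "frame.display.text('hi', 50, 100)"
            if len(script) <= bluetooth.max_lua_payload():
                await bluetooth.send_lua(script)
            else:
                # too long for a single transmission; prefer Frame.run_lua(),
                # which chunks long scripts automatically
                pass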
291 | """ 292 | try: 293 | return self._btle_client.mtu_size - 3 294 | except AttributeError: 295 | return 0 296 | 297 | def max_data_payload(self) -> int: 298 | """ 299 | Returns the maximum length of a raw bytearray which may be transmitted. This is equal to the MTU - 4 (since data is prefixed with a 1 byte header). 300 | """ 301 | try: 302 | return self._btle_client.mtu_size - 4 303 | except AttributeError: 304 | return 0 305 | 306 | @property 307 | def default_timeout(self) -> float: 308 | """ 309 | Gets the default timeout value in seconds 310 | """ 311 | return self._default_timeout 312 | 313 | @default_timeout.setter 314 | def default_timeout(self, value: float) -> None: 315 | """ 316 | Sets the default timeout value in seconds. When waiting for print or data without specifying a timeout, this value will be used as the default timeout. 317 | 318 | Args: 319 | value (float): The timeout value in seconds. Must be non-negative. 320 | """ 321 | if value < 0: 322 | raise ValueError("default_timeout must be a non-negative float") 323 | self._default_timeout = value 324 | 325 | @property 326 | def print_debugging(self) -> bool: 327 | """ 328 | Gets whether to print debugging information when sending and receiving data. 329 | """ 330 | return self._print_debugging 331 | 332 | @print_debugging.setter 333 | def print_debugging(self, value: bool) -> None: 334 | """ 335 | Sets whether to print debugging information when sending and receiving data. 336 | 337 | Args: 338 | value (bool): Whether to print debugging information. 339 | """ 340 | self._print_debugging = value 341 | 342 | async def _transmit(self, data: bytearray) -> None: 343 | """Internal function for sending raw data to the device. Instead of using this, use `send_lua()` or `send_data()` 344 | 345 | Args: 346 | data (bytearray): The data to send to the device as raw bytes 347 | 348 | Raises: 349 | Exception: If the payload length is too large 350 | """ 351 | if self._print_debugging: 352 | print(data) # TODO make this print nicer 353 | 354 | if len(data) > self._btle_client.mtu_size - 3: 355 | raise Exception(f"Payload length is too large: {len(data)} > {self._btle_client.mtu_size - 3}") 356 | 357 | await self._btle_client.write_gatt_char(self._tx_characteristic, data) 358 | 359 | async def send_lua(self, string: str, await_print: bool = False, timeout: Optional[float] = None) -> Optional[str]: 360 | """ 361 | Sends a Lua string to the device. The string length must be less than or 362 | equal to `max_lua_payload()`. 363 | 364 | In general, you'd be better off using Frame.run_lua(), which handles sending an receiving values longer that the MTU limit. This is the lower-level function to send an individual Lua string. 365 | 366 | If `await_print=True`, the function will block until a Lua print() 367 | occurs, or a timeout. 368 | 369 | Args: 370 | string (str): The Lua string to send. 371 | await_print (bool): Whether to block while waiting for a print response. 372 | timeout (Optional[float]): The timeout for waiting for a print response. If not provided, the default timeout will be used. 373 | 374 | Returns: 375 | Optional[str]: The print response if `await_print` is True, otherwise None. 
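        Example (a minimal sketch, assuming `bluetooth` is an already-connected
        instance of this class):

            # fire-and-forget: draw something, no response expected
            await bluetooth.send_lua("frame.display.text('hi', 50, 100);frame.display.show()")

            # block until the Lua print() result arrives (or the default timeout elapses)
            result = await bluetooth.send_lua("print(1 + 2)", await_print=True)
            print(result)  # "3"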
376 | """ 377 | if await_print: 378 | self._print_response_event.clear() 379 | 380 | await self._transmit(string.encode()) 381 | 382 | if await_print: 383 | return await self.wait_for_print(timeout) 384 | 385 | async def wait_for_print(self, timeout: Optional[float] = None) -> str: 386 | """ 387 | Waits until a Lua print() occurs, with a max timeout in seconds. If `timeout` is not provided, the default timeout will be used, rather than no timeout at all. 388 | 389 | Args: 390 | timeout (Optional[float]): The timeout for waiting for a print response. If not provided, the default timeout will be used. 391 | 392 | Returns: 393 | str: The last print response received. 394 | """ 395 | if timeout is None: 396 | timeout = self._default_timeout 397 | 398 | try: 399 | await asyncio.wait_for(self._print_response_event.wait(), timeout) 400 | except asyncio.TimeoutError: 401 | raise Exception(f"Frame didn't respond with printed data (from print() or prntLng()) within {timeout} seconds") 402 | 403 | self._print_response_event.clear() 404 | 405 | return self._last_print_response 406 | 407 | async def wait_for_data(self, timeout: Optional[float] = None) -> bytes: 408 | """ 409 | Waits until data has been received from the device, with a max timeout in seconds. If `timeout` is not provided, the default timeout will be used, rather than no timeout at all. 410 | 411 | Args: 412 | timeout (Optional[float]): The timeout for waiting for a data response. If not provided, the default timeout will be used. 413 | 414 | Returns: 415 | bytes: The last data response received. 416 | """ 417 | if timeout is None: 418 | timeout = self._default_timeout 419 | 420 | try: 421 | await asyncio.wait_for(self._data_response_event.wait(), timeout) 422 | except asyncio.TimeoutError: 423 | raise Exception(f"Frame didn't respond with data (from frame.bluetooth.send(data)) within {timeout} seconds") 424 | 425 | self._data_response_event.clear() 426 | 427 | return self._last_data_response 428 | 429 | async def send_data(self, data: bytearray, await_data: bool = False) -> Optional[bytes]: 430 | """ 431 | Sends raw data to the device. The payload length must be less than or 432 | equal to `max_data_payload()`. 433 | 434 | If `await_data=True`, the function will block until a data response 435 | occurs, or a timeout. 436 | 437 | Args: 438 | data (bytearray): The raw data to send. 439 | await_data (bool): Whether to block while waiting for a data response. 440 | 441 | Returns: 442 | Optional[bytes]: The data response if `await_data` is True, otherwise None. 443 | """ 444 | if await_data: 445 | self._data_response_event.clear() 446 | 447 | await self._transmit(bytearray(b"\x01") + data) 448 | 449 | if await_data: 450 | return await self.wait_for_data() 451 | 452 | async def send_reset_signal(self) -> None: 453 | """ 454 | Sends a reset signal to the device which will reset the Lua virtual 455 | machine. 456 | """ 457 | if not self.is_connected(): 458 | await self.connect() 459 | await self._transmit(bytearray(b"\x04")) 460 | 461 | async def send_break_signal(self) -> None: 462 | """ 463 | Sends a break signal to the device which will break any currently 464 | executing Lua script. 
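        Example (a minimal sketch, assuming `bluetooth` is an already-connected
        instance of this class and `asyncio` is imported):

            # start an endless Lua loop on the Frame, then interrupt it
            await bluetooth.send_lua("while true do frame.sleep(0.1) end")
            await asyncio.sleep(2)
            await bluetooth.send_break_signal()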
465 | """ 466 | if not self.is_connected(): 467 | await self.connect() 468 | await self._transmit(bytearray(b"\x03")) 469 | -------------------------------------------------------------------------------- /src/frame_sdk/camera.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import Optional, TYPE_CHECKING 3 | from exif import Image 4 | from datetime import datetime 5 | 6 | if TYPE_CHECKING: 7 | from .frame import Frame 8 | 9 | from enum import Enum 10 | 11 | class Quality(Enum): 12 | VERY_LOW = 'VERY_LOW' 13 | LOW = 'LOW' 14 | MEDIUM = 'MEDIUM' 15 | HIGH = 'HIGH' 16 | VERY_HIGH = 'VERY_HIGH' 17 | 18 | class AutofocusType(Enum): 19 | SPOT = "SPOT" 20 | AVERAGE = "AVERAGE" 21 | CENTER_WEIGHTED = "CENTER_WEIGHTED" 22 | 23 | 24 | class Camera: 25 | """Helpers for working with the Frame camera.""" 26 | 27 | 28 | frame: "Frame" = None 29 | 30 | _auto_process_photo = True 31 | 32 | def __init__(self, frame: "Frame"): 33 | """Initialize the Camera with a Frame instance.""" 34 | self.frame = frame 35 | 36 | @property 37 | def auto_process_photo(self) -> bool: 38 | """If true, the camera will automatically process the photo to correct rotation and add metadata.""" 39 | return self._auto_process_photo 40 | 41 | @auto_process_photo.setter 42 | def auto_process_photo(self, value: bool): 43 | """If true, the camera will automatically process the photo to correct rotation and add metadata.""" 44 | self._auto_process_photo = value 45 | 46 | 47 | async def take_photo(self, autofocus_seconds: Optional[int] = 3, quality: Quality = Quality.MEDIUM, autofocus_type: AutofocusType = AutofocusType.CENTER_WEIGHTED, resolution: Optional[int] = 512, pan: Optional[int] = 0) -> bytes: 48 | """Take a photo with the camera. 49 | 50 | Args: 51 | autofocus_seconds (Optional[int]): If provided, the camera will attempt to focus for the specified number of seconds. Defaults to 3. If `None`, the camera will not attempt to focus at all. 52 | quality (Quality): The quality of the photo. Defaults to Quality.MEDIUM. 53 | autofocus_type (AutofocusType): The type of autofocus. Defaults to AutofocusType.CENTER_WEIGHTED. 54 | resolution (Optional[int]): If provided, the photo resolution will be the specified square size. Valid range: 100..720. Defaults to 512. 55 | pan (Optional[int]): If provided, the photo will be panned up (negative) or down (positive) by the specified amount. Valid range: -140..140. Defaults to 0. 56 | 57 | Returns: 58 | bytes: The photo as a byte array. 59 | 60 | Raises: 61 | Exception: If the photo capture fails.
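        Example (a minimal sketch, assuming a Frame device is in range; the output
        filename is arbitrary):

            import asyncio
            from frame_sdk import Frame
            from frame_sdk.camera import Quality, AutofocusType

            async def demo():
                async with Frame() as f:
                    jpeg_bytes = await f.camera.take_photo(
                        autofocus_seconds=2,
                        quality=Quality.HIGH,
                        autofocus_type=AutofocusType.CENTER_WEIGHTED,
                    )
                    with open("photo.jpg", "wb") as out:
                        out.write(jpeg_bytes)

            asyncio.run(demo())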
62 | """ 63 | 64 | if type(quality) == str: 65 | quality = Quality(quality) 66 | if type(autofocus_type) == int: 67 | autofocus_type = AutofocusType(autofocus_type) 68 | 69 | await self.frame.bluetooth.send_lua(f"cameraCaptureAndSend('{quality.value}',{autofocus_seconds or 'nil'},'{autofocus_type.value}',{resolution},{pan})") 70 | image_buffer = await self.frame.bluetooth.wait_for_data() 71 | 72 | if image_buffer is None or len(image_buffer) == 0: 73 | raise Exception("Failed to get photo") 74 | 75 | while image_buffer[0] == 0x04 and len(image_buffer) < 5: 76 | print("Ignoring tap data while waiting for photo") 77 | image_buffer = await self.frame.bluetooth.wait_for_data() 78 | 79 | if image_buffer is None or len(image_buffer) == 0: 80 | raise Exception("Failed to get photo") 81 | 82 | if self.auto_process_photo: 83 | image_buffer = self.process_photo(image_buffer, autofocus_type) 84 | return image_buffer 85 | 86 | async def save_photo(self, filename: str, autofocus_seconds: Optional[int] = 3, quality: Quality = Quality.MEDIUM, autofocus_type: AutofocusType = AutofocusType.AVERAGE, resolution: Optional[int] = 512, pan: Optional[int] = 0): 87 | """Save a photo to a file. 88 | 89 | Args: 90 | filename (str): The name of the file to save the photo. The file will always be saved as a jpeg image regardless of the file extension. 91 | autofocus_seconds (Optional[int]): If provided, the camera will attempt to focus for the specified number of seconds. Defaults to 3. If `None`, the camera will not attempt to focus at all. 92 | quality (Quality): The quality of the photo. Defaults to Quality.MEDIUM. 93 | autofocus_type (AutofocusType): The type of autofocus. Defaults to AutofocusType.AVERAGE. 94 | resolution (Optional[int]): If provided, the photo resolution will be the specified square size. Valid range: 100..720. Defaults to 512. 95 | pan (Optional[int]): If provided, the photo will be panned up (negative) or down (positive) by the specified amount. Valid range: -140..140. Defaults to 0. 96 | """ 97 | image_buffer = await self.take_photo(autofocus_seconds, quality, autofocus_type, resolution, pan) 98 | 99 | with open(filename, "wb") as f: 100 | f.write(image_buffer) 101 | 102 | def process_photo(self, image_buffer: bytes, autofocus_type: AutofocusType) -> bytes: 103 | """Process a photo to correct rotation and add metadata. 104 | 105 | Args: 106 | image_buffer (bytes): The photo as a byte array. 107 | autofocus_type (AutofocusType): The type of autofocus that was used to capture the photo. 108 | 109 | Returns: 110 | bytes: The processed photo as a byte array. 
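        Example (a minimal sketch, assuming `f` is a connected Frame instance; this
        is only needed when `auto_process_photo` is False, otherwise `take_photo()`
        already calls this for you):

            f.camera.auto_process_photo = False
            raw = await f.camera.take_photo()
            processed = f.camera.process_photo(raw, AutofocusType.CENTER_WEIGHTED)
            with open("processed.jpg", "wb") as out:
                out.write(processed)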
111 | """ 112 | image = Image(image_buffer) 113 | image.orientation = 8 114 | image.make = "Brilliant Labs" 115 | image.model = "Frame" 116 | image.software = "Frame Python SDK" 117 | if autofocus_type == AutofocusType.AVERAGE: 118 | image.metering_mode = 1 119 | elif autofocus_type == AutofocusType.CENTER_WEIGHTED: 120 | image.metering_mode = 2 121 | elif autofocus_type == AutofocusType.SPOT: 122 | image.metering_mode = 3 123 | image.datetime_original = datetime.now().strftime("%Y:%m:%d %H:%M:%S") 124 | return image.get_file() -------------------------------------------------------------------------------- /src/frame_sdk/display.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | import asyncio 3 | from typing import Optional, TYPE_CHECKING 4 | from enum import Enum 5 | 6 | if TYPE_CHECKING: 7 | from .frame import Frame 8 | 9 | class Alignment(Enum): 10 | """Enum for text alignment options.""" 11 | TOP_LEFT = 'top_left' 12 | TOP_CENTER = 'top_center' 13 | TOP_RIGHT = 'top_right' 14 | MIDDLE_LEFT = 'middle_left' 15 | MIDDLE_CENTER = 'middle_center' 16 | MIDDLE_RIGHT = 'middle_right' 17 | BOTTOM_LEFT = 'bottom_left' 18 | BOTTOM_CENTER = 'bottom_center' 19 | BOTTOM_RIGHT = 'bottom_right' 20 | 21 | char_width_mapping = { 22 | 0x000020: 13, 23 | 0x000021: 5, 24 | 0x000022: 13, 25 | 0x000023: 19, 26 | 0x000024: 17, 27 | 0x000025: 34, 28 | 0x000026: 20, 29 | 0x000027: 5, 30 | 0x000028: 10, 31 | 0x000029: 11, 32 | 0x00002A: 21, 33 | 0x00002B: 19, 34 | 0x00002C: 8, 35 | 0x00002D: 17, 36 | 0x00002E: 6, 37 | 0x000030: 18, 38 | 0x000031: 16, 39 | 0x000032: 16, 40 | 0x000033: 15, 41 | 0x000034: 18, 42 | 0x000035: 15, 43 | 0x000036: 17, 44 | 0x000037: 15, 45 | 0x000038: 18, 46 | 0x000039: 17, 47 | 0x00003A: 6, 48 | 0x00003B: 8, 49 | 0x00003C: 19, 50 | 0x00003D: 19, 51 | 0x00003E: 19, 52 | 0x00003F: 14, 53 | 0x000040: 31, 54 | 0x000041: 22, 55 | 0x000042: 18, 56 | 0x000043: 16, 57 | 0x000044: 19, 58 | 0x000045: 17, 59 | 0x000046: 17, 60 | 0x000047: 18, 61 | 0x000048: 19, 62 | 0x000049: 12, 63 | 0x00004A: 14, 64 | 0x00004B: 19, 65 | 0x00004C: 16, 66 | 0x00004D: 23, 67 | 0x00004E: 19, 68 | 0x00004F: 20, 69 | 0x000050: 18, 70 | 0x000051: 22, 71 | 0x000052: 20, 72 | 0x000053: 17, 73 | 0x000054: 20, 74 | 0x000055: 19, 75 | 0x000056: 21, 76 | 0x000057: 23, 77 | 0x000058: 21, 78 | 0x000059: 23, 79 | 0x00005A: 17, 80 | 0x00005B: 9, 81 | 0x00005C: 15, 82 | 0x00005D: 10, 83 | 0x00005E: 20, 84 | 0x00005F: 25, 85 | 0x000060: 11, 86 | 0x000061: 19, 87 | 0x000062: 18, 88 | 0x000063: 13, 89 | 0x000064: 18, 90 | 0x000065: 16, 91 | 0x000066: 15, 92 | 0x000067: 20, 93 | 0x000068: 18, 94 | 0x000069: 5, 95 | 0x00006A: 11, 96 | 0x00006B: 18, 97 | 0x00006C: 8, 98 | 0x00006D: 28, 99 | 0x00006E: 18, 100 | 0x00006F: 18, 101 | 0x000070: 18, 102 | 0x000071: 18, 103 | 0x000072: 11, 104 | 0x000073: 15, 105 | 0x000074: 14, 106 | 0x000075: 17, 107 | 0x000076: 19, 108 | 0x000077: 30, 109 | 0x000078: 20, 110 | 0x000079: 20, 111 | 0x00007A: 16, 112 | 0x00007B: 12, 113 | 0x00007C: 5, 114 | 0x00007D: 12, 115 | 0x00007E: 17, 116 | 0x0000A1: 6, 117 | 0x0000A2: 14, 118 | 0x0000A3: 18, 119 | 0x0000A5: 22, 120 | 0x0000A9: 28, 121 | 0x0000AB: 17, 122 | 0x0000AE: 29, 123 | 0x0000B0: 15, 124 | 0x0000B1: 20, 125 | 0x0000B5: 17, 126 | 0x0000B7: 6, 127 | 0x0000BB: 17, 128 | 0x0000BF: 14, 129 | 0x0000C0: 22, 130 | 0x0000C1: 23, 131 | 0x0000C2: 23, 132 | 0x0000C3: 23, 133 | 0x0000C4: 23, 134 | 0x0000C5: 23, 135 | 0x0000C6: 32, 136 | 0x0000C7: 16, 137 | 0x0000C8: 17, 138 | 
0x0000C9: 16, 139 | 0x0000CA: 17, 140 | 0x0000CB: 17, 141 | 0x0000CC: 12, 142 | 0x0000CD: 11, 143 | 0x0000CE: 16, 144 | 0x0000CF: 15, 145 | 0x0000D0: 22, 146 | 0x0000D1: 19, 147 | 0x0000D2: 20, 148 | 0x0000D3: 20, 149 | 0x0000D4: 20, 150 | 0x0000D5: 20, 151 | 0x0000D6: 20, 152 | 0x0000D7: 18, 153 | 0x0000D8: 20, 154 | 0x0000D9: 19, 155 | 0x0000DA: 19, 156 | 0x0000DB: 19, 157 | 0x0000DC: 19, 158 | 0x0000DD: 22, 159 | 0x0000DE: 18, 160 | 0x0000DF: 19, 161 | 0x0000E0: 19, 162 | 0x0000E1: 19, 163 | 0x0000E2: 19, 164 | 0x0000E3: 19, 165 | 0x0000E4: 19, 166 | 0x0000E5: 19, 167 | 0x0000E6: 29, 168 | 0x0000E7: 14, 169 | 0x0000E8: 17, 170 | 0x0000E9: 16, 171 | 0x0000EA: 17, 172 | 0x0000EB: 17, 173 | 0x0000EC: 11, 174 | 0x0000ED: 11, 175 | 0x0000EE: 16, 176 | 0x0000EF: 15, 177 | 0x0000F0: 18, 178 | 0x0000F1: 16, 179 | 0x0000F2: 18, 180 | 0x0000F3: 18, 181 | 0x0000F4: 18, 182 | 0x0000F5: 17, 183 | 0x0000F6: 18, 184 | 0x0000F7: 19, 185 | 0x0000F8: 18, 186 | 0x0000F9: 17, 187 | 0x0000FA: 17, 188 | 0x0000FB: 16, 189 | 0x0000FC: 17, 190 | 0x0000FD: 20, 191 | 0x0000FE: 18, 192 | 0x0000FF: 20, 193 | 0x000131: 5, 194 | 0x000141: 19, 195 | 0x000142: 10, 196 | 0x000152: 30, 197 | 0x000153: 30, 198 | 0x000160: 17, 199 | 0x000161: 15, 200 | 0x000178: 22, 201 | 0x00017D: 18, 202 | 0x00017E: 17, 203 | 0x000192: 16, 204 | 0x0020AC: 18, 205 | 0x0F0000: 70, 206 | 0x0F0001: 70, 207 | 0x0F0002: 70, 208 | 0x0F0003: 70, 209 | 0x0F0004: 91, 210 | 0x0F0005: 70, 211 | 0x0F0006: 70, 212 | 0x0F0007: 70, 213 | 0x0F0008: 70, 214 | 0x0F0009: 70, 215 | 0x0F000A: 70, 216 | 0x0F000B: 70, 217 | 0x0F000C: 70, 218 | 0x0F000D: 70, 219 | 0x0F000E: 77, 220 | 0x0F000F: 76, 221 | 0x0F0010: 70 222 | } 223 | 224 | from enum import Enum 225 | 226 | class PaletteColors(Enum): 227 | VOID = 0 228 | WHITE = 1 229 | GRAY = 2 230 | RED = 3 231 | PINK = 4 232 | DARKBROWN = 5 233 | BROWN = 6 234 | ORANGE = 7 235 | YELLOW = 8 236 | DARKGREEN = 9 237 | GREEN = 10 238 | LIGHTGREEN = 11 239 | NIGHTBLUE = 12 240 | SEABLUE = 13 241 | SKYBLUE = 14 242 | CLOUDBLUE = 15 243 | 244 | 245 | class Display: 246 | """Displays text and graphics on the Frame display.""" 247 | 248 | frame: "Frame" = None 249 | 250 | color_palette_mapping = { 251 | PaletteColors.VOID: (0, 0, 0), 252 | PaletteColors.WHITE: (255, 255, 255), 253 | PaletteColors.GRAY: (157, 157, 157), 254 | PaletteColors.RED: (190, 38, 51), 255 | PaletteColors.PINK: (224, 111, 139), 256 | PaletteColors.DARKBROWN: (73, 60, 43), 257 | PaletteColors.BROWN: (164, 100, 34), 258 | PaletteColors.ORANGE: (235, 137, 49), 259 | PaletteColors.YELLOW: (247, 226, 107), 260 | PaletteColors.DARKGREEN: (47, 72, 78), 261 | PaletteColors.GREEN: (68, 137, 26), 262 | PaletteColors.LIGHTGREEN: (163, 206, 39), 263 | PaletteColors.NIGHTBLUE: (27, 38, 50), 264 | PaletteColors.SEABLUE: (0, 87, 132), 265 | PaletteColors.SKYBLUE: (49, 162, 242), 266 | PaletteColors.CLOUDBLUE: (178, 220, 239), 267 | } 268 | 269 | _line_height = 60 270 | _char_spacing = 4 271 | 272 | @property 273 | def line_height(self) -> int: 274 | """Gets the height of each line of text in pixels. It is 60 by default, however you may override that value to change the vertical spacing of the text in all text displaying functions.""" 275 | return self._line_height 276 | 277 | @line_height.setter 278 | def line_height(self, value: int): 279 | """Sets the height of each line of text in pixels. 
It is 60 by default, however you may override that value to change the vertical spacing of the text in all text displaying functions.""" 280 | if value < 0: 281 | raise ValueError("line_height must be a non-negative integer") 282 | self._line_height = value 283 | 284 | @property 285 | def char_spacing(self) -> int: 286 | """Gets the spacing between characters in pixels. It is 4 by default.""" 287 | return self._char_spacing 288 | 289 | @char_spacing.setter 290 | def char_spacing(self, value: int): 291 | """Sets the spacing between characters in pixels.""" 292 | if value < 0: 293 | raise ValueError("char_spacing must be a non-negative integer") 294 | self._char_spacing = value 295 | 296 | def __init__(self, frame: "Frame"): 297 | """ 298 | Initialize the Display class. 299 | 300 | Args: 301 | frame (Frame): The Frame object to associate with this display. 302 | """ 303 | self.frame = frame 304 | 305 | @staticmethod 306 | def parse_alignment(align: Alignment) -> tuple[str, str]: 307 | """ 308 | Parse the alignment enum to horizontal and vertical alignment strings. 309 | 310 | Args: 311 | align (Alignment): The alignment enum value. 312 | 313 | Returns: 314 | tuple[str, str]: A tuple containing horizontal and vertical alignment strings. 315 | """ 316 | alignments = { 317 | Alignment.TOP_LEFT: ("left", "top"), 318 | Alignment.TOP_CENTER: ("center", "top"), 319 | Alignment.TOP_RIGHT: ("right", "top"), 320 | Alignment.MIDDLE_LEFT: ("left", "middle"), 321 | Alignment.MIDDLE_CENTER: ("center", "middle"), 322 | Alignment.MIDDLE_RIGHT: ("right", "middle"), 323 | Alignment.BOTTOM_LEFT: ("left", "bottom"), 324 | Alignment.BOTTOM_CENTER: ("center", "bottom"), 325 | Alignment.BOTTOM_RIGHT: ("right", "bottom"), 326 | } 327 | return alignments.get(align, ("left", "top")) 328 | 329 | async def show_text(self, text: str, x: int = 1, y: int = 1, max_width: Optional[int] = 640, max_height: Optional[int] = None, align: Alignment = Alignment.TOP_LEFT, color: PaletteColors = PaletteColors.WHITE): 330 | """ 331 | Show text on the display. 332 | 333 | Args: 334 | text (str): The text to display. 335 | x (int): The left pixel position to start the text. Defaults to 1. 336 | y (int): The top pixel position to start the text. Defaults to 1. 337 | max_width (Optional[int]): The maximum width for the text bounding box. If text is wider than this, it will be word-wrapped onto multiple lines automatically. Set to the full width of the display by default (640px), but can be overridden with None/null to disable word-wrapping. 338 | max_height (Optional[int]): The maximum height for the text bounding box. If text is taller than this, it will be cut off at that height. Also useful for vertical alignment. Set to the full height of the display by default (400px). 339 | align (Alignment): The alignment of the text, both horizontally if a max_width is provided, and vertically if a max_height is provided. Can be any value in frame.display.Alignment, such as Alignment.TOP_LEFT, Alignment.MIDDLE_CENTER, etc. 340 | """ 341 | await self.write_text(text, x, y, max_width, max_height, align, color) 342 | await self.show() 343 | 344 | async def write_text(self, text: str, x: int = 1, y: int = 1, max_width: Optional[int] = 640, max_height: Optional[int] = None, align: Alignment = Alignment.TOP_LEFT, color: PaletteColors = PaletteColors.WHITE): 345 | """ 346 | Write text to the display buffer. 347 | 348 | Args: 349 | text (str): The text to write. 350 | x (int): The left pixel position to start the text. Defaults to 1. 
351 | y (int): The top pixel position to start the text. Defaults to 1. 352 | max_width (Optional[int]): The maximum width for the text bounding box. If text is wider than this, it will be word-wrapped onto multiple lines automatically. Set to the full width of the display by default (640px), but can be overridden with None/null to disable word-wrapping. 353 | max_height (Optional[int]): The maximum height for the text bounding box. If text is taller than this, it will be cut off at that height. Also useful for vertical alignment. Set to the full height of the display by default (400px). 354 | align (Alignment): The alignment of the text, both horizontally if a max_width is provided, and vertically if a max_height is provided. Can be any value in frame.display.Alignment, such as Alignment.TOP_LEFT, Alignment.MIDDLE_CENTER, etc. 355 | color (Palette_Colors): The color of the text. Defaults to Palette_Colors.WHITE. 356 | """ 357 | if max_width is not None: 358 | text = self.wrap_text(text, max_width) 359 | 360 | horizontal_align, vertical_align = self.parse_alignment(align) 361 | 362 | total_height_of_text = self.get_text_height(text) 363 | vertical_offset = 0 364 | if vertical_align == "middle": 365 | vertical_offset = (max_height if max_height is not None else (400-y)) // 2 - total_height_of_text // 2 366 | elif vertical_align == "bottom": 367 | vertical_offset = (max_height if max_height is not None else (400-y)) - total_height_of_text 368 | 369 | for line in text.split("\n"): 370 | this_line_x = x 371 | if horizontal_align == "center": 372 | this_line_x = x + (max_width if max_width is not None else (640-x)) // 2 - self.get_text_width(line) // 2 373 | elif horizontal_align == "right": 374 | this_line_x = x + (max_width if max_width is not None else (640-x)) - self.get_text_width(line) 375 | lua_to_send = f"frame.display.text(\"{self.frame.escape_lua_string(line)}\",{this_line_x},{y+vertical_offset}" 376 | if self.char_spacing != 4 or color != PaletteColors.WHITE: 377 | lua_to_send += ',{' 378 | if self.char_spacing != 4: 379 | lua_to_send += f'spacing={self.char_spacing}' 380 | if self.char_spacing != 4 and color != PaletteColors.WHITE: 381 | lua_to_send += ',' 382 | if color != PaletteColors.WHITE: 383 | lua_to_send += f'color="{color.name}"' 384 | lua_to_send += '}' 385 | lua_to_send += ')' 386 | await self.frame.run_lua(lua_to_send, checked=True) 387 | y += self.line_height 388 | if max_height is not None and y > max_height or y+vertical_offset > 640: 389 | break 390 | 391 | async def scroll_text(self, text: str, lines_per_frame: int = 5, delay: float = 0.12, color: PaletteColors = PaletteColors.WHITE): 392 | """ 393 | Scroll text vertically on the display. 394 | 395 | Args: 396 | text (str): The text to scroll. It is automatically wrapped to fit the display width. 397 | lines_per_frame (int): The number of vertical pixels to scroll per frame. Defaults to 5. Higher values scroll faster, but will be more jumpy. 398 | delay (float): The delay between frames in seconds. Defaults to 0.12 seconds. Lower values are faster, but may cause graphical glitches. 
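        Example (a minimal sketch, assuming a Frame device is in range; the text is
        just a placeholder):

            import asyncio
            from frame_sdk import Frame
            from frame_sdk.display import PaletteColors

            async def demo():
                async with Frame() as f:
                    long_text = "\n".join(f"Line {i} of some long text" for i in range(1, 31))
                    await f.display.scroll_text(long_text, lines_per_frame=4, delay=0.1, color=PaletteColors.SKYBLUE)

            asyncio.run(demo())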
399 | """ 400 | margin = self.line_height 401 | text = self.wrap_text(text, 640) 402 | total_height = self.get_text_height(text) 403 | if total_height < 400: 404 | await self.write_text(text) 405 | return 406 | await self.frame.run_lua(f"scrollText(\"{self.frame.escape_lua_string(text)}\",{self.line_height},{total_height},{lines_per_frame},{delay},'{color.name}',{self.char_spacing})",checked=True,timeout=total_height/lines_per_frame*(delay+0.1)+5) 407 | 408 | def wrap_text(self, text: str, max_width: int) -> str: 409 | """ 410 | Wrap text to fit within a specified width. 411 | 412 | Args: 413 | text (str): The text to wrap. 414 | max_width (int): The maximum width for the text bounding box. 415 | 416 | Returns: 417 | str: The wrapped text. 418 | """ 419 | lines = text.split("\n") 420 | output = "" 421 | for line in lines: 422 | if self.get_text_width(line) <= max_width: 423 | output += line+"\n" 424 | else: 425 | this_line = "" 426 | words = line.split(" ") 427 | for word in words: 428 | if self.get_text_width(this_line+" "+word) > max_width: 429 | output += this_line+"\n" 430 | this_line = word 431 | elif len(this_line) == 0: 432 | this_line = word 433 | else: 434 | this_line += " "+word 435 | if len(this_line) > 0: 436 | output += this_line+"\n" 437 | return output.rstrip("\n") 438 | 439 | def get_text_height(self, text: str) -> int: 440 | """ 441 | Gets the rendered height of text in pixels. 442 | This does not perform any text wrapping but does respect any manually-included line breaks. 443 | The rendered height is affected by the `line_height` property. 444 | 445 | Args: 446 | text (str): The text to measure. 447 | 448 | Returns: 449 | int: The height of the text in pixels. 450 | """ 451 | num_lines = text.count("\n") + 1 452 | return num_lines * (self.line_height) 453 | 454 | def get_text_width(self, text: str) -> int: 455 | """ 456 | Gets the rendered width of text in pixels. 457 | Text on Frame is variable width, so this is important for positioning text. 458 | This does not perform any text wrapping but does respect any manually-included line breaks. 459 | 460 | Args: 461 | text (str): The text to measure. 462 | 463 | Returns: 464 | int: The width of the text in pixels. 465 | """ 466 | width = 0 467 | for char in text: 468 | width += char_width_mapping.get(ord(char), 25) + self.char_spacing 469 | return width 470 | 471 | async def show(self): 472 | """Swaps the buffer to show the changes. 473 | The Frame display has 2 buffers. All writing to the display via frame.display.write_text(), frame.display.draw_rect(), etc write to an off-screen buffer and are not visible. This allows you to write multiple actions at once. When you have completed drawing and want to show it to the user, call frame.display.show() which will display the buffered graphics and clear a new off-screen buffer for whatever you want to draw next.""" 474 | await self.frame.run_lua("frame.display.show()", checked=True) 475 | 476 | async def clear(self): 477 | """Clears the display.""" 478 | await self.frame.run_lua("frame.display.bitmap(1,1,4,2,15,\"\\xFF\")") 479 | await self.show() 480 | 481 | async def set_palette(self, paletteIndex: PaletteColors, rgb_color: tuple[int, int, int]): 482 | """ 483 | Sets a color in the display palette. 484 | 485 | Args: 486 | paletteIndex (PaletteColors): The PaletteColor to set. 487 | rgb_color (tuple[int, int, int]): The RGB color tuple. 488 | 489 | Raises: 490 | ValueError: If the index is out of range. 
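        Example (a minimal sketch, assuming `f` is a connected Frame instance):

            # re-map the YELLOW palette slot to a custom orange tone,
            # then draw with it as usual
            await f.display.set_palette(PaletteColors.YELLOW, (255, 170, 0))
            await f.display.draw_rect(1, 1, 200, 100, PaletteColors.YELLOW)
            await f.display.show()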
491 | """ 492 | if isinstance(paletteIndex, int): 493 | paletteIndex = PaletteColors(paletteIndex % 16) 494 | 495 | color = tuple(max(0, min(255, c)) for c in rgb_color) 496 | self.palette[paletteIndex] = color 497 | await self.frame.run_lua(f"frame.display.assign_color({paletteIndex.name},{color[0]},{color[1]},{color[2]})", checked=True) 498 | 499 | def _draw_rect_lua(self, x: int, y: int, w: int, h: int, color: PaletteColors): 500 | if isinstance(color, PaletteColors): 501 | color = color.value 502 | 503 | w = w // 8 * 8 504 | return f"frame.display.bitmap({x},{y},{w},2,{color},string.rep(\"\\xFF\",{(w//8)*h}))" 505 | 506 | async def draw_rect(self, x: int, y: int, w: int, h: int, color: PaletteColors = PaletteColors.WHITE): 507 | """ 508 | Draws a filled rectangle on the display. 509 | 510 | Args: 511 | x (int): The x position of the upper-left corner of the rectangle. 512 | y (int): The y position of the upper-left corner of the rectangle. 513 | w (int): The width of the rectangle. 514 | h (int): The height of the rectangle. 515 | color (PaletteColors): The color of the rectangle. 516 | """ 517 | if isinstance(color, PaletteColors): 518 | color = color.value 519 | 520 | w = w // 8 * 8 521 | await self.frame.run_lua(self._draw_rect_lua(x, y, w, h, color)) 522 | 523 | async def draw_rect_filled(self, x: int, y: int, w: int, h: int, border_width: int, border_color: PaletteColors, fill_color: PaletteColors): 524 | """ 525 | Draws a filled rectangle with a border on the display. 526 | 527 | Args: 528 | x (int): The x position of the upper-left corner of the rectangle. 529 | y (int): The y position of the upper-left corner of the rectangle. 530 | w (int): The width of the rectangle. 531 | h (int): The height of the rectangle. 532 | border_width (int): The width of the border in pixels. 533 | border_color (PaletteColors): The color of the border. 534 | fill_color (PaletteColors): The color of the fill. 535 | """ 536 | 537 | w = w // 8 * 8 538 | if border_width > 0: 539 | border_width = border_width // 8 * 8 540 | if border_width == 0: 541 | border_width = 8 542 | else: 543 | await self.draw_rect(x, y, w, h, fill_color) 544 | return 545 | 546 | # draw entire rectangle as border color 547 | lua_to_send = self._draw_rect_lua(x, y, w, h, border_color) 548 | # draw the inside rectangle 549 | lua_to_send += self._draw_rect_lua(x+border_width, y+border_width, w-border_width*2, h-border_width*2, fill_color) 550 | await self.frame.run_lua(lua_to_send, checked=True) -------------------------------------------------------------------------------- /src/frame_sdk/files.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | import asyncio 3 | from typing import Optional, TYPE_CHECKING 4 | 5 | if TYPE_CHECKING: 6 | from .frame import Frame 7 | 8 | class Files: 9 | """Helpers for accessing the Frame filesystem.""" 10 | 11 | frame: "Frame" = None 12 | 13 | def __init__(self, frame: "Frame"): 14 | """ 15 | Initialize the Files helper with a Frame instance. 16 | 17 | Args: 18 | frame (Frame): The Frame instance to use for filesystem operations. 19 | """ 20 | self.frame = frame 21 | 22 | async def write_file(self, path: str, data: bytes, checked: bool = False) -> None: 23 | """ 24 | Write a file to the device. 25 | 26 | Args: 27 | path (str): The full filename to write on the Frame. 28 | data (bytes): The data to write to the file as bytes. You can use .encode() to get bytes from a string. 
29 | checked (bool, optional): If True, each step of writing will wait for acknowledgement from the Frame before continuing. Defaults to False. 30 | 31 | Raises: 32 | Exception: If the file cannot be opened, written to, or closed. 33 | """ 34 | response = await self.frame.bluetooth.send_lua( 35 | f"w=frame.file.open(\"{path}\",\"write\")" + 36 | (";print(\"o\")" if checked else ""), await_print=checked) 37 | if checked and response != "o": 38 | raise Exception(f"Couldn't open file \"{path}\" for writing: {response}") 39 | response = await self.frame.bluetooth.send_lua( 40 | f"frame.bluetooth.receive_callback((function(d)w:write(d)end))" + 41 | (";print(\"c\")" if checked else ""), await_print=checked) 42 | if checked and response != "c": 43 | raise Exception(f"Couldn't register callback for writing to file \"{path}\": {response}") 44 | 45 | current_index = 0 46 | while current_index < len(data): 47 | max_payload = self.frame.bluetooth.max_data_payload()-1 48 | next_chunk_length = min(len(data) - current_index, max_payload) 49 | if next_chunk_length == 0: 50 | break 51 | 52 | if next_chunk_length <= 0: 53 | raise Exception("MTU too small to write file, or escape character at end of chunk") 54 | 55 | chunk = data[current_index:current_index + next_chunk_length] 56 | await self.frame.bluetooth.send_data(chunk) 57 | 58 | current_index += next_chunk_length 59 | if current_index < len(data): 60 | await asyncio.sleep(0.1) 61 | 62 | response = await self.frame.bluetooth.send_lua("w:close();print(\"c\")", await_print=checked) 63 | if checked and response != "c": 64 | raise Exception("Error closing file") 65 | response = await self.frame.bluetooth.send_lua( 66 | f"frame.bluetooth.receive_callback(nil)" + 67 | (";print(\"c\")" if checked else ""), await_print=checked) 68 | if checked and response != "c": 69 | raise Exception(f"Couldn't remove callback for writing to file \"{path}\"") 70 | 71 | async def file_exists(self, path: str) -> bool: 72 | """ 73 | Check if a file exists on the device. 74 | 75 | Args: 76 | path (str): The full path to the file to check. 77 | 78 | Returns: 79 | bool: True if the file exists, False otherwise. 80 | """ 81 | response_from_opening = await self.frame.bluetooth.send_lua( 82 | f"r=frame.file.open(\"{path}\",\"read\");print(\"o\");r:close()", await_print=True) 83 | return response_from_opening == "o" 84 | 85 | async def delete_file(self, path: str) -> bool: 86 | """ 87 | Delete a file on the device. 88 | 89 | Args: 90 | path (str): The full path to the file to delete. 91 | 92 | Returns: 93 | bool: True if the file was deleted, False if it didn't exist or failed to delete. 94 | """ 95 | response = await self.frame.bluetooth.send_lua(f"frame.file.remove(\"{path}\");print(\"d\")", await_print=True) 96 | return response == "d" 97 | 98 | async def read_file(self, path: str) -> bytes: 99 | """ 100 | Read a file from the device. 101 | 102 | Args: 103 | path (str): The full filename to read on the Frame. 104 | 105 | Returns: 106 | bytes: The content of the file as bytes. You can use .decode() to get a string. 107 | 108 | Raises: 109 | Exception: If the file does not exist. 
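A minimal round-trip through the file helpers shown above (write, check existence, read back, delete). The path `/demo.txt` and the helper name are illustrative only; `checked=True` makes each write step wait for an acknowledgement from the Frame.

```python
import asyncio
from frame_sdk import Frame

async def file_roundtrip():
    async with Frame() as f:
        data = ("hello " * 100).encode()
        await f.files.write_file("/demo.txt", data, checked=True)
        if await f.files.file_exists("/demo.txt"):
            read_back = await f.files.read_file("/demo.txt")
            print(read_back.decode())
        await f.files.delete_file("/demo.txt")

asyncio.run(file_roundtrip())
```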
110 | """ 111 | await self.frame.run_lua(f"printCompleteFile(\"{path}\")") 112 | result: bytes = await self.frame.bluetooth.wait_for_data() 113 | return result.strip() -------------------------------------------------------------------------------- /src/frame_sdk/frame.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import hashlib 3 | from typing import Awaitable, Callable, Optional 4 | from .bluetooth import Bluetooth, FrameDataTypePrefixes 5 | from .files import Files 6 | from .camera import Camera 7 | from .display import Display 8 | from .microphone import Microphone 9 | from .motion import Motion 10 | import random 11 | import re 12 | import time 13 | 14 | class Frame: 15 | """Represents a Frame device. Instantiate this class via `async with Frame() as f:`.""" 16 | 17 | debug_on_new_connection: bool = False 18 | 19 | def __init__(self, address: Optional[str] = None): 20 | """Initialize the Frame device and its components.""" 21 | self.bluetooth = Bluetooth() 22 | self.files = Files(self) 23 | self.camera = Camera(self) 24 | self.display = Display(self) 25 | self.microphone = Microphone(self) 26 | self.motion = Motion(self) 27 | self._lua_on_wake = None 28 | self._callback_on_wake = None 29 | if address != "": 30 | self._address = address.upper() 31 | else: 32 | self._address = None 33 | 34 | async def __aenter__(self) -> 'Frame': 35 | """Enter the asynchronous context manager. 36 | `address` can optionally be provided either as the 2 digit ID shown on 37 | Frame, or the device's full address (note that on MacOS, this is a 38 | system generated UUID not the devices real MAC address) in order to only 39 | connect to that specific device. The value should be a string, for 40 | example `"4F"` or `"78D97B6B-244B-AC86-047F-BBF72ADEB1F5"`""" 41 | await self.ensure_connected(self._address) 42 | return self 43 | 44 | async def __aexit__(self, exc_type: Optional[type], exc_value: Optional[BaseException], traceback: Optional[object]) -> None: 45 | """Exit the asynchronous context manager.""" 46 | if self.bluetooth.is_connected(): 47 | await self.bluetooth.disconnect() 48 | 49 | async def ensure_connected(self, address: Optional[str] = None) -> None: 50 | """Ensure the Frame is connected, establishing a connection if not. 51 | `address` can optionally be provided either as the 2 digit ID shown on 52 | Frame, or the device's full address (note that on MacOS, this is a 53 | system generated UUID not the devices real MAC address) in order to only 54 | connect to that specific device. The value should be a string, for 55 | example `"4F"` or `"78D97B6B-244B-AC86-047F-BBF72ADEB1F5"`""" 56 | if not self.bluetooth.is_connected(): 57 | await self.bluetooth.connect(address or self._address) 58 | self.bluetooth.print_debugging = Frame.debug_on_new_connection 59 | await self.bluetooth.send_break_signal() 60 | await self.inject_all_library_functions() 61 | await self.run_lua(f"is_awake=true;frame.time.utc({int(time.time())});frame.time.zone('{time.strftime('%z')[:3]}:{time.strftime('%z')[3:]}')", checked=True) 62 | 63 | async def evaluate(self, lua_expression: str) -> str: 64 | """Evaluates a Lua expression on the device and returns the result. 65 | 66 | Args: 67 | lua_expression (str): The Lua expression to evaluate. 68 | 69 | Returns: 70 | str: The result of the evaluation. 
71 | """ 72 | await self.ensure_connected() 73 | return await self.run_lua(f"prntLng(tostring({lua_expression}))", await_print=True) 74 | 75 | async def run_lua(self, lua_string: str, await_print: bool = False, checked: bool = False, timeout: Optional[float] = None) -> Optional[str]: 76 | """ 77 | Run a Lua string on the device, automatically determining the appropriate method based on length. 78 | 79 | If `await_print=True` or `checked=True`, the function will block, otherwise it will return immediately. 80 | 81 | Args: 82 | lua_string (str): The Lua code to execute. 83 | await_print (bool): Whether to wait for a print statement from the Lua code. 84 | checked (bool): Whether to wait for confirmation of successful execution. 85 | timeout (Optional[float]): The maximum time to wait for execution. 86 | 87 | Returns: 88 | Optional[str]: The result of the Lua execution if `await_print` is True. 89 | """ 90 | await self.ensure_connected() 91 | # replace any print() calls with prntLng() calls 92 | # TODO: this is a dirty hack and instead we should fix the implementation of print() in the Frame 93 | lua_string = re.sub(r'\bprint\(', 'prntLng(', lua_string) 94 | 95 | if len(lua_string) <= self.bluetooth.max_lua_payload(): 96 | if checked and not await_print: 97 | lua_string += ";print(\"+\")" 98 | if len(lua_string) <= self.bluetooth.max_lua_payload(): 99 | result = await self.bluetooth.send_lua(lua_string, await_print=True, timeout=timeout) 100 | if result != "+": 101 | raise Exception(f"Lua did not run successfully: {result}") 102 | return None 103 | else: 104 | return await self.bluetooth.send_lua(lua_string, await_print=await_print, timeout=timeout) 105 | 106 | return await self.send_long_lua(lua_string, await_print=await_print, checked=checked, timeout=timeout) 107 | 108 | async def send_long_lua(self, string: str, await_print: bool = False, checked: bool = False, timeout: Optional[float] = None) -> Optional[str]: 109 | """ 110 | Sends a Lua string to the device that is longer than the MTU limit and thus 111 | must be sent via writing to a file and requiring that file. 112 | 113 | If `await_print=True` or `checked=True`, the function will block, otherwise it will return immediately. 114 | 115 | Args: 116 | string (str): The Lua code to execute. 117 | await_print (bool): Whether to wait for a print statement from the Lua code. 118 | checked (bool): Whether to wait for confirmation of successful execution. 119 | timeout (Optional[float]): The maximum time to wait for execution. 120 | 121 | Returns: 122 | Optional[str]: The result of the Lua execution if `await_print` is True. 123 | """ 124 | await self.ensure_connected() 125 | 126 | # we use a random name here since require() only works once per file. 127 | # TODO: confirm that the Frame implementation of Lua actually works this way. If not, we don't need to randomize the name. 
128 | random_name = ''.join(chr(ord('a')+random.randint(0,25)) for _ in range(4)) 129 | 130 | await self.files.write_file(f"/{random_name}.lua", string.encode(), checked=True) 131 | if await_print: 132 | response = await self.bluetooth.send_lua(f"require(\"{random_name}\")", await_print=True, timeout=timeout) 133 | elif checked: 134 | response = await self.bluetooth.send_lua(f"require(\"{random_name}\");print('done')", await_print=True, timeout=timeout) 135 | if response != "done": 136 | raise Exception(f"require() did not return 'done': {response}") 137 | response = None 138 | else: 139 | response = await self.bluetooth.send_lua(f"require(\"{random_name}\")") 140 | await self.files.delete_file(f"/{random_name}.lua") 141 | return response 142 | 143 | async def get_battery_level(self) -> int: 144 | """Returns the battery level as a percentage between 1 and 100. 145 | 146 | Returns: 147 | int: The battery level percentage. 148 | """ 149 | await self.ensure_connected() 150 | response = await self.bluetooth.send_lua("print(frame.battery_level())", await_print=True) 151 | return int(float(response)) 152 | 153 | async def delay(self, seconds: float) -> None: 154 | """Delays execution on Frame for a given number of seconds. Technically this sends a sleep command, but it doesn't actually change the power mode. This function does not block, returning immediately. 155 | 156 | Args: 157 | seconds (float): The number of seconds to sleep. 158 | """ 159 | if seconds <= 0: 160 | raise ValueError("Delay seconds must be a positive number.") 161 | await self.ensure_connected() 162 | await self.run_lua(f"frame.sleep({seconds})") 163 | 164 | async def sleep(self, deep_sleep: bool = False) -> None: 165 | """Puts the Frame into sleep mode. There are two modes: normal and deep. 166 | Normal sleep mode can still receive bluetooth data, and is essentially the same as clearing the display and putting the camera in low power mode. The Frame will retain the time and date, and any functions and variables will stay in memory. 167 | Deep sleep mode saves additional power, but has more limitations. The Frame will not retain the time and date, and any functions and variables will not stay in memory. Blue data will not be received. The only way to wake the Frame from deep sleep is to tap it. 168 | The difference in power usage is fairly low, so it's often best to use normal sleep mode unless you need the extra power savings. 169 | """ 170 | await self.ensure_connected() 171 | if deep_sleep: 172 | await self.run_lua("frame.sleep()") 173 | else: 174 | if self._lua_on_wake is not None or self._callback_on_wake is not None: 175 | run_on_wake = self._lua_on_wake or "" 176 | if self._callback_on_wake is not None: 177 | run_on_wake = "frame.bluetooth.send('\\x"+FrameDataTypePrefixes.WAKE.value_as_hex+"');"+run_on_wake 178 | run_on_wake = "if not is_awake then;is_awake=true;"+run_on_wake+";end" 179 | await self.motion.run_on_tap(run_on_wake) 180 | await self.run_lua("frame.display.text(' ',1,1);frame.display.show();", checked=True) 181 | self.camera.is_awake = False 182 | 183 | async def stay_awake(self, value: bool) -> None: 184 | """Prevents Frame from going to sleep while it's docked onto the charging cradle. 185 | This can help during development where continuous power is needed, however may 186 | degrade the display or cause burn-in if used for extended periods of time. 187 | 188 | Args: 189 | value (bool): True to stay awake, False to allow sleep. 
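A brief power-management sketch using the battery, delay, and sleep calls defined above; the helper name is illustrative only.

```python
import asyncio
from frame_sdk import Frame

async def power_demo():
    async with Frame() as f:
        print(f"Battery: {await f.get_battery_level()}%")
        # pause execution on the Frame itself for half a second (does not block locally)
        await f.delay(0.5)
        # normal sleep: clears the display, keeps Bluetooth, time, and memory state
        await f.sleep()

asyncio.run(power_demo())
```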
190 | """ 191 | await self.ensure_connected() 192 | await self.run_lua(f"frame.stay_awake({str(value).lower()})", checked=True) 193 | 194 | async def inject_library_function(self, name: str, function: str, version: int) -> None: 195 | """ 196 | Inject a function into the global environment of the device. Used to push helper library functions to the device. 197 | 198 | Args: 199 | name (str): The name of the function. 200 | function (str): The function code. 201 | version (int): The version of the function. 202 | """ 203 | await self.ensure_connected() 204 | 205 | exists = await self.bluetooth.send_lua(f"print({name} ~= nil)", await_print=True) 206 | if (self.bluetooth._print_debugging): 207 | print(f"Function {name} exists: {exists}") 208 | if (exists != "true"): 209 | # function does not yet exist, so let's see if the file for it does 210 | exists = await self.files.file_exists(f"/lib-{version}/{name}.lua") 211 | if (self.bluetooth._print_debugging): 212 | print(f"File /lib-{version}/{name}.lua exists: {exists}") 213 | 214 | if (exists): 215 | response = await self.bluetooth.send_lua(f"require(\"lib-{version}/{name}\");print(\"l\")", await_print=True) 216 | if response == "l": 217 | return 218 | 219 | if (self.bluetooth._print_debugging): 220 | print(f"Writing file /lib-{version}/{name}.lua") 221 | await self.files.write_file(f"/lib-{version}/{name}.lua", function.encode(), checked=True) 222 | 223 | if (self.bluetooth._print_debugging): 224 | print(f"Requiring lib-{version}/{name}") 225 | response = await self.bluetooth.send_lua(f"require(\"lib-{version}/{name}\");print(\"l\")", await_print=True) 226 | if response != "l": 227 | raise Exception(f"Error injecting library function: {response}") 228 | 229 | async def inject_all_library_functions(self) -> None: 230 | """ 231 | Inject all library functions into the global environment of the device. 232 | """ 233 | from .library_functions import library_print_long 234 | # hash the library_print_long function to get a version id (take only the first 6 chars) 235 | library_version = hashlib.sha256(library_print_long.encode()).hexdigest()[:6] 236 | 237 | await self.ensure_connected() 238 | response = await self.bluetooth.send_lua(f"frame.file.mkdir(\"lib-{library_version}\");print(\"c\")", await_print=True) 239 | if response == "c": 240 | if (self.bluetooth._print_debugging): 241 | print("Created lib directory") 242 | else: 243 | if (self.bluetooth._print_debugging): 244 | print("Did not create lib directory: "+response) 245 | await self.inject_library_function("prntLng", library_print_long, library_version) 246 | 247 | 248 | def escape_lua_string(self, string: str) -> str: 249 | """Escape a string for use in Lua. 250 | 251 | Args: 252 | string (str): The string to escape. 253 | 254 | Returns: 255 | str: The escaped string. 256 | """ 257 | return string.replace("\\", "\\\\").replace("\n", "\\n").replace("\r", "\\r").replace("\t", "\\t").replace("\"", "\\\"").replace("[", "[").replace("]", "]") 258 | 259 | async def run_on_wake(self, lua_script: Optional[str] = None, callback: Optional[Callable[[], None ]] = None) -> None: 260 | """ 261 | Runs a Lua function when the device wakes up from sleep. Can include lua code to be run on Frame upon wake and/or a python callback to be run locally upon wake. 
262 | """ 263 | self._lua_on_wake = lua_script 264 | self._callback_on_wake = callback 265 | 266 | if callback is not None: 267 | self.bluetooth.register_data_response_handler(FrameDataTypePrefixes.WAKE, lambda data: callback()) 268 | else: 269 | self.bluetooth.register_data_response_handler(FrameDataTypePrefixes.WAKE, None) 270 | 271 | if lua_script is not None and callback is not None: 272 | await self.files.write_file("main.lua",("is_awake=true;frame.bluetooth.send('\\x"+FrameDataTypePrefixes.WAKE.value_as_hex+"');\n"+lua_script).encode(), checked=True) 273 | elif lua_script is None and callback is not None: 274 | await self.files.write_file("main.lua",("is_awake=true;frame.bluetooth.send('\\x"+FrameDataTypePrefixes.WAKE.value_as_hex+"')").encode(), checked=True) 275 | elif lua_script is not None and callback is None: 276 | await self.files.write_file("main.lua",("is_awake=true;"+lua_script).encode(), checked=True) 277 | else: 278 | await self.files.write_file("main.lua",b"is_awake=true", checked=True) -------------------------------------------------------------------------------- /src/frame_sdk/library_functions.py: -------------------------------------------------------------------------------- 1 | from .bluetooth import FrameDataTypePrefixes 2 | 3 | # these are some helper functions that we run on the Frame. The SDK will not work well if these do not exist. Every time you connect to a Frame, the SDK will automatically inject these functions into the Frame. 4 | library_print_long = """ 5 | function prntLng(stringToPrint) 6 | local mtu = frame.bluetooth.max_length() 7 | local len = string.len(stringToPrint) 8 | if len <= mtu - 3 then 9 | print(stringToPrint) 10 | return 11 | end 12 | local i = 1 13 | local chunkIndex = 0 14 | while i <= len do 15 | local j = i + mtu - 4 16 | if j > len then 17 | j = len 18 | end 19 | local chunk = string.sub(stringToPrint, i, j) 20 | print('\\x"""+FrameDataTypePrefixes.LONG_TEXT.value_as_hex+"""'..chunk) 21 | chunkIndex = chunkIndex + 1 22 | i = j + 1 23 | end 24 | print('\\x"""+FrameDataTypePrefixes.LONG_TEXT_END.value_as_hex+"""'..chunkIndex) 25 | end 26 | function sendPartial(dataToSend, max_size) 27 | local len = string.len(dataToSend) 28 | local i = 1 29 | local chunkIndex = 0 30 | while i <= len do 31 | local j = i + max_size - 4 32 | if j > len then 33 | j = len 34 | end 35 | local chunk = string.sub(dataToSend, i, j) 36 | while true do 37 | if pcall(frame.bluetooth.send, '\\x"""+FrameDataTypePrefixes.LONG_DATA.value_as_hex+"""' .. chunk) then 38 | break 39 | end 40 | end 41 | chunkIndex = chunkIndex + 1 42 | i = j + 1 43 | end 44 | return chunkIndex 45 | end 46 | function printCompleteFile(filename) 47 | local mtu = frame.bluetooth.max_length() 48 | local f = frame.file.open(filename, "read") 49 | local chunkIndex = 0 50 | local chunk = "" 51 | while true do 52 | local new_chunk = f:read() 53 | if new_chunk == nil then 54 | if string.len(chunk) > 0 then 55 | chunkIndex = chunkIndex + sendPartial(chunk, mtu) 56 | break 57 | end 58 | break 59 | end 60 | if string.len(new_chunk) == 512 then 61 | chunk = chunk .. new_chunk 62 | else 63 | chunk = chunk .. new_chunk .. "\\n" 64 | end 65 | 66 | while string.len(chunk) > mtu - 4 do 67 | local chunk_to_send = string.sub(chunk, 1, mtu - 4) 68 | chunkIndex = chunkIndex + 1 69 | chunk = string.sub(chunk, mtu - 3) 70 | while true do 71 | if pcall(frame.bluetooth.send, '\\x"""+FrameDataTypePrefixes.LONG_DATA.value_as_hex+"""' .. 
chunk_to_send) then 72 | break 73 | end 74 | end 75 | end 76 | end 77 | while true do 78 | if pcall(frame.bluetooth.send, '\\x"""+FrameDataTypePrefixes.LONG_DATA_END.value_as_hex+"""' .. chunkIndex) then 79 | break 80 | end 81 | end 82 | f:close() 83 | end 84 | function cameraCaptureAndSend(quality,autoExpTimeDelay,autofocusType,resolution,pan) 85 | local last_autoexp_time = 0 86 | local state = 'EXPOSING' 87 | local state_time = frame.time.utc() 88 | local chunkIndex = 0 89 | if autoExpTimeDelay == nil then 90 | state = 'CAPTURE' 91 | end 92 | 93 | while true do 94 | if state == 'EXPOSING' then 95 | if frame.time.utc() - last_autoexp_time > 0.1 then 96 | frame.camera.auto { metering = autofocusType } 97 | last_autoexp_time = frame.time.utc() 98 | end 99 | if frame.time.utc() > state_time + autoExpTimeDelay then 100 | state = 'CAPTURE' 101 | end 102 | elseif state == 'CAPTURE' then 103 | frame.camera.capture { resolution = resolution, quality = quality, pan = pan } 104 | state_time = frame.time.utc() 105 | state = 'WAIT' 106 | elseif state == 'WAIT' then 107 | if frame.camera.image_ready() then 108 | state = 'SEND' 109 | end 110 | elseif state == 'SEND' then 111 | local i = frame.camera.read(frame.bluetooth.max_length() - 1) 112 | if (i == nil) then 113 | state = 'DONE' 114 | else 115 | while true do 116 | if pcall(frame.bluetooth.send, '\\x"""+FrameDataTypePrefixes.LONG_DATA.value_as_hex+"""' .. i) then 117 | break 118 | end 119 | end 120 | chunkIndex = chunkIndex + 1 121 | end 122 | elseif state == 'DONE' then 123 | while true do 124 | if pcall(frame.bluetooth.send, '\\x"""+FrameDataTypePrefixes.LONG_DATA_END.value_as_hex+"""' .. chunkIndex) then 125 | break 126 | end 127 | end 128 | break 129 | end 130 | end 131 | end 132 | function drawRect(x,y,width,height,color) 133 | frame.display.bitmap(x,y,width,2,color,string.rep("\\xFF",math.floor(width/8*height))) 134 | end 135 | 136 | function scrollText(text, line_height, total_height, lines_per_frame, delay, text_color_name, letter_spacing) 137 | local lines = {} 138 | local line_count = 1 139 | local start = 1 140 | while true do 141 | local found_start, found_end = string.find(text, "\\n", start) 142 | if not found_start then 143 | table.insert(lines, string.sub(text, start)) 144 | break 145 | end 146 | table.insert(lines, string.sub(text, start, found_start - 1)) 147 | line_count = line_count + 1 148 | start = found_end + 1 149 | end 150 | local i = 0 151 | while i < total_height - (400 - line_height * 2) do 152 | local start_time = frame.time.utc() 153 | if i == 0 then 154 | start_time = start_time + (2 * line_height / lines_per_frame * delay) 155 | end 156 | local first_line_index = math.floor(i / line_height) + 1 157 | local first_line_offset = i % line_height 158 | local y = line_height - first_line_offset 159 | for j = first_line_index, line_count do 160 | local line = lines[j] 161 | frame.display.text(line, 1, y, {color=text_color_name, spacing=letter_spacing}) 162 | y = y + line_height 163 | if y > 400 - line_height then 164 | break 165 | end 166 | end 167 | drawRect(1, 1, 640, line_height, 15) 168 | drawRect(1, 400 - line_height, 640, line_height, 15) 169 | frame.display.show() 170 | while frame.time.utc() - start_time < delay do 171 | end 172 | i = i + lines_per_frame 173 | end 174 | local extra_time = frame.time.utc() + (1 * line_height / lines_per_frame * delay) 175 | while frame.time.utc() < extra_time do 176 | end 177 | end 178 | 179 | function microphoneRecordAndSend(sample_rate, bit_depth, max_time_in_seconds) 180 | 
frame.microphone.start{sample_rate=sample_rate, bit_depth=bit_depth} 181 | local end_time = frame.time.utc() + 60 * 60 * 24 182 | local max_packet_size = frame.bluetooth.max_length() - 4 183 | if max_time_in_seconds ~= nil then 184 | end_time = frame.time.utc() + max_time_in_seconds 185 | end 186 | local chunk_count = 0 187 | 188 | if max_packet_size % 2 ~= 0 then 189 | max_packet_size = max_packet_size - 1 190 | end 191 | 192 | while frame.time.utc() < end_time do 193 | s = frame.microphone.read(max_packet_size) 194 | if s == nil then 195 | break 196 | end 197 | if s ~= '' then 198 | while true do 199 | if max_time_in_seconds ~= nil then 200 | if pcall(frame.bluetooth.send, '\\x"""+FrameDataTypePrefixes.LONG_DATA.value_as_hex+"""' .. s) then 201 | break 202 | end 203 | else 204 | if pcall(frame.bluetooth.send, '\\x"""+FrameDataTypePrefixes.MIC_DATA.value_as_hex+"""' .. s) then 205 | break 206 | end 207 | end 208 | end 209 | chunk_count = chunk_count + 1 210 | end 211 | end 212 | if max_time_in_seconds ~= nil then 213 | frame.bluetooth.send('\\x"""+FrameDataTypePrefixes.LONG_DATA_END.value_as_hex+"""' .. tostring(chunk_count)) 214 | end 215 | frame.microphone.stop() 216 | end 217 | """ -------------------------------------------------------------------------------- /src/frame_sdk/microphone.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import Optional, TYPE_CHECKING 3 | from datetime import datetime 4 | import numpy as np 5 | import asyncio 6 | import simpleaudio 7 | import time 8 | import wave 9 | 10 | from .bluetooth import FrameDataTypePrefixes 11 | 12 | if TYPE_CHECKING: 13 | from .frame import Frame 14 | 15 | class Microphone: 16 | """Record and play audio using the Frame microphone.""" 17 | 18 | frame: "Frame" = None 19 | _audio_buffer: Optional[np.ndarray] = None 20 | 21 | def __init__(self, frame: "Frame"): 22 | """ 23 | Initialize the Microphone with a Frame instance. 24 | 25 | Args: 26 | frame (Frame): The Frame instance to associate with the Microphone. 27 | """ 28 | self.frame = frame 29 | self._audio_buffer = None 30 | self._bit_depth = 16 31 | self._sample_rate = 8000 32 | self._silence_threshold = 0.02 33 | self._audio_finished_event = asyncio.Event() 34 | self._seconds_per_packet = 0 35 | self._last_sound_time = 0 36 | self._noise_floor = 0 37 | 38 | @property 39 | def silence_threshold(self) -> float: 40 | """ 41 | Get the silence threshold as a float between 0 and 1. 42 | 43 | Returns: 44 | float: The current silence threshold. 45 | """ 46 | return self._silence_threshold 47 | 48 | @silence_threshold.setter 49 | def silence_threshold(self, value: float) -> None: 50 | """ 51 | Set the silence threshold to a float between 0 and 1. 0.02 is the default, however you may adjust this value to be more or less sensitive to sound. 52 | 53 | Args: 54 | value (float): The new silence threshold, between 0 and 1. 55 | """ 56 | self._silence_threshold = value 57 | 58 | @property 59 | def bit_depth(self) -> int: 60 | """ 61 | Get the bit depth (number of bits per audio sample), either 8 or 16. 62 | 63 | Returns: 64 | int: The current bit depth. 65 | """ 66 | return self._bit_depth 67 | 68 | @bit_depth.setter 69 | def bit_depth(self, value: int) -> None: 70 | """ 71 | Set the bit depth (number of bits per audio sample) to either 8 or 16. The default is 16. 72 | 73 | Args: 74 | value (int): The new bit depth. Must be 8 or 16. 75 | 76 | Raises: 77 | ValueError: If the bit depth is not 8 or 16. 
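The microphone only accepts 8 kHz or 16 kHz sample rates and 8- or 16-bit depth; the resulting raw data rate, which the recording code later uses to size Bluetooth packets, follows directly from those two settings. A quick illustration of the arithmetic:

```python
# bytes per second = sample_rate * (bit_depth // 8)
for sample_rate in (8000, 16000):
    for bit_depth in (8, 16):
        bps = sample_rate * (bit_depth // 8)
        print(f"{sample_rate} Hz / {bit_depth}-bit -> {bps} bytes per second")
```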
78 | """ 79 | if value not in [8, 16]: 80 | raise ValueError("Bit depth must be 8 or 16") 81 | self._bit_depth = value 82 | 83 | @property 84 | def sample_rate(self) -> int: 85 | """ 86 | Get the sample rate (number of audio samples per second), either 8000 or 16000. 87 | 88 | Returns: 89 | int: The current sample rate. 90 | """ 91 | return self._sample_rate 92 | 93 | @sample_rate.setter 94 | def sample_rate(self, value: int) -> None: 95 | """ 96 | Set the sample rate (number of audio samples per second) to either 8000 or 16000. The default is 8000. 97 | 98 | Args: 99 | value (int): The new sample rate. Must be 8000 or 16000. 100 | 101 | Raises: 102 | ValueError: If the sample rate is not 8000 or 16000. 103 | """ 104 | if value not in [8000, 16000]: 105 | raise ValueError("Sample rate must be 8000 or 16000") 106 | self._sample_rate = value 107 | 108 | async def record_audio(self, silence_cutoff_length_in_seconds: Optional[int] = 3, max_length_in_seconds: int = 30) -> np.ndarray: 109 | """ 110 | Record audio from the microphone. 111 | 112 | Args: 113 | silence_cutoff_length_in_seconds (int): The length of silence to allow before stopping the recording. Defaults to 3 seconds, however you can set to None to disable silence detection. 114 | max_length_in_seconds (int): The maximum length of the recording. Defaults to 30 seconds. 115 | 116 | Returns: 117 | np.ndarray: The recorded audio data. 118 | """ 119 | await self.frame.run_lua("frame.microphone.stop()", checked=False) 120 | 121 | self._audio_buffer = np.array([], dtype=np.int8 if self.bit_depth == 8 else np.int16) 122 | self.frame.bluetooth.register_data_response_handler(FrameDataTypePrefixes.MIC_DATA, self._audio_buffer_handler) 123 | self._audio_finished_event.clear() 124 | 125 | bytes_per_second = self.sample_rate * (self.bit_depth // 8) 126 | seconds_per_byte = 1 / bytes_per_second 127 | self._seconds_per_packet = seconds_per_byte * (self.frame.bluetooth.max_data_payload() - 1) 128 | self._silence_cutoff_length_in_seconds = silence_cutoff_length_in_seconds 129 | self._last_sound_time = time.time() 130 | 131 | if self.frame.bluetooth.print_debugging: 132 | print(f"Starting audio recording at {self.sample_rate} Hz, {self.bit_depth}-bit") 133 | await self.frame.bluetooth.send_lua(f"microphoneRecordAndSend({self.sample_rate},{self.bit_depth},nil)") 134 | try: 135 | await asyncio.wait_for(self._audio_finished_event.wait(), timeout=max_length_in_seconds) 136 | await self.frame.bluetooth.send_break_signal() 137 | # Trim the final _silence_cutoff_length_in_seconds seconds 138 | trim_length = (self._silence_cutoff_length_in_seconds - 0.5) * self._sample_rate 139 | if len(self._audio_buffer) > trim_length: 140 | self._audio_buffer = self._audio_buffer[:-int(trim_length)] 141 | except asyncio.TimeoutError: 142 | await self.frame.bluetooth.send_break_signal() 143 | if self.frame.bluetooth.print_debugging: 144 | print(f"\nAudio recording finished with {len(self._audio_buffer)/self._sample_rate:1.1f} seconds of audio") 145 | self.frame.bluetooth.register_data_response_handler(FrameDataTypePrefixes.MIC_DATA, None) 146 | await self.frame.run_lua("frame.microphone.stop()") 147 | 148 | return self._audio_buffer 149 | 150 | async def save_audio_file(self, filename: str, silence_cutoff_length_in_seconds: int = 3, max_length_in_seconds: int = 30) -> float: 151 | """ 152 | Save the recorded audio to a file. Regardless of any filename extension, the file will be saved as a PCM wav file. 
153 | 154 | Args: 155 | filename (str): The name of the file to save the audio to. 156 | silence_cutoff_length_in_seconds (int): The length of silence to detect before stopping the recording automatically. 157 | max_length_in_seconds (int): The maximum length of the recording. 158 | 159 | Returns: 160 | float: The length of the recorded audio in seconds. 161 | """ 162 | audio_data = await self.record_audio(silence_cutoff_length_in_seconds, max_length_in_seconds) 163 | 164 | if len(audio_data) == 0: 165 | raise ValueError("No audio data recorded") 166 | 167 | # Normalize the 8 or 16 bit data range to (-1, 1) for playback 168 | if self.bit_depth == 8: 169 | audio_data = np.int16(audio_data) 170 | 171 | # based on the max and min values, normalize the data to be within the range of int16.min and int16.max. 172 | real_range = int(np.max(audio_data)) - int(np.min(audio_data)) 173 | ideal_range = int(np.iinfo(np.int16).max) - int(np.iinfo(np.int16).min) 174 | scale_factor = np.min([ideal_range / real_range, int(np.iinfo(np.int16).max) / np.max(audio_data), int(np.iinfo(np.int16).min) / np.min(audio_data)]) 175 | audio_data = (audio_data * scale_factor).astype(np.int16) 176 | 177 | with wave.open(filename,"wb") as f: 178 | f.setnchannels(1) 179 | f.setsampwidth(16 // 8) 180 | f.setframerate(self.sample_rate) 181 | f.writeframes(audio_data.tobytes()) 182 | 183 | length_in_seconds = len(audio_data) / self.sample_rate 184 | return length_in_seconds 185 | 186 | def _audio_buffer_handler(self, data: bytes) -> None: 187 | """ 188 | Handle incoming audio data and update the audio buffer. 189 | 190 | Args: 191 | data (bytes): The incoming audio data. 192 | """ 193 | if self._audio_buffer is None: 194 | return 195 | 196 | audio_data = self._convert_bytes_to_audio_data(data, self.bit_depth) 197 | self._audio_buffer = np.concatenate((self._audio_buffer, audio_data)) 198 | 199 | if self._silence_cutoff_length_in_seconds is not None: 200 | min_amplitude = int(np.min(audio_data)) 201 | max_amplitude = int(np.max(audio_data)) 202 | delta = max_amplitude - min_amplitude 203 | 204 | if self._bit_depth == 8: 205 | delta = float(delta / np.iinfo(np.int8).max) 206 | elif self._bit_depth == 16: 207 | delta = float(delta / np.iinfo(np.int16).max) 208 | 209 | self._noise_floor = self._noise_floor + (delta - self._noise_floor) * 0.1 210 | 211 | if delta - self._noise_floor > self._silence_threshold: 212 | self._last_sound_time = time.time() 213 | if self.frame.bluetooth.print_debugging: 214 | print("+", end="", flush=True) 215 | else: 216 | if time.time() - self._last_sound_time > self._silence_cutoff_length_in_seconds: 217 | self._audio_finished_event.set() 218 | elif self.frame.bluetooth.print_debugging: 219 | print("-", end="", flush=True) 220 | 221 | def _convert_bytes_to_audio_data(self, audio_buffer: bytes, bit_depth: int) -> np.ndarray: 222 | """ 223 | Convert raw audio bytes to a NumPy array. 224 | 225 | Args: 226 | audio_buffer (bytes): The raw audio data. 227 | bit_depth (int): The bit depth of the audio data. 228 | 229 | Returns: 230 | np.ndarray: The converted audio data. 
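The silence detection in `_audio_buffer_handler` below compares each packet's normalized peak-to-peak amplitude against a slowly adapting noise floor. A standalone sketch of that per-packet decision (the function name is illustrative, not part of the SDK):

```python
import numpy as np

def packet_is_sound(packet: np.ndarray, noise_floor: float, threshold: float = 0.02):
    # normalized peak-to-peak amplitude of this packet
    delta = (int(np.max(packet)) - int(np.min(packet))) / np.iinfo(packet.dtype).max
    # the noise floor tracks delta with a simple exponential moving average
    noise_floor += (delta - noise_floor) * 0.1
    return delta - noise_floor > threshold, noise_floor

is_sound, floor = packet_is_sound(np.array([1200, -900, 300], dtype=np.int16), noise_floor=0.0)
print(is_sound, floor)
```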
231 | """ 232 | if bit_depth == 16: 233 | audio_data = np.frombuffer(bytearray(audio_buffer), dtype=np.int16) 234 | elif bit_depth == 8: 235 | audio_data = np.frombuffer(bytearray(audio_buffer), dtype=np.int8) 236 | else: 237 | raise ValueError("Unsupported bit depth") 238 | 239 | return audio_data 240 | 241 | def play_audio_background(self, audio_data: np.ndarray, sample_rate: Optional[int] = None, bit_depth: Optional[int] = None) -> simpleaudio.PlayObject: 242 | """ 243 | Play audio data in the background. 244 | 245 | Args: 246 | audio_data (np.ndarray): The audio data to play. 247 | sample_rate (Optional[int]): The sample rate of the audio data. Defaults to the instance's sample rate. 248 | bit_depth (Optional[int]): The bit depth of the audio data. Defaults to the instance's bit depth. 249 | 250 | Returns: 251 | simpleaudio.PlayObject: The play object for the audio. 252 | """ 253 | if sample_rate is None: 254 | sample_rate = self.sample_rate 255 | if bit_depth is None: 256 | bit_depth = self.bit_depth 257 | 258 | if bit_depth == 8: 259 | # Normalize to 16-bit range 260 | audio_data = audio_data.astype(np.int16) 261 | np.multiply(audio_data, 32767 / np.max(np.abs(audio_data)), out=audio_data, casting='unsafe') 262 | else: 263 | # Normalize to 16-bit range 264 | np.multiply(audio_data, 32767 / np.max(np.abs(audio_data)), out=audio_data, casting='unsafe') 265 | audio_data = audio_data.astype(np.int16) 266 | return simpleaudio.play_buffer(audio_data, num_channels=1, bytes_per_sample=2, sample_rate=sample_rate) 267 | 268 | def play_audio(self, audio_data: np.ndarray, sample_rate: Optional[int] = None, bit_depth: Optional[int] = None) -> None: 269 | """ 270 | Play audio data and wait for it to finish. 271 | 272 | Args: 273 | audio_data (np.ndarray): The audio data to play. 274 | sample_rate (Optional[int]): The sample rate of the audio data. Defaults to the instance's sample rate. 275 | bit_depth (Optional[int]): The bit depth of the audio data. Defaults to the instance's bit depth. 276 | """ 277 | player = self.play_audio_background(audio_data, sample_rate, bit_depth) 278 | player.wait_done() 279 | 280 | async def play_audio_async(self, audio_data: np.ndarray, sample_rate: Optional[int] = None, bit_depth: Optional[int] = None) -> None: 281 | """ 282 | Play audio data asynchronously. 283 | 284 | Args: 285 | audio_data (np.ndarray): The audio data to play. 286 | sample_rate (Optional[int]): The sample rate of the audio data. Defaults to the instance's sample rate. 287 | bit_depth (Optional[int]): The bit depth of the audio data. Defaults to the instance's bit depth. 288 | """ 289 | player = self.play_audio_background(audio_data, sample_rate, bit_depth) 290 | while player.is_playing(): 291 | await asyncio.sleep(0.1) -------------------------------------------------------------------------------- /src/frame_sdk/motion.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | import math 3 | from typing import Awaitable, Callable, Optional, TYPE_CHECKING, Tuple 4 | import asyncio 5 | 6 | from .bluetooth import FrameDataTypePrefixes 7 | 8 | if TYPE_CHECKING: 9 | from .frame import Frame 10 | 11 | class Direction: 12 | """Represents a direction in 3D space.""" 13 | roll: float 14 | """ 15 | The roll angle of the Frame in degrees. 
16 | Range: -180.0 to 180.0 17 | Examples: 0.0 (level) 18 | 10.0 (right side slightly up, head tilted to the left) 19 | 90.0 (right side up, laying down on your left side) 20 | -10.0 (left side slightly up, head tilted to the right) 21 | -90.0 (left side up, laying down on your right side) 22 | """ 23 | 24 | pitch: float 25 | """ 26 | The pitch angle of the Frame in degrees. 27 | Range: -180.0 to 180.0 28 | Example: 0.0 (level) 29 | 20.0 (looking slightly downcast) 30 | -20.0 (looking slightly upwards) 31 | 90.0 (nose straight up at the ceiling) 32 | -90.0 (nose straight down towards the floor) 33 | 110.0 (tilted backwards over the top) 34 | -110.0 (tilted backwards underneath) 35 | """ 36 | 37 | heading: float 38 | """ 39 | TODO: NOT YET IMPLEMENTED IN THE FIRMWARE 40 | The heading angle of the Frame in degrees. 41 | Range: 0.0 to 360.0 42 | Example: 0.0 (North), 90.0 (East), 180.0 (South), 270.0 (West) 43 | """ 44 | 45 | def __init__(self, roll: float = 0.0, pitch: float = 0.0, heading: float = 0.0): 46 | """ 47 | Initialize the Direction with roll, pitch, and heading values. 48 | 49 | Args: 50 | roll (float): The roll angle of the Frame in degrees. 51 | pitch (float): The pitch angle of the Frame in degrees. 52 | heading (float): The heading angle of the Frame in degrees. 53 | """ 54 | self.roll = roll 55 | self.pitch = pitch 56 | self.heading = heading 57 | 58 | def __str__(self) -> str: 59 | """ 60 | Return a string representation of the Direction. 61 | 62 | Returns: 63 | str: A string representation of the Direction. 64 | """ 65 | return f"Direction(roll={self.roll}, pitch={self.pitch}, heading={self.heading})" 66 | 67 | def __repr__(self) -> str: 68 | """ 69 | Return a detailed string representation of the Direction. 70 | 71 | Returns: 72 | str: A detailed string representation of the Direction. 73 | """ 74 | return f"Direction(roll={self.roll}, pitch={self.pitch}, heading={self.heading})" 75 | 76 | def __add__(self, other: Direction) -> Direction: 77 | """ 78 | Add two Direction objects. 79 | 80 | Args: 81 | other (Direction): The other Direction object to add. 82 | 83 | Returns: 84 | Direction: A new Direction object representing the sum of the two directions. 85 | """ 86 | new_roll = self.roll + other.roll 87 | new_pitch = self.pitch + other.pitch 88 | new_heading = (self.heading + other.heading) % 360 89 | 90 | # Clamp roll to be within -180 to 180 degrees 91 | if new_roll > 180: 92 | new_roll -= 360 93 | elif new_roll < -180: 94 | new_roll += 360 95 | 96 | # Clamp pitch to be within -180 to 180 degrees 97 | if new_pitch > 180: 98 | new_pitch -= 360 99 | elif new_pitch < -180: 100 | new_pitch += 360 101 | 102 | return Direction( 103 | roll=new_roll, 104 | pitch=new_pitch, 105 | heading=new_heading 106 | ) 107 | 108 | def __sub__(self, other: Direction) -> Direction: 109 | """ 110 | Subtract one Direction object from another. 111 | 112 | Args: 113 | other (Direction): The other Direction object to subtract. 114 | 115 | Returns: 116 | Direction: A new Direction object representing the difference between the two directions. 
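Combining two `Direction` values wraps roll and pitch back into the -180..180 range and keeps heading modulo 360, as implemented above. A quick illustration:

```python
from frame_sdk.motion import Direction

a = Direction(roll=170.0, pitch=10.0, heading=350.0)
b = Direction(roll=30.0, pitch=-20.0, heading=20.0)
print(a + b)                 # roll wraps to -160.0, heading wraps to 10.0
print((a - b).amplitude())   # overall angular distance between the two
```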
117 | """ 118 | new_roll = self.roll - other.roll 119 | new_pitch = self.pitch - other.pitch 120 | new_heading = (self.heading - other.heading) % 360 121 | 122 | # Clamp roll to be within -180 to 180 degrees 123 | if new_roll > 180: 124 | new_roll -= 360 125 | elif new_roll < -180: 126 | new_roll += 360 127 | 128 | # Clamp pitch to be within -180 to 180 degrees 129 | if new_pitch > 180: 130 | new_pitch -= 360 131 | elif new_pitch < -180: 132 | new_pitch += 360 133 | 134 | return Direction( 135 | roll=new_roll, 136 | pitch=new_pitch, 137 | heading=new_heading 138 | ) 139 | 140 | def amplitude(self) -> float: 141 | """ 142 | Calculate the amplitude of the Direction vector. 143 | 144 | Returns: 145 | float: The amplitude of the Direction vector. 146 | """ 147 | return (self.roll**2 + self.pitch**2 + self.heading**2)**0.5 148 | 149 | class Motion: 150 | """Handle motion on the Frame IMU.""" 151 | 152 | frame: "Frame" = None 153 | 154 | def __init__(self, frame: "Frame"): 155 | """ 156 | Initialize the Motion class with a Frame instance. 157 | 158 | Args: 159 | frame (Frame): The Frame instance to associate with the Motion class. 160 | """ 161 | self.frame = frame 162 | 163 | async def get_direction(self) -> Direction: 164 | """Gets the orientation of the Frame. Note that the `heading` is not yet implemented""" 165 | result = await self.frame.run_lua("local dir = frame.imu.direction();print(dir['roll']..','..dir['pitch']..','..dir['heading'])", await_print=True) 166 | result = result.split(",") 167 | direction = Direction(roll=float(result[0]), pitch=float(result[1]), heading=float(result[2])) 168 | 169 | return direction 170 | 171 | 172 | async def run_on_tap(self, lua_script: Optional[str] = None, callback: Optional[Callable[[], None]] = None) -> None: 173 | """Run a callback when the Frame is tapped. 
Can include lua code to be run on Frame upon tap and/or a python callback to be run locally upon tap.""" 174 | 175 | if callback is not None: 176 | self.frame.bluetooth.register_data_response_handler(FrameDataTypePrefixes.TAP, callback) 177 | else: 178 | self.frame.bluetooth.register_data_response_handler(FrameDataTypePrefixes.TAP, None) 179 | 180 | if lua_script is not None and callback is not None: 181 | await self.frame.run_lua("function on_tap();frame.bluetooth.send('\\x"+FrameDataTypePrefixes.TAP.value_as_hex+"');"+lua_script+";end;frame.imu.tap_callback(on_tap)", checked=True) 182 | elif lua_script is None and callback is not None: 183 | await self.frame.run_lua("function on_tap();frame.bluetooth.send('\\x"+FrameDataTypePrefixes.TAP.value_as_hex+"');end;frame.imu.tap_callback(on_tap)", checked=True) 184 | elif lua_script is not None and callback is None: 185 | await self.frame.run_lua("function on_tap();"+lua_script+";end;frame.imu.tap_callback(on_tap)", checked=True) 186 | else: 187 | await self.frame.run_lua("frame.imu.tap_callback(nil)", checked=False) 188 | 189 | async def wait_for_tap(self) -> None: 190 | """Wait for the Frame to be tapped before continuing.""" 191 | self._waiting_on_tap = asyncio.Event() 192 | await self.run_on_tap(callback= lambda : self._waiting_on_tap.set()) 193 | await self._waiting_on_tap.wait() 194 | await self.run_on_tap(callback=None) -------------------------------------------------------------------------------- /tests/test_bluetooth.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import asyncio 3 | import sys 4 | 5 | from frame_sdk import Bluetooth, Frame 6 | 7 | class TestBluetooth(unittest.IsolatedAsyncioTestCase): 8 | async def test_connect_disconnect(self): 9 | b = Bluetooth() 10 | 11 | self.assertFalse(b.is_connected()) 12 | 13 | await b.connect() 14 | self.assertTrue(b.is_connected()) 15 | 16 | await b.disconnect() 17 | self.assertFalse(b.is_connected()) 18 | 19 | async def test_send_lua(self): 20 | async with Frame() as f: 21 | 22 | self.assertEqual(await f.bluetooth.send_lua("print('hi')", await_print=True), "hi") 23 | 24 | self.assertIsNone(await f.bluetooth.send_lua("print('hi')")) 25 | await asyncio.sleep(0.1) 26 | 27 | with self.assertRaises(Exception): 28 | await f.run_lua("a = 1", await_print=True, timeout=1) 29 | 30 | 31 | async def test_send_data(self): 32 | async with Frame() as f: 33 | await f.bluetooth.send_lua( 34 | "frame.bluetooth.receive_callback((function(d)frame.bluetooth.send(d)end))" 35 | ) 36 | 37 | self.assertEqual(await f.bluetooth.send_data(b"test", await_data=True), b"test") 38 | 39 | self.assertIsNone(await f.bluetooth.send_data(b"test")) 40 | await asyncio.sleep(0.1) 41 | 42 | await f.bluetooth.send_lua("frame.bluetooth.receive_callback(nil)") 43 | 44 | with self.assertRaises(Exception): 45 | await f.bluetooth.send_data(b"test", await_data=True) 46 | 47 | async def test_mtu(self): 48 | b = Bluetooth() 49 | await b.connect() 50 | 51 | max_lua_length = b.max_lua_payload() 52 | max_data_length = b.max_data_payload() 53 | 54 | self.assertEqual(max_lua_length, max_data_length + 1) 55 | 56 | with self.assertRaises(Exception): 57 | await b.send_lua("a" * max_lua_length + 1) 58 | 59 | with self.assertRaises(Exception): 60 | await b.send_data(bytearray(b"a" * max_data_length + 1)) 61 | 62 | await b.disconnect() 63 | 64 | 65 | if __name__ == "__main__": 66 | unittest.main() 67 | -------------------------------------------------------------------------------- 
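A tap-handling sketch using `wait_for_tap()` and `run_on_tap()` from the motion module above; the helper name and on-screen strings are illustrative only.

```python
import asyncio
from frame_sdk import Frame

async def tap_demo():
    async with Frame() as f:
        await f.display.show_text("Tap the Frame to continue")
        await f.motion.wait_for_tap()  # blocks until a tap arrives
        # on every future tap: run Lua on the Frame and a local Python callback
        await f.motion.run_on_tap(
            lua_script="frame.display.text('tapped!',1,1);frame.display.show()",
            callback=lambda: print("tapped"))

asyncio.run(tap_demo())
```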
/tests/test_camera.py: -------------------------------------------------------------------------------- 1 | import os 2 | import unittest 3 | import asyncio 4 | import sys 5 | import time 6 | 7 | from frame_sdk import Frame 8 | from frame_sdk.camera import AutofocusType, Quality 9 | 10 | class TestCamera(unittest.IsolatedAsyncioTestCase): 11 | async def test_get_photo(self): 12 | """ 13 | Test taking a photo 14 | """ 15 | async with Frame() as f: 16 | photo = await f.camera.take_photo() 17 | self.assertGreater(len(photo), 2000) 18 | 19 | async def test_save_photo_to_disk(self): 20 | """ 21 | Test saving a photo to disk 22 | """ 23 | async with Frame() as f: 24 | await f.camera.save_photo("test_photo.jpg") 25 | self.assertTrue(os.path.exists("test_photo.jpg")) 26 | self.assertGreater(os.path.getsize("test_photo.jpg"), 2000) 27 | os.remove("test_photo.jpg") 28 | 29 | async def test_photo_with_autofocus_options(self): 30 | """ 31 | Test taking a photo with various autofocus options 32 | """ 33 | async with Frame() as f: 34 | 35 | startTime = time.time() 36 | photo = await f.camera.take_photo(autofocus_seconds=None) 37 | endTime = time.time() 38 | self.assertGreater(len(photo), 2000) 39 | timeToTakePhotoWithoutAutoFocus = endTime - startTime 40 | 41 | startTime = time.time() 42 | photo = await f.camera.take_photo(autofocus_seconds=1, autofocus_type=AutofocusType.SPOT) 43 | endTime = time.time() 44 | self.assertGreater(len(photo), 2000) 45 | timeToTakePhotoWithAutoFocus1Sec = endTime - startTime 46 | 47 | self.assertGreater(timeToTakePhotoWithAutoFocus1Sec, timeToTakePhotoWithoutAutoFocus) 48 | 49 | startTime = time.time() 50 | photo = await f.camera.take_photo(autofocus_seconds=3, autofocus_type=AutofocusType.CENTER_WEIGHTED) 51 | endTime = time.time() 52 | self.assertGreater(len(photo), 2000) 53 | timeToTakePhotoWithAutoFocus3Sec = endTime - startTime 54 | 55 | self.assertGreater(timeToTakePhotoWithAutoFocus3Sec, timeToTakePhotoWithAutoFocus1Sec) 56 | 57 | async def test_photo_with_quality_options(self): 58 | """ 59 | Test taking a photo with various quality options 60 | """ 61 | async with Frame() as f: 62 | photo = await f.camera.take_photo(quality=Quality.VERY_LOW) 63 | very_low_quality_size = len(photo) 64 | self.assertGreater(very_low_quality_size, 2000) 65 | 66 | photo = await f.camera.take_photo(quality=Quality.LOW) 67 | low_quality_size = len(photo) 68 | self.assertGreater(low_quality_size, very_low_quality_size) 69 | 70 | photo = await f.camera.take_photo(quality=Quality.MEDIUM) 71 | medium_quality_size = len(photo) 72 | self.assertGreater(medium_quality_size, low_quality_size) 73 | 74 | photo = await f.camera.take_photo(quality=Quality.HIGH) 75 | high_quality_size = len(photo) 76 | self.assertGreater(high_quality_size, medium_quality_size) 77 | 78 | photo = await f.camera.take_photo(quality=Quality.VERY_HIGH) 79 | very_high_quality_size = len(photo) 80 | self.assertGreater(very_high_quality_size, high_quality_size) 81 | 82 | async def test_photo_with_resolution_options(self): 83 | """ 84 | Test taking a photo with various resolution options 85 | """ 86 | async with Frame() as f: 87 | photo = await f.camera.take_photo(resolution=100) 88 | res_100_size = len(photo) 89 | self.assertGreater(res_100_size, 1000) 90 | 91 | photo = await f.camera.take_photo(resolution=512) 92 | res_512_size = len(photo) 93 | self.assertGreater(res_512_size, res_100_size) 94 | 95 | photo = await f.camera.take_photo(resolution=720) 96 | res_720_size = len(photo) 97 | self.assertGreater(res_720_size, 
res_512_size) 98 | 99 | async def test_photo_with_pan_options(self): 100 | """ 101 | Test taking a photo with various pan options 102 | """ 103 | async with Frame() as f: 104 | photo = await f.camera.take_photo(pan=-140) 105 | pan_m140_size = len(photo) 106 | self.assertGreater(pan_m140_size, 2000) 107 | 108 | photo = await f.camera.take_photo(pan=140) 109 | pan_140_size = len(photo) 110 | self.assertGreater(pan_140_size, 2000) 111 | 112 | 113 | if __name__ == "__main__": 114 | unittest.main() 115 | -------------------------------------------------------------------------------- /tests/test_display.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import asyncio 3 | import sys 4 | import time 5 | 6 | from frame_sdk import Frame 7 | from frame_sdk.display import Alignment, PaletteColors 8 | 9 | class TestDisplay(unittest.IsolatedAsyncioTestCase): 10 | async def test_write_text(self): 11 | async with Frame() as f: 12 | await f.display.show_text("Lorem \"ipsum\" [dolor] 'sit' amet, consectetur\nadipiscing elit.") 13 | await f.display.show_text("Lorem \"ipsum\" [dolor] 'sit' amet, consectetur\nadipiscing elit.", align=Alignment.MIDDLE_CENTER) 14 | await f.display.show_text("Lorem \"ipsum\" [dolor] 'sit' amet, consectetur\nadipiscing elit.", color=PaletteColors.RED) 15 | await f.display.show_text("Lorem \"ipsum\" [dolor] 'sit' amet, consectetur\nadipiscing elit." * 100) 16 | await f.display.show_text("Done") 17 | await f.display.clear() 18 | 19 | async def test_word_wrap(self): 20 | async with Frame() as f: 21 | wrapped400 = f.display.wrap_text("Hi bob! " * 100, 400) 22 | wrapped800 = f.display.wrap_text("Hi bob! " * 100, 800) 23 | self.assertEqual(wrapped400.count("!"), wrapped800.count("!")) 24 | self.assertAlmostEqual(wrapped400.count("\n"), wrapped800.count("\n") * 2, delta=3) 25 | self.assertAlmostEqual(f.display.get_text_height(wrapped400), f.display.get_text_height(wrapped800) * 2, delta=150) 26 | 27 | f.display.char_spacing = 10 28 | wide_wrapped_400 = f.display.wrap_text("Hi bob! " * 100, 400) 29 | self.assertGreater(f.display.get_text_width(wide_wrapped_400), f.display.get_text_width(wrapped400)) 30 | self.assertGreater(f.display.get_text_height(wide_wrapped_400), f.display.get_text_height(wrapped400)) 31 | f.display.char_spacing = 4 32 | 33 | async def test_line_height(self): 34 | async with Frame() as f: 35 | self.assertEqual(f.display.line_height,f.display.get_text_height("hello world! 
123Qgjp@")) 36 | heightOfTwoLines = f.display.get_text_height("hello\nworldj") 37 | f.display.line_height += 20 38 | self.assertEqual(heightOfTwoLines + 40, f.display.get_text_height("hello p\nworld j")) 39 | 40 | async def test_draw_rectangles(self): 41 | async with Frame() as f: 42 | await f.display.draw_rect(1,1,640,400,PaletteColors.RED) 43 | await f.display.draw_rect(300,300,10,10,PaletteColors.YELLOW) 44 | await f.display.draw_rect_filled(50,50,300,300,25,PaletteColors.SEABLUE,PaletteColors.DARKGREEN) 45 | await f.display.show() 46 | await f.display.clear() 47 | 48 | async def test_scroll_text(self): 49 | async with Frame() as f: 50 | start_time = time.time() 51 | await f.display.scroll_text("Lorem \"ipsum\" [dolor] 'sit' amet, consectetur adipiscing elit.\nNulla nec nunc euismod, consectetur nunc eu, aliquam nunc.\nNulla lorem nec nunc euismod, ipsum consectetur nunc eu, aliquam nunc.") 52 | end_time = time.time() 53 | elapsed_time_1 = end_time - start_time 54 | self.assertGreaterEqual(elapsed_time_1, 5) 55 | self.assertLess(elapsed_time_1, 20) 56 | 57 | start_time = time.time() 58 | await f.display.scroll_text("Lorem \"ipsum\" [dolor] 'sit' amet, consectetur adipiscing elit.\nNulla nec nunc euismod, consectetur nunc eu, aliquam nunc.\nNulla lorem nec nunc euismod, ipsum consectetur nunc eu, aliquam nunc.\n" * 3, color=PaletteColors.SKYBLUE) 59 | end_time = time.time() 60 | elapsed_time_2 = end_time - start_time 61 | self.assertAlmostEqual(elapsed_time_1*3, elapsed_time_2, delta=8) 62 | 63 | 64 | if __name__ == "__main__": 65 | unittest.main() 66 | -------------------------------------------------------------------------------- /tests/test_files.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import asyncio 3 | import sys 4 | 5 | from frame_sdk import Frame 6 | 7 | class TestFiles(unittest.IsolatedAsyncioTestCase): 8 | async def test_write_long_file(self): 9 | """ 10 | Test writing a long file to the device. 11 | """ 12 | async with Frame() as f: 13 | content = ("Testing:\n"+("test1... " * 200) + "\nTesting 2:\n" + ("test2\n" * 100)).encode() 14 | await f.files.write_file("test.txt", content, checked=True) 15 | actual_content = await f.files.read_file("test.txt") 16 | self.assertEqual(content.decode().strip(), actual_content.decode().strip()) 17 | actual_content = await f.files.read_file("test.txt") 18 | self.assertEqual(content.strip(), actual_content.strip()) 19 | await f.files.delete_file("test.txt") 20 | 21 | async def test_write_raw_file(self): 22 | """ 23 | Test writing a file with a full spectrum of data to the device. 
24 | """ 25 | async with Frame() as f: 26 | content = bytearray(range(1,255)) 27 | await f.files.write_file("test.dat", content, checked=True) 28 | actual_content = await f.files.read_file("test.dat") 29 | self.assertEqual(content, actual_content) 30 | actual_content = await f.files.read_file("test.dat") 31 | self.assertEqual(content, actual_content) 32 | await f.files.write_file("test.dat", content, checked=True) 33 | actual_content = await f.files.read_file("test.dat") 34 | self.assertEqual(content, actual_content) 35 | actual_content = await f.files.read_file("test.dat") 36 | self.assertEqual(content, actual_content) 37 | await f.files.delete_file("test.dat") 38 | 39 | if __name__ == "__main__": 40 | unittest.main() 41 | -------------------------------------------------------------------------------- /tests/test_frame.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import asyncio 3 | import time 4 | 5 | from frame_sdk import Frame 6 | from frame_sdk.camera import Quality 7 | 8 | class TestFrame(unittest.IsolatedAsyncioTestCase): 9 | async def test_send_lua(self): 10 | async with Frame() as f: 11 | 12 | self.assertEqual(await f.run_lua("print('hi')", await_print=True), "hi") 13 | 14 | self.assertIsNone(await f.run_lua("print('hi')")) 15 | await asyncio.sleep(0.1) 16 | 17 | with self.assertRaises(Exception): 18 | await f.run_lua("a = 1", await_print=True, timeout=1) 19 | 20 | 21 | async def test_send_data(self): 22 | async with Frame() as f: 23 | await f.run_lua( 24 | "frame.bluetooth.receive_callback((function(d)frame.bluetooth.send(d)end))" 25 | ) 26 | 27 | self.assertEqual(await f.bluetooth.send_data(b"test", await_data=True), b"test") 28 | 29 | self.assertIsNone(await f.bluetooth.send_data(b"test")) 30 | await asyncio.sleep(0.1) 31 | 32 | await f.run_lua("frame.bluetooth.receive_callback(nil)") 33 | 34 | with self.assertRaises(Exception): 35 | await f.bluetooth.send_data(b"test", await_data=True) 36 | 37 | async def test_long_send(self): 38 | """ 39 | Test sending lua over the MTU limit to the device and ensure it still works. 40 | """ 41 | async with Frame() as f: 42 | a_count = 32 43 | script = "a = 0;" + " ".join(f"a = a + 1;" for _ in range(a_count)) + "print(a)" 44 | response = await f.send_long_lua(script, await_print=True) 45 | self.assertEqual(str(a_count), response) 46 | 47 | a_count = 250 48 | script = "a = 0;" + " ".join(f"a = a + 1;" for _ in range(a_count)) + "print(a)" 49 | response = await f.send_long_lua(script, await_print=True) 50 | self.assertEqual(str(a_count), response) 51 | 52 | async def test_long_receive(self): 53 | """ 54 | Test receiving lua over the MTU limit from the device and ensure it still works. 55 | """ 56 | async with Frame() as f: 57 | self.assertEqual(await f.run_lua("prntLng('hi')", await_print=True), "hi") 58 | msg = "hello world! " 59 | msg = msg + msg 60 | msg = msg + msg 61 | msg = msg + msg 62 | msg = msg + msg 63 | msg = msg + msg 64 | await f.run_lua("msg = \"hello world! \";msg = msg .. msg;msg = msg .. msg;msg = msg .. msg;msg = msg .. msg;msg = msg .. msg", await_print=False) 65 | self.assertEqual("about to send 416 characters.",(await f.run_lua("print('about to send '..tostring(string.len(msg))..' characters.')", await_print=True))) 66 | self.assertEqual(msg, await f.evaluate("msg")) 67 | 68 | async def test_long_send_and_receive(self): 69 | """ 70 | Test sending and receiving lua over the MTU limit to the device and ensure it still works. 
71 | """ 72 | async with Frame() as f: 73 | a_count = 2 74 | message = "".join(f"and #{i}, " for i in range(a_count)) 75 | script = "message = \"\";" + "".join(f"message = message .. \"and #{i}, \"; " for i in range(a_count)) + "print(message)" 76 | response = await f.run_lua(script, await_print=True) 77 | self.assertEqual(message, response) 78 | 79 | a_count = 50 80 | message = "".join(f"and #{i}, " for i in range(a_count)) 81 | script = "message = \"\";" + "".join(f"message = message .. \"and #{i}, \"; " for i in range(a_count)) + "print(message)" 82 | response = await f.run_lua(script, await_print=True) 83 | self.assertEqual(message, response) 84 | 85 | async def test_battery(self): 86 | async with Frame() as f: 87 | self.assertGreater(await f.get_battery_level(), 0) 88 | self.assertLessEqual(await f.get_battery_level(), 100) 89 | self.assertAlmostEqual(await f.get_battery_level(), int(float(await f.evaluate("frame.battery_level()"))), delta=15) 90 | 91 | async def test_sleep(self): 92 | async with Frame() as f: 93 | await f.run_lua("test_var = 1") 94 | self.assertAlmostEqual(int(float(await f.evaluate("frame.time.utc()"))), int(time.time()), delta=5) 95 | await f.sleep() 96 | self.assertEqual(await f.evaluate("test_var"), '1') 97 | self.assertFalse(f.camera.is_awake) 98 | 99 | if __name__ == "__main__": 100 | unittest.main() -------------------------------------------------------------------------------- /tests/test_microphone.py: -------------------------------------------------------------------------------- 1 | import os 2 | import unittest 3 | import asyncio 4 | import sys 5 | import time 6 | import numpy as np 7 | 8 | from frame_sdk import Frame 9 | 10 | class TestMicrophone(unittest.IsolatedAsyncioTestCase): 11 | async def test_basic_audio_recording(self): 12 | async with Frame() as f: 13 | f.microphone.sample_rate = 8000 14 | f.microphone.bit_depth = 16 15 | audio_data = await f.microphone.record_audio(None, 5) 16 | self.assertAlmostEqual(len(audio_data), 5 * 8000, delta=4000) 17 | self.assertLessEqual(np.max(audio_data), np.iinfo(np.int16).max) 18 | self.assertGreaterEqual(np.min(audio_data), np.iinfo(np.int16).min) 19 | self.assertGreater(abs(int(np.max(audio_data)) - int(np.min(audio_data))), 50) 20 | 21 | f.microphone.sample_rate = 16000 22 | f.microphone.bit_depth = 8 23 | audio_data = await f.microphone.record_audio(None, 5) 24 | self.assertAlmostEqual(len(audio_data), 5 * 16000, delta=4000) 25 | self.assertLessEqual(np.max(audio_data), np.iinfo(np.int8).max) 26 | self.assertGreaterEqual(np.min(audio_data), np.iinfo(np.int8).min) 27 | self.assertGreater(abs(int(np.max(audio_data)) - int(np.min(audio_data))), 5) 28 | 29 | async def test_end_on_silence(self): 30 | async with Frame() as f: 31 | f.microphone.sample_rate = 8000 32 | f.microphone.bit_depth = 16 33 | await f.display.show_text("Testing microphone, please be silent!") 34 | audio_data = await f.microphone.record_audio(2, 20) 35 | await f.display.clear() 36 | self.assertLess(len(audio_data), 5 * 8000) 37 | 38 | async def test_save_audio(self): 39 | async with Frame() as f: 40 | f.microphone.sample_rate = 8000 41 | f.microphone.bit_depth = 16 42 | await f.display.show_text("Testing microphone, please be silent!") 43 | length = await f.microphone.save_audio_file("test.wav",2,20) 44 | await f.display.clear() 45 | self.assertLess(length, 5) 46 | self.assertTrue(os.path.exists("test.wav")) 47 | self.assertGreater(os.path.getsize("test.wav"), 500) 48 | os.remove("test.wav") 49 | 50 | async def 
test_record_and_play_audio(self): 51 | async with Frame() as f: 52 | for sample_rate in [8000, 16000]: 53 | for bit_depth in [8, 16]: 54 | if sample_rate == 16000 and bit_depth == 16: 55 | continue 56 | f.microphone.sample_rate = sample_rate 57 | f.microphone.bit_depth = bit_depth 58 | data = await f.microphone.record_audio(None, 5) 59 | start_time = time.time() 60 | f.microphone.play_audio(data) 61 | end_time = time.time() 62 | self.assertAlmostEqual(end_time - start_time, 5, delta=0.5) 63 | self.assertAlmostEqual(end_time - start_time, len(data) / f.microphone.sample_rate, delta=0.2) 64 | start_time = time.time() 65 | await f.microphone.play_audio_async(data) 66 | end_time = time.time() 67 | self.assertAlmostEqual(end_time - start_time, len(data) / f.microphone.sample_rate, delta=0.2) 68 | start_time = time.time() 69 | f.microphone.play_audio_background(data) 70 | end_time = time.time() 71 | self.assertAlmostEqual(end_time - start_time, 0, delta=0.1) 72 | 73 | 74 | 75 | if __name__ == "__main__": 76 | unittest.main() 77 | -------------------------------------------------------------------------------- /tests/test_motion.py: -------------------------------------------------------------------------------- 1 | import os 2 | import unittest 3 | import asyncio 4 | import sys 5 | import time 6 | import numpy as np 7 | 8 | from frame_sdk import Frame 9 | from frame_sdk.motion import Direction 10 | 11 | class TestMotion(unittest.IsolatedAsyncioTestCase): 12 | async def test_get_direction(self): 13 | async with Frame() as f: 14 | await f.display.show_text("Testing motion, don't move the Frame!") 15 | direction1 = await f.motion.get_direction() 16 | self.assertIsInstance(direction1, Direction) 17 | self.assertGreaterEqual(direction1.pitch, -180) 18 | self.assertLessEqual(direction1.pitch, 180) 19 | self.assertGreaterEqual(direction1.roll, -180) 20 | self.assertLessEqual(direction1.roll, 180) 21 | self.assertGreaterEqual(direction1.heading, 0) 22 | self.assertLessEqual(direction1.heading, 360) 23 | await asyncio.sleep(1) 24 | direction2 = await f.motion.get_direction() 25 | await f.display.clear() 26 | self.assertIsInstance(direction2, Direction) 27 | diff = direction2 - direction1 28 | self.assertAlmostEqual(diff.amplitude(), 0, delta=5) 29 | self.assertAlmostEqual(direction1.pitch, direction2.pitch, delta=5) 30 | self.assertAlmostEqual(direction1.roll, direction2.roll, delta=5) 31 | self.assertAlmostEqual( 32 | direction1.heading, direction2.heading, delta=5) 33 | 34 | async def test_register_tap_handler(self): 35 | async with Frame() as f: 36 | # no good way to actually test these being called, but let's at least make sure they don't throw errors 37 | await f.display.show_text("Testing tap, tap the Frame!") 38 | await f.motion.run_on_tap(callback=lambda: print("Tapped again!")) 39 | await f.motion.run_on_tap(lua_script="print('tap1')", callback=lambda: print("tap2")) 40 | await f.motion.run_on_tap(None, None) 41 | await asyncio.sleep(1) 42 | await f.motion.run_on_tap(lua_script="frame.display.text('tapped!',1,1);frame.display.show()") 43 | 44 | 45 | 46 | if __name__ == "__main__": 47 | unittest.main() 48 | --------------------------------------------------------------------------------
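The integration tests above all open a live connection (`async with Frame() as f:`), so they assume a physical Frame within Bluetooth range rather than any mock. As a quick sanity check outside the test runner, here is a minimal end-to-end sketch built only from calls exercised in the tests above (`get_battery_level`, `display.show_text`, `camera.take_photo`, `microphone.save_audio_file`, `motion.run_on_tap`). The output filenames are illustrative, and saving the photo with a `.jpg` extension assumes JPEG-encoded bytes, which the tests themselves do not verify.

```python
import asyncio
from frame_sdk import Frame

async def main():
    # Assumes a Frame is powered on and in Bluetooth range; the context
    # manager handles connect/disconnect as in the tests above.
    async with Frame() as f:
        # Battery check and a greeting, mirroring test_frame.py / test_display.py.
        print(f"Battery: {await f.get_battery_level()}%")
        await f.display.show_text("Hello from the sketch")

        # Take a photo with an explicit pan, as in test_camera.py.
        # The tests only assert on byte length; .jpg is an assumption here.
        photo = await f.camera.take_photo(pan=140)
        with open("photo.jpg", "wb") as out:
            out.write(photo)

        # Record up to 20 s of audio, stopping after 2 s of silence,
        # as in test_microphone.py; returns the recorded length in seconds.
        await f.display.show_text("Recording... please speak")
        seconds = await f.microphone.save_audio_file("clip.wav", 2, 20)
        print(f"Recorded {seconds:.1f} s of audio to clip.wav")

        # Register an on-device tap handler, as in test_motion.py.
        await f.motion.run_on_tap(
            lua_script="frame.display.text('tapped!',1,1);frame.display.show()"
        )

        await f.display.clear()

if __name__ == "__main__":
    asyncio.run(main())
```

Run against hardware the same way the suite does: each test module calls `unittest.main()`, so `python -m unittest discover tests` exercises everything, provided a Frame is nearby and charged.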