├── __init__.py ├── stt ├── __init__.py └── simple_stt.py ├── object_watcher ├── __init__.py └── object_watcher.py ├── simple_audio_recorder ├── __init__.py └── simple_audio_recorder.py ├── .gitignore ├── requirements.txt ├── setup.py ├── main.py ├── README.md └── LICENSE /__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /stt/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /object_watcher/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /simple_audio_recorder/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .idea 2 | **.pyc 3 | *egg-info* 4 | **.wav 5 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | pyaudio 2 | opencv-python 3 | SpeechRecognition 4 | websocket-client 5 | dlib -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | 3 | setup( 4 | name='mycroft_face_wake', 5 | version='', 6 | packages=['', 'object_watcher', 'simple_audio_recorder', 'stt'], 7 | url='', 8 | license='', 9 | author='Christopher Rogers', 10 | author_email='', 11 | description='' 12 | ) 13 | -------------------------------------------------------------------------------- /stt/simple_stt.py: 
-------------------------------------------------------------------------------- 1 | from speech_recognition import Recognizer, AudioFile 2 | 3 | 4 | class SimpleSTT(object): 5 | 6 | def __init__(self): 7 | self.recognizer = Recognizer() 8 | 9 | def transcribe(self, path_to_source): 10 | with AudioFile(path_to_source) as source: 11 | audio = self.recognizer.listen(source) 12 | return self.recognizer.recognize_google(audio) 13 | -------------------------------------------------------------------------------- /main.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | from object_watcher.object_watcher import ObjectWatcher 3 | from object_watcher.object_watcher import create_dlib_frontal_face_detector 4 | from simple_audio_recorder.simple_audio_recorder import SimpleAudioRecorder 5 | from stt.simple_stt import SimpleSTT 6 | from websocket import create_connection 7 | import json 8 | import time 9 | import traceback 10 | 11 | transcriber = SimpleSTT() 12 | 13 | URL_TEMPLATE = "{scheme}://{host}:{port}{path}" 14 | 15 | 16 | def send_message(message, host="localhost", port=8181, path="/core", scheme="ws"): 17 | payload = json.dumps({ 18 | "type": "recognizer_loop:utterance", 19 | "context": "", 20 | "data": { 21 | "utterances": [message] 22 | } 23 | }) 24 | url = URL_TEMPLATE.format(scheme=scheme, host=host, port=str(port), path=path) 25 | ws = create_connection(url) 26 | ws.send(payload) 27 | ws.close() 28 | 29 | 30 | def transcribe_and_send(path_to_source): 31 | try: 32 | text = transcriber.transcribe(path_to_source) 33 | print(text) 34 | send_message(text) 35 | except Exception: 36 | traceback.print_exc() 37 | 38 | if __name__ == "__main__": 39 | recorder = SimpleAudioRecorder() 40 | 41 | with SimpleAudioRecorder() as recorder: 42 | fw = ObjectWatcher(detector=create_dlib_frontal_face_detector()) 43 | fw.register_object_entered_callback(recorder.start_recording, "output.wav") 44 | 
fw.register_object_entered_callback(print, "recording!") 45 | 46 | fw.register_object_left_callback(recorder.stop_recording) 47 | fw.register_object_left_callback(print, "done recording!") 48 | fw.register_object_left_callback(transcribe_and_send, "output.wav") 49 | 50 | fw.start() 51 | 52 | try: 53 | while True: 54 | time.sleep(2) 55 | except KeyboardInterrupt: 56 | fw.terminate() 57 | fw.join() 58 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # mycroft-face-wake 2 | 3 | Trigger Mycroft without the use of a wake-word. 4 | 5 | ## Simple Demonstration 6 | Short demo: https://www.youtube.com/watch?v=H9P5TRo8nQs&feature=youtu.be 7 | 8 | Longer explanation and demo: https://www.youtube.com/watch?v=ytKUTBfjnQI 9 | 10 | ## Description and Motivation 11 | 12 | This is a working prototype. It offers no configuration, must be run on the same host 13 | as [mycroft-core](https://github.com/MycroftAI/mycroft-core), and all STT is handled 14 | by Google's API (via the [SpeechRecognition library](https://pypi.python.org/pypi/SpeechRecognition/)). 15 | It does not replace the wake-word functionality - they can both used at the same time. 16 | 17 | I feel that this may offer a more natural way to interact with a voice assistant. Wake words are great, 18 | but often when I talk to people, I just make eye-contact and begin talking. This emulates that type of 19 | interaction (though again, it does not replace the wake-word functionality). 20 | 21 | ~~This runs quite nicely on my desktop and laptop, but a Raspberry Pi is not quite powerful enough to handle 22 | it effectively (in my brief test, it took roughly 4 seconds to detect a face on the Pi, where it happens 23 | virtually instantanesouly on a desktop/laptop). Note that there is also some extra work to get this running 24 | on a Pi - it will not work as-is. 
I may add support at a later date, however, given the performance, it 25 | is not a priority.~~ 26 | 27 | **Update:** This can work on a pi! See below. 28 | 29 | ## Usage (Tested on Ubuntu 17.10 - see below for Raspberry Pi Support) 30 | 31 | 1) `git clone https://github.com/ChristopherRogers1991/mycroft-face-wake.git` 32 | 2) `cd mycroft-face-wake && pip install -r requirements.txt && pip install .` 33 | 3) `python main.py` 34 | 35 | 36 | ## Raspberry Pi Support 37 | 38 | Demo Video: https://youtu.be/RhQqz_Yy9Fk 39 | 40 | This assumes you are running Raspbian on a Raspberry Pi 3. 41 | 42 | 1) `sudo apt-get install python-opencv cmake` 43 | 2) `virtualenv -p python2 --system-site-packages ~/.virtualenvs/mycroft-face-wake` 44 | 3) `source ~/.virtualenvs/mycroft-face-wake/bin/activate` 45 | 4) `pip install dlib` 46 | 5) `git clone https://github.com/ChristopherRogers1991/mycroft-face-wake.git` 47 | 6) `cd mycroft-face-wake && pip install -r requirements.txt && pip install .` 48 | 7) `python main.py` 49 | 50 | Note that this is using the python-opencv package from the repos, which only seems to work with 51 | python2. If you're ambitious, you could probably compile it yourself, and get it to work with 52 | python3. 
53 | -------------------------------------------------------------------------------- /simple_audio_recorder/simple_audio_recorder.py: -------------------------------------------------------------------------------- 1 | import pyaudio 2 | import wave 3 | from threading import Thread 4 | import logging 5 | 6 | 7 | log = logging.getLogger(__name__) 8 | 9 | class SimpleAudioRecorder(object): 10 | 11 | def __init__(self, format=pyaudio.paInt16, channels=1, rate=44100, 12 | frames_per_buffer=1024): 13 | """ 14 | All parameters are passed straight to an instance of pyaudio.PyAudio 15 | 16 | Parameters 17 | ---------- 18 | format : int 19 | channels : int 20 | rate : int 21 | frames_per_buffer : int 22 | """ 23 | self.format = format 24 | self.channels = channels 25 | self.rate = rate 26 | self.frames_per_buffer = frames_per_buffer 27 | 28 | self.audio = pyaudio.PyAudio() 29 | self.recording = False 30 | self.thread = None 31 | self._stop_recording = False 32 | 33 | def __enter__(self): 34 | return self 35 | 36 | def __exit__(self, exc_type, exc_val, exc_tb): 37 | self.terminate() 38 | 39 | def start_recording(self, destination): 40 | """ 41 | Call this to begin recording audio. Audio recording will be done in a 42 | separate thread. Call `stop_recording` to stop the recording, and write 43 | the resulting audio to `destination`. 44 | 45 | Calling multiple times before calling `stop_recording` will have no 46 | effect (other than a logged warning). 47 | 48 | Parameters 49 | ---------- 50 | destination : str 51 | Path to an output file. If it does not exist, it will be created. 52 | If is does exist, it will be overwritten. 
53 | 54 | """ 55 | if self.recording: 56 | log.warning("Already recording!") 57 | return 58 | 59 | self.thread = Thread(target=self._record_until_stopped, 60 | args=[destination]) 61 | self.thread.start() 62 | 63 | def _record_until_stopped(self, destination): 64 | stream = self.audio.open(format=self.format, channels=self.channels, rate=self.rate, input=True, 65 | frames_per_buffer=self.frames_per_buffer) 66 | 67 | frames = [] 68 | while not self._stop_recording: 69 | data = stream.read(self.frames_per_buffer) 70 | frames.append(data) 71 | 72 | stream.stop_stream() 73 | stream.close() 74 | self._write_wave_file(frames, destination) 75 | 76 | def _write_wave_file(self, frames, destination): 77 | waveFile = wave.open(destination, 'wb') 78 | waveFile.setnchannels(self.channels) 79 | waveFile.setsampwidth(self.audio.get_sample_size(self.format)) 80 | waveFile.setframerate(self.rate) 81 | waveFile.writeframes(b''.join(frames)) 82 | waveFile.close() 83 | 84 | def stop_recording(self): 85 | self._stop_recording = True 86 | self.thread.join() 87 | self.recording = False 88 | self._stop_recording = False 89 | 90 | def terminate(self): 91 | """ 92 | 93 | Wraps `self.audio.terminate`. See PyAudio docs 94 | for details. 
95 | 96 | """ 97 | self.audio.terminate() 98 | 99 | 100 | if __name__ == "__main__": 101 | import time 102 | recorder = SimpleAudioRecorder() 103 | recorder.start_recording("./test.wav") 104 | time.sleep(5) 105 | recorder.stop_recording() 106 | -------------------------------------------------------------------------------- /object_watcher/object_watcher.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | 3 | import cv2 4 | import time 5 | from threading import Thread 6 | 7 | try: 8 | from cv2.cv2 import CAP_PROP_FRAME_WIDTH as FRAME_WIDTH, CAP_PROP_FRAME_HEIGHT as FRAME_HEIGHT, CAP_PROP_FPS as FPS 9 | except ImportError: 10 | from cv2.cv import CV_CAP_PROP_FRAME_WIDTH as FRAME_WIDTH, CV_CAP_PROP_FRAME_HEIGHT as FRAME_HEIGHT,\ 11 | CV_CAP_PROP_FPS as FPS 12 | 13 | class ObjectWatcher(Thread): 14 | 15 | def __init__(self, device=0, delay=.2, detector=None): 16 | """ 17 | 18 | Parameters 19 | ---------- 20 | device : int 21 | The video device index, passed directly to cv2.VideoCapture 22 | delay : float 23 | To reduce false poitives/noise, the object must be present 24 | for at lease this many seconds before it will be registered. 25 | detector : callable 26 | This must take a single frame as it's only argument, and 27 | return a true or false value indicating whether the object 28 | is present. See `create_haar_cascade_detector` and 29 | `create_dlib_frontal_face_detector` for examples. 
30 | """ 31 | super(ObjectWatcher, self).__init__(name="object_watcher") 32 | self._video_capture = cv2.VideoCapture(device) 33 | self._video_capture.set(FRAME_WIDTH, 320) 34 | self._video_capture.set(FRAME_HEIGHT, 240) 35 | self._video_capture.set(FPS, 5) 36 | self._delay = delay 37 | self._detector = detector 38 | 39 | self._call_backs = {"object_entered" : [], "object_left" : []} 40 | self._terminate = False 41 | self._time_of_first_face = None 42 | 43 | def terminate(self): 44 | """ 45 | Terminate the object detection thread. 46 | 47 | """ 48 | self._terminate = True 49 | 50 | def register_object_entered_callback(self, callable, *args, **kwargs): 51 | """ 52 | Callbacks are called in the order in which they were registered. 53 | 54 | Parameters 55 | ---------- 56 | callable : callable 57 | This will be called when the object is detected within the frame. 58 | args : list 59 | Passed to callable. 60 | kwargs : dict 61 | Passed to callable 62 | 63 | """ 64 | self._call_backs["object_entered"].append((callable, args, kwargs)) 65 | 66 | def register_object_left_callback(self, callable, *args, **kwargs): 67 | """ 68 | Callbacks are called in the order in which they were registered. 69 | 70 | Parameters 71 | ---------- 72 | callable : callable 73 | This will be called when the object leaves the frame. 74 | args : list 75 | Passed to callable. 76 | kwargs : dict 77 | Passed to callable 78 | 79 | """ 80 | self._call_backs["object_left"].append((callable, args, kwargs)) 81 | 82 | def device_is_ready(self, timeout=5): 83 | """ 84 | Currently unused. Likely to be removed. 
85 | 86 | Parameters 87 | ---------- 88 | timeout : int 89 | 90 | Returns 91 | ------- 92 | boolean 93 | 94 | """ 95 | if timeout == 0: 96 | while not self._video_capture.isOpened(): 97 | time.sleep(1) 98 | elif timeout > 0: 99 | start = time.time() 100 | while start - time.time() < timeout and not \ 101 | self._video_capture.isOpened(): 102 | time.sleep(1) 103 | return self._video_capture.isOpened() 104 | 105 | def _object_present(self): 106 | """ 107 | 108 | Returns 109 | ------- 110 | boolean 111 | True if the object has been within frame for at 112 | self._delay seconds. 113 | 114 | """ 115 | ret, frame = self._video_capture.read() 116 | 117 | face_found = self._detector(frame) 118 | 119 | if not face_found: 120 | self._time_of_first_face = None 121 | return False 122 | 123 | now = time.time() 124 | 125 | if not self._time_of_first_face: 126 | self._time_of_first_face = now 127 | return False 128 | if now - self._time_of_first_face > self._delay: 129 | return True 130 | return False 131 | 132 | def _run_callbacks(self, callbacks): 133 | for callable, args, kwargs in self._call_backs[callbacks]: 134 | callable(*args, **kwargs) 135 | 136 | def run(self): 137 | """ 138 | Overridden from Thread. 139 | 140 | Do not call this directly! Call `.start()` to 141 | start the thread. 142 | 143 | """ 144 | while not self._terminate: 145 | if self._object_present(): 146 | self._run_callbacks("object_entered") 147 | while self._object_present() and not self._terminate: 148 | pass 149 | else: 150 | self._run_callbacks("object_left") 151 | else: 152 | self._video_capture.release() 153 | self._terminate = False 154 | 155 | 156 | def create_haar_cascade_detector(path_to_haarcascade): 157 | """ 158 | 159 | Parameters 160 | ---------- 161 | path_to_haarcascade : str 162 | The path to a haarcascade XML file. 163 | 164 | Returns 165 | ------- 166 | function 167 | A function that can be used as a detector. In an 168 | ObjectWatcher. 
It takes in a single frame, and 169 | returns a boolean, True if the object represented 170 | by the given haarcascade is within the frame. 171 | 172 | """ 173 | cascade = cv2.CascadeClassifier(path_to_haarcascade) 174 | def detector(frame): 175 | gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY) 176 | objects = cascade.detectMultiScale( 177 | gray, 178 | scaleFactor=1.1, 179 | minNeighbors=5, 180 | minSize=(30, 30) 181 | ) 182 | 183 | return len(objects) > 0 184 | return detector 185 | 186 | 187 | def create_dlib_frontal_face_detector(): 188 | """ 189 | 190 | Returns 191 | ------- 192 | function 193 | A function that can be used as a detector in an 194 | ObjectWatcher. Returns true if a face is detected 195 | within the frame. 196 | 197 | """ 198 | import dlib 199 | frontal_face_detector = dlib.get_frontal_face_detector() 200 | def detector(frame): 201 | gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY) 202 | return frontal_face_detector(gray, 0) 203 | return detector 204 | 205 | 206 | 207 | if __name__ == '__main__': 208 | fw = ObjectWatcher(detector=create_dlib_frontal_face_detector()) 209 | fw.register_object_entered_callback(print, "face entered!") 210 | fw.register_object_left_callback(print, "face left!") 211 | fw.register_object_left_callback(print) 212 | fw.start() 213 | time.sleep(10) 214 | fw.terminate() 215 | fw.join() 216 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 
14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | --------------------------------------------------------------------------------