├── .coveragerc ├── .gitignore ├── .readthedocs.yml ├── .style.yapf ├── .travis.yml ├── LICENSE ├── LICENSE.APACHE2 ├── LICENSE.MIT ├── MANIFEST.in ├── Makefile ├── README.rst ├── asyncari ├── __init__.py ├── client.py ├── model.py ├── state.py └── util.py ├── ci ├── rtd-requirements.txt ├── test-requirements.txt └── travis.sh ├── docs ├── Makefile ├── make.bat └── source │ ├── _static │ └── .gitkeep │ ├── conf.py │ ├── history.rst │ ├── index.rst │ └── intro.rst ├── examples ├── bell.py ├── cleanup_bridges.py ├── example.py ├── originate_example.py ├── playback.py └── zoomcall ├── newsfragments ├── .gitkeep └── README.rst ├── pyproject.toml └── tests ├── __init__.py ├── conftest.py └── test_example.py /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | branch=True 3 | source=asyncari 4 | 5 | [report] 6 | precision = 1 7 | exclude_lines = 8 | pragma: no cover 9 | abc.abstractmethod 10 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Add any project-specific files here: 2 | *.cfg 3 | 4 | # Sphinx docs 5 | docs/build/ 6 | 7 | # Byte-compiled / optimized / DLL files 8 | __pycache__/ 9 | *.py[cod] 10 | *~ 11 | \#* 12 | .#* 13 | 14 | # C extensions 15 | *.so 16 | 17 | # Distribution / packaging 18 | .Python 19 | /build/ 20 | /develop-eggs/ 21 | /dist/ 22 | /eggs/ 23 | /lib/ 24 | /lib64/ 25 | /parts/ 26 | /sdist/ 27 | /var/ 28 | /debian/ 29 | *.egg-info/ 30 | .installed.cfg 31 | *.egg 32 | 33 | # Installer logs 34 | pip-log.txt 35 | 36 | # Unit test / coverage reports 37 | htmlcov/ 38 | .tox/ 39 | .coverage 40 | .coverage.* 41 | .cache 42 | nosetests.xml 43 | coverage.xml 44 | 45 | # Translations 46 | *.mo 47 | 48 | # Mr Developer 49 | .mr.developer.cfg 50 | .project 51 | .pydevproject 52 | 53 | # Rope 54 | .ropeproject 55 | 56 | # Django stuff: 57 | *.log 58 | *.pot 59 | /.pybuild/ 60 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | # https://docs.readthedocs.io/en/latest/yaml-config.html 2 | formats: 3 | - htmlzip 4 | - epub 5 | 6 | requirements_file: ci/rtd-requirements.txt 7 | 8 | python: 9 | version: 3 10 | pip_install: True 11 | -------------------------------------------------------------------------------- /.style.yapf: -------------------------------------------------------------------------------- 1 | [style] 2 | # Align closing bracket with visual indentation. 3 | align_closing_bracket_with_visual_indent=True 4 | 5 | # Allow dictionary keys to exist on multiple lines. For example: 6 | # 7 | # x = { 8 | # ('this is the first element of a tuple', 9 | # 'this is the second element of a tuple'): 10 | # value, 11 | # } 12 | allow_multiline_dictionary_keys=False 13 | 14 | # Allow lambdas to be formatted on more than one line. 15 | allow_multiline_lambdas=False 16 | 17 | # Insert a blank line before a class-level docstring. 18 | blank_line_before_class_docstring=False 19 | 20 | # Insert a blank line before a 'def' or 'class' immediately nested 21 | # within another 'def' or 'class'. For example: 22 | # 23 | # class Foo: 24 | # # <------ this blank line 25 | # def method(): 26 | # ... 27 | blank_line_before_nested_class_or_def=False 28 | 29 | # Do not split consecutive brackets. Only relevant when 30 | # dedent_closing_brackets is set. 
For example: 31 | # 32 | # call_func_that_takes_a_dict( 33 | # { 34 | # 'key1': 'value1', 35 | # 'key2': 'value2', 36 | # } 37 | # ) 38 | # 39 | # would reformat to: 40 | # 41 | # call_func_that_takes_a_dict({ 42 | # 'key1': 'value1', 43 | # 'key2': 'value2', 44 | # }) 45 | coalesce_brackets=False 46 | 47 | # The column limit. 48 | column_limit=99 49 | 50 | # Indent width used for line continuations. 51 | continuation_indent_width=4 52 | 53 | # Put closing brackets on a separate line, dedented, if the bracketed 54 | # expression can't fit in a single line. Applies to all kinds of brackets, 55 | # including function definitions and calls. For example: 56 | # 57 | # config = { 58 | # 'key1': 'value1', 59 | # 'key2': 'value2', 60 | # } # <--- this bracket is dedented and on a separate line 61 | # 62 | # time_series = self.remote_client.query_entity_counters( 63 | # entity='dev3246.region1', 64 | # key='dns.query_latency_tcp', 65 | # transform=Transformation.AVERAGE(window=timedelta(seconds=60)), 66 | # start_ts=now()-timedelta(days=3), 67 | # end_ts=now(), 68 | # ) # <--- this bracket is dedented and on a separate line 69 | dedent_closing_brackets=True 70 | 71 | # Place each dictionary entry onto its own line. 72 | each_dict_entry_on_separate_line=True 73 | 74 | # The regex for an i18n comment. The presence of this comment stops 75 | # reformatting of that line, because the comments are required to be 76 | # next to the string they translate. 77 | i18n_comment= 78 | 79 | # The i18n function call names. The presence of this function stops 80 | # reformattting on that line, because the string it has cannot be moved 81 | # away from the i18n comment. 82 | i18n_function_call= 83 | 84 | # Indent the dictionary value if it cannot fit on the same line as the 85 | # dictionary key. For example: 86 | # 87 | # config = { 88 | # 'key1': 89 | # 'value1', 90 | # 'key2': value1 + 91 | # value2, 92 | # } 93 | indent_dictionary_value=True 94 | 95 | # The number of columns to use for indentation. 96 | indent_width=4 97 | 98 | # Join short lines into one line. E.g., single line 'if' statements. 99 | join_multiple_lines=False 100 | 101 | # Use spaces around default or named assigns. 102 | spaces_around_default_or_named_assign=False 103 | 104 | # Use spaces around the power operator. 105 | spaces_around_power_operator=False 106 | 107 | # The number of spaces required before a trailing comment. 108 | spaces_before_comment=2 109 | 110 | # Insert a space between the ending comma and closing bracket of a list, 111 | # etc. 112 | space_between_ending_comma_and_closing_bracket=False 113 | 114 | # Split before arguments if the argument list is terminated by a 115 | # comma. 116 | split_arguments_when_comma_terminated=True 117 | 118 | # Set to True to prefer splitting before '&', '|' or '^' rather than 119 | # after. 120 | split_before_bitwise_operator=True 121 | 122 | # Split before a dictionary or set generator (comp_for). For example, note 123 | # the split before the 'for': 124 | # 125 | # foo = { 126 | # variable: 'Hello world, have a nice day!' 127 | # for variable in bar if variable != 42 128 | # } 129 | split_before_dict_set_generator=True 130 | 131 | # If an argument / parameter list is going to be split, then split before 132 | # the first argument. 133 | split_before_first_argument=True 134 | 135 | # Set to True to prefer splitting before 'and' or 'or' rather than 136 | # after. 137 | split_before_logical_operator=True 138 | 139 | # Split named assignments onto individual lines. 
140 | split_before_named_assigns=True 141 | 142 | # The penalty for splitting right after the opening bracket. 143 | split_penalty_after_opening_bracket=30 144 | 145 | # The penalty for splitting the line after a unary operator. 146 | split_penalty_after_unary_operator=10000 147 | 148 | # The penalty for splitting right before an if expression. 149 | split_penalty_before_if_expr=0 150 | 151 | # The penalty of splitting the line around the '&', '|', and '^' 152 | # operators. 153 | split_penalty_bitwise_operator=300 154 | 155 | # The penalty for characters over the column limit. 156 | split_penalty_excess_character=4500 157 | 158 | # The penalty incurred by adding a line split to the unwrapped line. The 159 | # more line splits added the higher the penalty. 160 | split_penalty_for_added_line_split=30 161 | 162 | # The penalty of splitting a list of "import as" names. For example: 163 | # 164 | # from a_very_long_or_indented_module_name_yada_yad import (long_argument_1, 165 | # long_argument_2, 166 | # long_argument_3) 167 | # 168 | # would reformat to something like: 169 | # 170 | # from a_very_long_or_indented_module_name_yada_yad import ( 171 | # long_argument_1, long_argument_2, long_argument_3) 172 | split_penalty_import_names=0 173 | 174 | # The penalty of splitting the line around the 'and' and 'or' 175 | # operators. 176 | split_penalty_logical_operator=0 177 | 178 | # Use the Tab character for indentation. 179 | use_tabs=False 180 | 181 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | python: 3 | - 3.5.0 4 | - 3.5.2 5 | - 3.5-dev 6 | - 3.6 7 | - 3.6-dev 8 | - 3.7-dev 9 | sudo: false 10 | dist: trusty 11 | 12 | matrix: 13 | include: 14 | - os: linux 15 | language: generic 16 | env: USE_PYPY_RELEASE_VERSION=5.9-beta 17 | # Uncomment if you want to test on pypy nightly 18 | # - os: linux 19 | # language: generic 20 | # env: USE_PYPY_NIGHTLY=1 21 | - os: osx 22 | language: generic 23 | env: MACPYTHON=3.5.4 24 | - os: osx 25 | language: generic 26 | env: MACPYTHON=3.6.3 27 | - os: linux 28 | language: python 29 | python: 3.6 30 | env: CHECK_DOCS=1 31 | - os: linux 32 | language: python 33 | python: 3.6 34 | env: CHECK_FORMATTING=1 35 | 36 | script: 37 | - ci/travis.sh 38 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | This software is made available under the terms of *either* of the 2 | licenses found in LICENSE.APACHE2 or LICENSE.MIT. Contributions to are 3 | made under the terms of *both* these licenses. 4 | -------------------------------------------------------------------------------- /LICENSE.APACHE2: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 
15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. 
Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 
135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 
194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 203 | -------------------------------------------------------------------------------- /LICENSE.MIT: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining 4 | a copy of this software and associated documentation files (the 5 | "Software"), to deal in the Software without restriction, including 6 | without limitation the rights to use, copy, modify, merge, publish, 7 | distribute, sublicense, and/or sell copies of the Software, and to 8 | permit persons to whom the Software is furnished to do so, subject to 9 | the following conditions: 10 | 11 | The above copyright notice and this permission notice shall be 12 | included in all copies or substantial portions of the Software. 13 | 14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 15 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 16 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 17 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE 18 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 19 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION 20 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 21 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.rst CHEATSHEET.rst LICENSE* CODE_OF_CONDUCT* CONTRIBUTING* 2 | include .coveragerc .style.yapf 3 | include test-requirements.txt 4 | recursive-include docs * 5 | prune docs/build 6 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | #!/usr/bin/make -f 2 | 3 | PACKAGE = asyncari 4 | 5 | ifneq ($(wildcard /usr/share/sourcemgr/make/py),) 6 | include /usr/share/sourcemgr/make/py 7 | # availabe via http://github.com/smurfix/sourcemgr 8 | 9 | else 10 | %: 11 | @echo "Please use 'python setup.py'." 12 | @exit 1 13 | endif 14 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | asyncari 2 | ======== 3 | 4 | Welcome to `asyncari `__! 5 | 6 | AsyncARI is an AnyIO-ified adapter for the Asterisk ARI interface. 
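A minimal connection sketch (the URL, Stasis app name, and credentials are
placeholders for your own Asterisk setup; the pattern follows the
``asyncari.connect`` docstring)::

    import anyio
    import asyncari

    async def handle_msg(msg):
        # Replace this with real event handling.
        print("ARI event:", msg)

    async def main():
        async with asyncari.connect("http://localhost:8088/", "hello",
                                    "asterisk", "asterisk") as ari:
            async for msg in ari:
                ari.taskgroup.start_soon(handle_msg, msg)

    anyio.run(main)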
7 | 8 | License: Your choice of MIT or Apache License 2.0 9 | 10 | -------------------------------------------------------------------------------- /asyncari/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2018, Matthias Urlichs 3 | # 4 | """asyncari client 5 | """ 6 | 7 | import urllib.parse 8 | 9 | import anyio 10 | from contextlib import asynccontextmanager 11 | from asyncswagger11.http_client import AsynchronousHttpClient, ApiKeyAuthenticator 12 | 13 | from asyncari.client import Client 14 | 15 | 16 | @asynccontextmanager 17 | async def connect(base_url, apps, username, password): 18 | """Helper method for easily async connecting to ARI. 19 | 20 | :param base_url: Base URL for Asterisk HTTP server (http://localhost:8088/) 21 | :param apps: the Stasis app(s) to register for. 22 | :param username: ARI username 23 | :param password: ARI password 24 | 25 | Usage:: 26 | async with asyncari.connect(base_url, "hello", username, password) as ari: 27 | async for msg in ari: 28 | ari.taskgroup.start_soon(handle_msg, msg) 29 | 30 | """ 31 | host = urllib.parse.urlparse(base_url).netloc.split(':')[0] 32 | http_client = AsynchronousHttpClient(auth=ApiKeyAuthenticator(host, username + ':' + password)) 33 | try: 34 | async with anyio.create_task_group() as tg: 35 | client = Client(tg, base_url, apps, http_client) 36 | async with client: 37 | try: 38 | yield client 39 | finally: 40 | tg.cancel_scope.cancel() 41 | pass # end client 42 | pass # end taskgroup 43 | finally: 44 | await http_client.close() 45 | pass # end 46 | -------------------------------------------------------------------------------- /asyncari/client.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2018 Matthias Urlichs 3 | # 4 | """Asyncified ARI client library. 5 | """ 6 | 7 | import json 8 | import logging 9 | import os 10 | import re 11 | import sys 12 | import time 13 | import urllib 14 | from pprint import pformat 15 | 16 | import anyio 17 | from asyncswagger11.client import SwaggerClient 18 | from wsproto.events import CloseConnection, TextMessage 19 | 20 | from .model import CLASS_MAP 21 | from .model import Channel, Bridge, Playback, LiveRecording, StoredRecording, Endpoint, DeviceState, Sound 22 | from .model import Repository 23 | 24 | log = logging.getLogger(__name__) 25 | 26 | __all__ = ["Client"] 27 | 28 | 29 | class _EventHandler(object): 30 | """Class to allow events to be unsubscribed. 
31 | """ 32 | 33 | def __init__(self, client, event_type, mangler=None, filter=None): 34 | self.send_stream, self.receive_stream = anyio.create_memory_object_stream() 35 | self.client = client 36 | self.event_type = event_type 37 | self.mangler = mangler 38 | if filter is None: 39 | 40 | def filter(evt): 41 | return True 42 | 43 | self.filter = filter 44 | 45 | async def __call__(self, msg): 46 | if not self.filter(msg): 47 | return 48 | await self.send_stream.send(msg) 49 | 50 | def open(self): 51 | log.debug("ADD %s", self.event_type) 52 | self.client.event_listeners.setdefault(self.event_type, list()).append(self) 53 | 54 | def close(self): 55 | log.debug("DEL %s", self.event_type) 56 | self.client.event_listeners[self.event_type].remove(self) 57 | 58 | async def __aenter__(self): 59 | self.open() 60 | return self 61 | 62 | async def __aexit__(self, *tb): 63 | self.close() 64 | 65 | def __aiter__(self): 66 | return self 67 | 68 | async def __anext__(self): 69 | while True: 70 | res = await self.receive_stream.receive() 71 | if self.mangler: 72 | res = self.mangler(res) 73 | if res is None: 74 | continue 75 | return res 76 | 77 | 78 | class Client: 79 | """Async ARI Client object. 80 | 81 | :param taskgroup: the AnyIO taskgroup to run our task(s) in. 82 | :param apps: the Stasis app(s) to register for. 83 | :param base_url: Base URL for accessing Asterisk. 84 | :param http_client: HTTP client interface. 85 | """ 86 | 87 | def __init__(self, taskgroup, base_url, apps, http_client): 88 | self.taskgroup = taskgroup 89 | url = urllib.parse.urljoin(base_url, "ari/api-docs/resources.json") 90 | self.swagger = SwaggerClient(http_client=http_client, url=url) 91 | self.class_map = CLASS_MAP.copy() 92 | tm = time.time() 93 | self._id_name = "ARI.%x.%x%03x" % (os.getpid(), int(tm), int(tm * 0x1000) & 0xFFF) 94 | self._id_seq = 0 95 | self._reader = None # Event reader 96 | 97 | if isinstance(apps, str): 98 | apps = apps.split(',') 99 | self._apps = apps 100 | 101 | def __repr__(self): 102 | return "<%s:%s>" % (self.__class__.__name__, self._id_name) 103 | 104 | def generate_id(self, typ=""): 105 | self._id_seq += 1 106 | return "%s.%s%d" % (self._id_name, typ, self._id_seq) 107 | 108 | def is_my_id(self, id): 109 | if id == self._id_name: 110 | return True 111 | return id.startswith(self._id_name + '.') 112 | 113 | async def __aenter__(self): 114 | await self._init() 115 | evt = anyio.Event() 116 | self.taskgroup.start_soon(self._run, evt) 117 | await evt.wait() 118 | return self 119 | 120 | async def __aexit__(self, *tb): 121 | with anyio.fail_after(1, shield=True) as scope: 122 | await self.close() 123 | 124 | async def new_channel(self, State, endpoint, **kw): 125 | """Create a new channel. Keywords 'timeout' 'variables' 126 | 'originator' 'formats' are as in ARI.channels.originateWithID(). 127 | 128 | :param State: The :class:`OutgoingState` factory to use. 129 | Called with the new channel. 130 | 131 | Returns: the state of the channel. Note that this state 132 | will have to wait for the initial ``StasisBegin`` event. 
133 | """ 134 | id = self.client.generate_id() 135 | chan = Channel(self, id=id) 136 | ch = await self.channels.originateWithId(endpoint=endpoint, app=self._apps[0], **kw) 137 | return State(ch) 138 | 139 | def __enter__(self): 140 | raise RuntimeError("You need to call 'async with …'.") 141 | 142 | def __exit__(self, *tb): 143 | raise RuntimeError("You need to call 'async with …'.") 144 | 145 | def __iter__(self): 146 | raise RuntimeError("You need to call 'async for …'.") 147 | 148 | def __aiter__(self): 149 | if self._reader is None: 150 | self._reader = _EventHandler(self, '*') 151 | self._reader.open() 152 | return self._reader 153 | 154 | def on_start_of(self, endpoint): 155 | """ 156 | Iterator for StasisStart on a particular sub-endpoint. 157 | 158 | Returns an async iterator that yields (channel,start_event) tuples. 159 | """ 160 | return self.on_channel_event("StasisStart", filter=lambda evt: evt.args[0] == endpoint) 161 | 162 | @property 163 | def app(self): 164 | return self._apps[0] 165 | 166 | async def _run(self, evt: anyio.Event=None): 167 | """Connect to the WebSocket and begin processing messages. 168 | 169 | This method will block until all messages have been received from the 170 | WebSocket, or until this client has been closed. 171 | 172 | :param apps: Application (or list of applications) to connect for 173 | :type apps: str or list of str 174 | 175 | This is a coroutine. Don't call it directly, it's autostarted by 176 | the context manager. 177 | """ 178 | ws = None 179 | apps = ",".join(self._apps) 180 | 181 | try: 182 | ws = await self.swagger.events.eventWebsocket(app=apps) 183 | self.websockets.add(ws) 184 | 185 | if evt is not None: 186 | evt.set() 187 | 188 | await self.__run(ws) 189 | 190 | finally: 191 | if ws is not None: 192 | self.websockets.remove(ws) 193 | with anyio.CancelScope(shield=True): 194 | await ws.close() 195 | 196 | 197 | async def _check_runtime(self, recv): 198 | """This gets streamed a message when processing begins, and `None` 199 | when it ends. Repeat. 200 | """ 201 | while True: 202 | msg = await recv.receive() 203 | if msg is False: 204 | return 205 | assert msg is not None 206 | 207 | try: 208 | with anyio.fail_after(0.5): 209 | msg = await recv.receive() 210 | if msg is False: 211 | return 212 | assert msg is None 213 | except TimeoutError: 214 | log.error("Processing delayed: %s", msg) 215 | t = anyio.current_time() 216 | # don't hard-fail that fast when debugging 217 | with anyio.fail_after(1 if 'pdb' not in sys.modules else 99): 218 | msg = await recv.receive() 219 | if msg is False: 220 | return 221 | assert msg is None, msg 222 | pass # processing delayed, you have a problem 223 | log.error("Processing recovered after %.2f sec", (anyio.current_time()) - t) 224 | 225 | async def __run(self, ws): 226 | """Drains all messages from a WebSocket, sending them to the client's 227 | listeners. 228 | 229 | :param ws: WebSocket to drain. 
230 | """ 231 | 232 | send_stream, receive_stream = anyio.create_memory_object_stream() 233 | 234 | self.taskgroup.start_soon(self._check_runtime, receive_stream) 235 | 236 | ws_ = ws.__aiter__() 237 | while True: 238 | try: 239 | msg = await ws_.__anext__() 240 | except (StopAsyncIteration,anyio.ClosedResourceError): 241 | break 242 | 243 | if isinstance(msg, CloseConnection): 244 | break 245 | elif not isinstance(msg, TextMessage): 246 | log.warning("Unknown JSON message type: %s", repr(msg)) 247 | continue # ignore 248 | msg_json = json.loads(msg.data) 249 | if not isinstance(msg_json, dict) or 'type' not in msg_json: 250 | log.error("Invalid event: %s", msg) 251 | continue 252 | try: 253 | await send_stream.send(msg_json) 254 | await self.process_ws(msg_json) 255 | finally: 256 | await send_stream.send(None) 257 | await send_stream.send(False) 258 | 259 | async def _init(self, RepositoryFactory=Repository): 260 | await self.swagger.init() 261 | # Extract models out of the events resource 262 | events = [ 263 | api['api_declaration'] for api in self.swagger.api_docs['apis'] 264 | if api['name'] == 'events' 265 | ] 266 | if events: 267 | self.event_models = events[0]['models'] 268 | else: 269 | self.event_models = {} 270 | 271 | self.repositories = { 272 | name: Repository(self, name, api) 273 | for (name, api) in self.swagger.resources.items() 274 | } 275 | self.websockets = set() 276 | self.event_listeners = {} 277 | 278 | def __getattr__(self, item): 279 | """Exposes repositories as fields of the client. 280 | 281 | :param item: Field name 282 | """ 283 | repo = self.get_repo(item) 284 | if not repo: 285 | raise AttributeError("'%r' object has no attribute '%s'" % (self, item)) 286 | return repo 287 | 288 | async def close(self): 289 | """Close this ARI client. 290 | 291 | This method will close any currently open WebSockets, and close the 292 | underlying Swaggerclient. 293 | """ 294 | for ws in list(self.websockets): # changes during processing 295 | await ws.close() 296 | await self.swagger.close() 297 | 298 | def get_repo(self, name): 299 | """Get a specific repo by name. 300 | 301 | :param name: Name of the repo to get 302 | :return: Repository, or None if not found. 303 | :rtype: asyncari.model.Repository 304 | """ 305 | return self.repositories.get(name) 306 | 307 | async def process_ws(self, msg): 308 | """Process one incoming websocket message. 309 | """ 310 | msg = EventMessage(self, msg) 311 | 312 | # First, call traditional listeners 313 | if msg['type'] not in {"ChannelDialplan","ChannelVarset"}: 314 | log.debug("DISP ***** Dispatch:%s\n%s", msg, pformat(vars(msg))) 315 | listeners = list(self.event_listeners.get(msg['type'], [])) \ 316 | + list(self.event_listeners.get('*', [])) 317 | for listener in listeners: 318 | cb = await listener(msg) 319 | 320 | # Next, dispatch the event to the objects in the message 321 | await msg._send_event() 322 | 323 | def on_event(self, event_type, mangler=None, filter=None): 324 | """Listener for events with given type. 325 | 326 | :param event_type: String name of the event to register for. 327 | 328 | Usage:: 329 | 330 | async with client.on_object_event("StasisStart") as listener: 331 | async for objs, event in listener: 332 | client.start_soon(handle_new_client, objs, event) 333 | """ 334 | return _EventHandler(self, event_type, mangler=mangler, filter=filter) 335 | 336 | def on_object_event(self, event_type, factory_fn, model_id, filter=None): 337 | """Listener for events with the given type. 
Event fields of 338 | the given model_id type are converted to objects. 339 | 340 | If multiple fields of the event have the type ``model_id``, a dict is 341 | passed mapping the field name to the model object. 342 | 343 | :param event_type: String name of the event to register for. 344 | :param event_cb: Callback function 345 | :type event_cb: (Obj, dict) -> None or (dict[str, Obj], dict) -> 346 | :param factory_fn: Function for creating Obj from JSON 347 | :param model_id: String id for Obj from Swagger models. 348 | 349 | Usage:: 350 | 351 | async with client.on_object_event("StasisStart", Channel,"Channel") as listener: 352 | async for objs, event in listener: 353 | client.start_soon(handle_new_client, objs, event) 354 | """ 355 | # Find the associated model from the Swagger declaration 356 | event_model = self.event_models.get(event_type) 357 | if not event_model: 358 | raise ValueError("Cannot find event model '%s'" % event_type) 359 | 360 | # Extract the fields that are of the expected type 361 | obj_fields = [k for (k, v) in event_model['properties'].items() if v['type'] == model_id] 362 | if not obj_fields: 363 | raise ValueError("Event model '%s' has no fields of type %s" % (event_type, model_id)) 364 | 365 | def extract_objects(event): 366 | """Extract objects of a given type from an event. 367 | 368 | :param event: Event 369 | :param args: Arguments to pass to the event callback 370 | :param kwargs: Keyword arguments to pass to the event 371 | callback 372 | """ 373 | # Extract the fields which are of the expected type 374 | obj = { 375 | obj_field: factory_fn(self, json=event[obj_field]) 376 | for obj_field in obj_fields if event._get(obj_field) 377 | } 378 | # If there's only one field in the schema, just pass that along 379 | if len(obj_fields) == 1: 380 | if obj: 381 | vals = list(obj.values()) 382 | obj = vals[0] 383 | else: 384 | obj = None 385 | return (obj, event) 386 | 387 | return self.on_event(event_type, mangler=extract_objects, filter=filter) 388 | 389 | def on_channel_event(self, event_type, filter=None): 390 | """Listener for Channel related events 391 | 392 | :param event_type: Name of the event to register for. 393 | 394 | Usage:: 395 | 396 | async with client.on_channel_event("StasisStart") as listener: 397 | async for objs, event in listener: 398 | client.start_soon(handle_new_client, objs, event) 399 | """ 400 | return self.on_object_event(event_type, Channel, 'Channel', filter=filter) 401 | 402 | def on_bridge_event(self, event_type, filter=None): 403 | """Listener for Bridge related events 404 | 405 | :param event_type: Name of the event to register for. 406 | """ 407 | return self.on_object_event(event_type, Bridge, 'Bridge', filter=filter) 408 | 409 | def on_playback_event(self, event_type, filter=None): 410 | """Listener for Playback related events 411 | 412 | :param event_type: Name of the event to register for. 413 | """ 414 | return self.on_object_event(event_type, Playback, 'Playback', filter=filter) 415 | 416 | def on_live_recording_event(self, event_type, filter=None): 417 | """Listener for LiveRecording related events 418 | 419 | :param event_type: Name of the event to register for. 420 | """ 421 | return self.on_object_event(event_type, LiveRecording, 'LiveRecording', filter=filter) 422 | 423 | def on_stored_recording_event(self, event_type, filter=None): 424 | """Listener for StoredRecording related events 425 | 426 | :param event_type: Name of the event to register for. 
427 | """ 428 | return self.on_object_event(event_type, StoredRecording, 'StoredRecording', filter=filter) 429 | 430 | def on_endpoint_event(self, event_type, filter=None): 431 | """Listener for Endpoint related events 432 | 433 | :param event_type: Name of the event to register for. 434 | """ 435 | return self.on_object_event(event_type, Endpoint, 'Endpoint', filter=filter) 436 | 437 | def on_device_state_event(self, event_type, filter=None): 438 | """Listener for DeviceState related events 439 | 440 | :param event_type: Name of the event to register for. 441 | """ 442 | return self.on_object_event(event_type, DeviceState, 'DeviceState', filter=filter) 443 | 444 | def on_sound_event(self, event_type, filter=None): 445 | """Listener for Sound related events 446 | 447 | :param event_type: Name of the event to register for. 448 | """ 449 | return self.on_object_event(event_type, Sound, 'Sound', filter=filter) 450 | 451 | 452 | class EventMessage: 453 | """This class encapsulates an event. 454 | All elements with known types are converted to objects, 455 | if a class for them is registered. 456 | 457 | Note:: 458 | The "Dial" event is converted to "DialStart", "DialState" or 459 | "DialResult" depending on whether ``dialstatus`` is empty or not. 460 | """ 461 | 462 | def __init__(self, client, msg): 463 | self._client = client 464 | self._orig_msg = msg 465 | 466 | event_type = msg['type'] 467 | event_model = client.event_models.get(event_type) 468 | if not event_model: 469 | log.warn("Cannot find event model '%s'" % event_type) 470 | return 471 | event_model = event_model.get('properties', {}) 472 | 473 | for k, v in msg.items(): 474 | setattr(self, k, v) 475 | 476 | m = event_model.get(k) 477 | if m is None: 478 | continue 479 | t = m['type'] 480 | is_list = False 481 | m = re.match(r'''List\[(.*)\]''', t) 482 | if m: 483 | t = m.group(1) 484 | is_list = True 485 | factory = client.class_map.get(t) 486 | if factory is None: 487 | continue 488 | if is_list: 489 | v = [factory(client, json=obj) for obj in v] 490 | else: 491 | v = factory(client, json=v) 492 | 493 | setattr(self, k, v) 494 | 495 | if self.type == "Dial": 496 | if self.dialstatus == "": 497 | self.type = "DialStart" 498 | elif self.dialstatus in {"PROGRESS", "RINGING"}: 499 | self.type = "DialState" 500 | else: 501 | self.type = "DialResult" 502 | 503 | def __repr__(self): 504 | return "<%s %s>" % (self.__class__.__name__, self.type) 505 | 506 | async def _send_event(self): 507 | for k in self._orig_msg.keys(): 508 | v = getattr(self, k) 509 | do_ev = getattr(v, 'do_event', None) 510 | if do_ev is not None: 511 | await do_ev(self) 512 | 513 | def __getitem__(self, k): 514 | return self._orig_msg.__getitem__(k) 515 | 516 | def _get(self, k, v=None): 517 | return self._orig_msg.get(k, v) 518 | -------------------------------------------------------------------------------- /asyncari/model.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """Model for mapping ARI Swagger resources and operations into objects. 3 | 4 | The API is modeled into the Repository pattern, as you would find in Domain 5 | Driven Design. 6 | 7 | Each Swagger Resource (a.k.a. API declaration) is mapped into a Repository 8 | object, which has the non-instance specific operations (just like what you 9 | would find in a repository object). 
10 | 11 | Responses from operations are mapped into first-class objects, which themselves 12 | have methods which map to instance specific operations (just like what you 13 | would find in a domain object). 14 | 15 | The first-class objects also have 'on_event' methods, which can subscribe to 16 | Stasis events relating to that object. 17 | """ 18 | 19 | import inspect 20 | import json 21 | import logging 22 | import re 23 | from weakref import WeakValueDictionary 24 | 25 | import anyio 26 | from httpx import HTTPStatusError 27 | 28 | from .util import mayNotExist 29 | 30 | log = logging.getLogger(__name__) 31 | 32 | NO_CONTENT = 204 33 | 34 | 35 | class StateError(RuntimeError): 36 | """The expected or waited-for state didn't occur""" 37 | 38 | 39 | class EventTimeout(Exception): 40 | """There were no events past the timeout""" 41 | 42 | 43 | class ResourceExit(Exception): 44 | """The resource does no longer exist.""" 45 | 46 | 47 | class ChannelExit(ResourceExit): 48 | """The channel has hung up.""" 49 | 50 | 51 | class BridgeExit(ResourceExit): 52 | """The bridge has terminated.""" 53 | 54 | 55 | class OperationError(RuntimeError): 56 | """Some Swagger operation failed""" 57 | 58 | 59 | class Repository(object): 60 | """ARI repository. 61 | 62 | This repository maps to an ARI Swagger resource. The operations on the 63 | Swagger resource are mapped to methods on this object, using the 64 | operation's nickname. 65 | 66 | :param client: ARI client. 67 | :type client: client.Client 68 | :param name: Repository name. Maps to the basename of the resource's 69 | .json file 70 | :param resource: Associated Swagger resource. 71 | :type resource: swaggerpy.client.Resource 72 | """ 73 | 74 | def __init__(self, client, name, resource): 75 | self.client = client 76 | self.name = name 77 | self.api = resource 78 | 79 | def __repr__(self): 80 | return "Repository(%s)" % self.name 81 | 82 | def __getattr__(self, item): 83 | """Maps resource operations to methods on this object. 84 | 85 | :param item: Item name. 86 | 87 | This method needs to return a callable that returns a coroutine, 88 | which allows us to defer the attribute lookup. 89 | """ 90 | 91 | class AttrOp: 92 | def __init__(self, p, item): 93 | self.p = p 94 | self.item = item 95 | 96 | def __repr__(self): 97 | return ".%s>" % (self.p, self.item) 98 | 99 | async def __call__(self, **kwargs): 100 | oper = getattr(self.p.api, self.item, None) 101 | if not (hasattr(oper, '__call__') and hasattr(oper, 'json')): 102 | raise AttributeError("'%r' object has no attribute '%s'" % (self.p, self.item)) 103 | jsc = oper.json 104 | try: 105 | res = await oper(**kwargs) 106 | except HTTPStatusError as exc: 107 | d = getattr(exc, 'data', None) 108 | if d is not None: 109 | d = d.get('message', None) 110 | raise OperationError(d) from exc 111 | 112 | res = await promote(self.p.client, res, jsc) 113 | return res 114 | 115 | return AttrOp(self, item) 116 | #return lambda **kwargs: promote(self.client, oper(**kwargs), oper.json) 117 | 118 | 119 | class ObjectIdGenerator(object): 120 | """Interface for extracting identifying information from an object's JSON 121 | representation. 122 | """ 123 | 124 | def get_params(self, obj_json): 125 | """Gets the paramater values for specifying this object in a query. 126 | 127 | :param obj_json: Instance data. 
128 | :type obj_json: dict 129 | :return: Dictionary with paramater names and values 130 | :rtype: dict of str, str 131 | """ 132 | raise NotImplementedError("Not implemented") 133 | 134 | def id_as_str(self, obj_json): 135 | """Gets a single string identifying an object. 136 | 137 | :param obj_json: Instance data. 138 | :type obj_json: dict 139 | :return: Id string. 140 | :rtype: str 141 | """ 142 | raise NotImplementedError("Not implemented") 143 | 144 | 145 | # noinspection PyDocstring 146 | class DefaultObjectIdGenerator(ObjectIdGenerator): 147 | """Id generator that works for most of our objects. 148 | 149 | :param param_name: Name of the parameter to specify in queries. 150 | :param id_field: Name of the field to specify in JSON. 151 | """ 152 | 153 | def __init__(self, param_name, id_field='id'): 154 | self.param_name = param_name 155 | self.id_field = id_field 156 | 157 | def get_params(self, obj_json): 158 | return {self.param_name: obj_json[self.id_field]} 159 | 160 | def id_as_str(self, obj_json): 161 | return obj_json[self.id_field] 162 | 163 | 164 | QLEN = 99 165 | 166 | 167 | class BaseObject(object): 168 | """Base class for ARI domain objects. 169 | 170 | :param client: ARI client. 171 | :type client: client.Client 172 | :param resource: Associated Swagger resource. 173 | :type resource: swaggerpy.client.Resource 174 | :param json: JSON representation of this object instance. 175 | :type json: dict 176 | """ 177 | 178 | id_generator = ObjectIdGenerator() 179 | cache = None 180 | active = None 181 | id = None 182 | _qr = None 183 | _qw = None 184 | _qlen = 0 185 | json = None 186 | api = None 187 | _waiting = False # protect against re-entering the event iterator 188 | 189 | def __new__(cls, client, id=None, json=None): 190 | if cls.cache is None: 191 | cls.cache = WeakValueDictionary() 192 | if cls.active is None: 193 | cls.active = set() 194 | if id is None: 195 | id = cls.id_generator.id_as_str(json) 196 | self = cls.cache.get(id) 197 | if self is not None: 198 | return self 199 | self = object.__new__(cls) 200 | cls.cache[id] = self 201 | self.json = {} 202 | return self 203 | 204 | def __init__(self, client, id=None, json=None): 205 | if self.api is None: 206 | raise RuntimeError("You need to override .api") 207 | if json is not None: 208 | # assume that the most-current event has the most-current JSON 209 | self.json.update(json) 210 | if self.id is not None: 211 | assert client == self.client 212 | return 213 | if id is None: 214 | id = self.id_generator.id_as_str(json) 215 | self.client = client 216 | self.api = getattr(self.client.swagger, self.api) 217 | self.id = id 218 | self.event_listeners = {} 219 | self._changed = anyio.Event() 220 | self._init() 221 | 222 | def _init(self): 223 | pass 224 | 225 | async def wait_for(self, check): 226 | while True: 227 | r = check() 228 | if r: 229 | return r 230 | await self._changed.wait() 231 | 232 | async def _has_changed(self): 233 | c, self._changed = self._changed, anyio.Event() 234 | c.set() 235 | 236 | def remember(self): 237 | """ 238 | Call this method after you created a persistent object. 239 | 240 | This will ensure that Python won't forget about it even if you 241 | don't keep a reference to it yourself. 242 | """ 243 | type(self).active.add(self) 244 | 245 | def __repr__(self): 246 | return "%s(%s)" % (self.__class__.__name__, self.id) 247 | 248 | def __getattr__(self, item): 249 | """Promote resource operations related to a single resource to methods 250 | on this class. 
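        For example (a sketch; ``addChannel`` is an operation nickname from
        the ARI bridges resource, and the bridge's own id is supplied
        automatically)::

            await bridge.addChannel(channel=chan.id)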
251 | 252 | :param item: 253 | """ 254 | try: 255 | return self.json[item] 256 | except KeyError: 257 | return self._get_enriched(item) 258 | 259 | def _get_enriched(self, item): 260 | oper = getattr(self.api, item, None) 261 | if not (hasattr(oper, '__call__') and hasattr(oper, 'json')): 262 | raise AttributeError("'%r' object has no attribute '%r'" % (self, item)) 263 | jsc = oper.json 264 | 265 | async def enrich_operation(**kwargs): 266 | """Enriches an operation by specifying parameters specifying this 267 | object's id (i.e., channelId=self.id), and promotes HTTP response 268 | to a first-class object. 269 | 270 | :param kwargs: Operation parameters 271 | :return: First class object mapped from HTTP response. 272 | """ 273 | # Add id to param list 274 | kwargs.update(self.id_generator.get_params(self.json)) 275 | log.debug("Issuing command %s %s", item, kwargs) 276 | oper_ = oper 277 | resp = await oper_(**kwargs) 278 | enriched = await promote(self.client, resp, jsc) 279 | return enriched 280 | 281 | return enrich_operation 282 | 283 | async def create(self, **k): 284 | res = await self._get_enriched('create')(**k) 285 | type(res).active.add(res) 286 | return res 287 | 288 | def on_event(self, event_type, fn, *args, **kwargs): 289 | """Register event callbacks for this specific domain object. 290 | 291 | :param event_type: Type of event to register for, or '*' 292 | :type event_type: str 293 | :param fn: Callback function for events: fn(evt, *args, **kwargs) 294 | :type fn: (object, dict) -> None 295 | 296 | All additional arguments or keywords are passed to `fn`. 297 | 298 | The return value is an object with a `close` method; call it to 299 | deregister the event handler. 300 | """ 301 | client = self.client 302 | callback_obj = (fn, args, kwargs) 303 | self.event_listeners.setdefault(event_type, list()).append(callback_obj) 304 | 305 | class EventUnsubscriber(object): 306 | """Class to allow events to be unsubscribed. 307 | """ 308 | 309 | def close(self_): 310 | """Unsubscribe the associated event callback. 311 | """ 312 | try: 313 | self.event_listeners[event_type].remove(callback_obj) 314 | except ValueError: 315 | pass 316 | 317 | return EventUnsubscriber() 318 | 319 | async def do_event(self, msg): 320 | """Run a message through this object's event queue/list""" 321 | callbacks = self.event_listeners.get(msg.type, []) + self.event_listeners.get("*", []) 322 | for p, a, k in callbacks: 323 | log.debug("RunCb:%s %s %s %s", self, p, a, k) 324 | r = p(msg, *a, **k) 325 | if inspect.iscoroutine(r): 326 | await r 327 | 328 | if self._qw is not None: 329 | self._qw.send_nowait(msg) 330 | 331 | # Finally trigger waiting checks 332 | await self._has_changed() 333 | 334 | def __aiter__(self): 335 | if self._qr is None: 336 | self._qw,self._qr = anyio.create_memory_object_stream(QLEN) 337 | return self 338 | 339 | async def __anext__(self): 340 | if self._qr is None: 341 | raise StopAsyncIteration 342 | if self._waiting: 343 | self._waiting = False 344 | raise RuntimeError("Another task is waiting") 345 | try: 346 | self._waiting = True 347 | res = await self._qr.receive() 348 | finally: 349 | if not self._waiting: 350 | raise RuntimeError("Another task has waited") 351 | self._waiting = False 352 | return res 353 | 354 | async def aclose(self): 355 | """No longer queue events""" 356 | if self._qw is not None: 357 | await self._qw.aclose() 358 | self._qw = None 359 | self._qr = None 360 | 361 | 362 | class Channel(BaseObject): 363 | """First class object API. 
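    A typical Stasis-handler sketch (``ari`` is a connected client; the
    operation nicknames ``answer`` and ``play`` come from the ARI channels
    resource, and the sound name is a placeholder)::

        async def on_call(channel, event):
            await channel.answer()
            await channel.play(media="sound:hello-world")
            await channel.hang_up()

        async with ari.on_channel_event("StasisStart") as listener:
            async for channel, event in listener:
                ari.taskgroup.start_soon(on_call, channel, event)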
364 | 365 | :param client: ARI client. 366 | :type client: client.Client 367 | ;param id: Instance ID, if JSON is not yet known 368 | :param json: Instance data 369 | """ 370 | 371 | id_generator = DefaultObjectIdGenerator('channelId') 372 | api = "channels" 373 | bridge = None 374 | _do_hangup = None 375 | hangup_delay = 0.3 376 | _reason = None 377 | _reason_seen = None 378 | prev_state = None 379 | _state = None 380 | 381 | # last is better 382 | REASONS = ("congestion", "no_answer", "busy", "normal") 383 | 384 | def _init(self): 385 | super()._init() 386 | self.playbacks = set() 387 | self.recordings = set() 388 | self.vars = {} 389 | 390 | def set_reason(self, reason): 391 | """Set the reason for hanging up.""" 392 | 393 | if reason not in self.REASONS: 394 | raise RuntimeError("Reason '%s' unknown" % (reason,)) 395 | if self._reason is None: 396 | self._reason = reason 397 | elif self.REASONS.index(reason) > self.REASONS.index(self._reason): 398 | self._reason = reason 399 | 400 | self._reason = reason 401 | if self._reason_seen is not None: 402 | self._reason_seen.set() 403 | 404 | async def hang_up(self, reason=None): 405 | """Call this (and only this) to hang up a channel. 406 | 407 | The actual hangup happens asynchronously. 408 | """ 409 | if self._do_hangup is not None: 410 | return 411 | self._do_hangup = True 412 | if reason is not None: 413 | self.set_reason(reason) 414 | if self._reason is None: 415 | self._reason_seen = anyio.Event() 416 | self.client.taskgroup.start_soon(self._hangup_task) 417 | 418 | async def handle_exit(self, reason="normal"): 419 | """Hang up on exit. 420 | 421 | Override this to be a no-op if you want to redirect the 422 | channel to a non-Stasis dialplan entry instead. 423 | """ 424 | try: 425 | if self._do_hangup is not False: 426 | with mayNotExist: 427 | await self.hangup(reason=reason) 428 | finally: 429 | self.state = "Gone" 430 | self._changed.set() 431 | 432 | async def _hangup_task(self): 433 | if self._reason is None: 434 | with anyio.move_on_after(self.hangup_delay): 435 | await self._reason_seen.wait() 436 | 437 | try: 438 | await self.handle_exit(reason=(self._reason or "normal")) 439 | except Exception as exc: 440 | log.warning("Hangup %s: %s", self, exc) 441 | 442 | self._do_hangup = False 443 | 444 | async def safe_hangup(self): 445 | """Hangup a channel, ignoring 404 errors. 446 | 447 | :param channel: Channel to hangup. 
448 | """ 449 | if not self.json: 450 | self.json = {"id": self.id} 451 | with mayNotExist: 452 | await self.hangup() 453 | 454 | async def do_event(self, msg): 455 | if msg.type == "StasisStart": 456 | type(self).active.add(self) 457 | elif msg.type in {"StasisEnd", "ChannelDestroyed"}: 458 | try: 459 | type(self).active.remove(self) 460 | except KeyError: 461 | pass 462 | self._do_hangup = False 463 | elif msg.type == "ChannelVarset": 464 | self.vars[msg.variable] = msg.value 465 | elif msg.type == "ChannelEnteredBridge": 466 | self.bridge = msg.bridge 467 | elif msg.type == "ChannelLeftBridge": 468 | if self.bridge is msg.bridge: 469 | self.bridge = None 470 | 471 | elif msg.type == "PlaybackStarted": 472 | # does happen when resuming playback 473 | # assert msg.playback not in self.playbacks 474 | self.playbacks.add(msg.playback) 475 | elif msg.type == "PlaybackFinished": 476 | try: 477 | self.playbacks.remove(msg.playback) 478 | except KeyError: 479 | log.warning("%s not in %s", msg.playback, self) 480 | 481 | elif msg.type == "RecordingStarted": 482 | assert msg.recording not in self.recordings 483 | self.recordings.add(msg.recording) 484 | elif msg.type == "RecordingFinished": 485 | try: 486 | self.recordings.remove(msg.recording) 487 | except KeyError: 488 | log.warning("%s not in %s", msg.recording, self) 489 | 490 | elif msg.type == "ChannelHangupRequest": 491 | pass 492 | elif msg.type == "ChannelConnectedLine": 493 | pass 494 | elif msg.type == "ChannelStateChange": 495 | log.debug("State:%s %s", self.state, self) 496 | pass 497 | elif msg.type == "ChannelDtmfReceived": 498 | pass 499 | elif msg.type in {"ChannelDialplan","ChannelVarset"}: 500 | pass 501 | else: 502 | log.warn("Event not recognized: %s for %s", msg, self) 503 | await super().do_event(msg) 504 | 505 | async def wait_up(self): 506 | prev = None 507 | def chk(): 508 | nonlocal prev 509 | if prev is None: 510 | prev = self.state 511 | return False 512 | if prev == self.state: 513 | return 514 | if self.state == "Down": 515 | raise ChannelExit(self.state) 516 | return self.state == "Up" 517 | 518 | await self.wait_for(chk) 519 | 520 | async def wait_bridged(self, bridge=None): 521 | """\ 522 | Wait for the channel to be bridged to @bridge. 523 | 524 | if None, wait for the channel to be connected to any bridge. 525 | """ 526 | 527 | def chk(): 528 | if self._do_hangup is not None: 529 | raise StateError(self) 530 | if bridge is None: 531 | return self.bridge is not None 532 | else: 533 | return self.bridge is bridge 534 | 535 | await self.wait_for(chk) 536 | 537 | async def wait_not_bridged(self, bridge=None): 538 | """\ 539 | Wait for the channel to no longer be bridged to @bridge. 540 | 541 | if None, wait for the channel to be not connected to any bridge. 542 | """ 543 | 544 | def chk(): 545 | if bridge is None: 546 | return self.bridge is None 547 | else: 548 | return self.bridge is not bridge 549 | 550 | await self.wait_for(chk) 551 | 552 | async def wait_not_playing(self): 553 | """\ 554 | Wait until all sound playbacks are finished. 555 | """ 556 | await self.wait_for(lambda: not self.playbacks) 557 | 558 | async def wait_down(self): 559 | await self.wait_for(lambda: self._do_hangup is False) 560 | 561 | async def __anext__(self): 562 | evt = await super().__anext__() 563 | if evt.type in {"StasisEnd", "ChannelDestroyed"}: 564 | raise StopAsyncIteration 565 | return evt 566 | 567 | 568 | class Bridge(BaseObject): 569 | """First class object API. 570 | 571 | :param client: ARI client. 
572 |     :type client: client.Client
573 |     :param id: Instance ID, if JSON is not yet known
574 |     :param json: Instance data
575 | 
576 |     Warning: a bridge is not auto-deleted when the last channel leaves
577 |     or when your program ends! Your code needs to do that on its own.
578 | 
579 |     Unique bridges should have well-known IDs so that they can be
580 |     reconnected to if your program is restarted.
581 |     """
582 | 
583 |     id_generator = DefaultObjectIdGenerator('bridgeId')
584 |     api = "bridges"
585 | 
586 |     def _init(self):
587 |         super()._init()
588 |         self.playbacks = set()
589 |         self.channels = set()
590 | 
591 |     async def do_event(self, msg):
592 |         if msg.type == "BridgeDestroyed":
593 |             try:
594 |                 type(self).active.remove(self)
595 |             except KeyError:  # may or may not be ours
596 |                 pass
597 |         elif msg.type == "BridgeMerged" and msg.bridge is not self:
598 |             type(self).active.remove(self)
599 |             type(self).cache[self.id] = msg.bridge
600 |             msg.bridge.channels |= self.channels
601 |             msg.bridge.playbacks |= self.playbacks
602 |             for ch in self.channels:
603 |                 ch.bridge = msg.bridge
604 |             for pb in self.playbacks:
605 |                 pb.bridge = msg.bridge
606 |         elif msg.type == "ChannelEnteredBridge":
607 |             assert msg.channel not in self.channels
608 |             self.channels.add(msg.channel)
609 |         elif msg.type == "ChannelLeftBridge":
610 |             try:
611 |                 self.channels.remove(msg.channel)
612 |             except KeyError:
613 |                 log.warning("%s not in %s", msg.channel, self)
614 |         elif msg.type == "PlaybackStarted":
615 |             assert msg.playback not in self.playbacks
616 |             self.playbacks.add(msg.playback)
617 |         elif msg.type == "PlaybackFinished":
618 |             try:
619 |                 self.playbacks.remove(msg.playback)
620 |             except KeyError:
621 |                 log.warning("%s not in %s", msg.playback, self)
622 |         else:
623 |             log.warn("Event not recognized: %s for %s", msg, self)
624 |         await super().do_event(msg)
625 | 
626 |         if hasattr(msg, 'bridge'):
627 |             for ch in self.channels - msg.bridge.channels:
628 |                 log.warn("%s: %s not listed", self, ch)
629 |             for ch in msg.bridge.channels - self.channels:
630 |                 log.warn("%s: %s not known", self, ch)
631 | 
632 |     async def __anext__(self):
633 |         evt = await super().__anext__()
634 |         if evt.type == "BridgeDestroyed":
635 |             raise BridgeExit(evt)
636 |         elif evt.type == "BridgeMerged" and evt.bridge is not self:
637 |             self._queue = None
638 |             raise StopAsyncIteration
639 |         return evt
640 | 
641 | 
642 | class Playback(BaseObject):
643 |     """First class object API.
644 | 
645 |     :param client: ARI client.
646 | :type client: client.Client 647 | ;param id: Instance ID, if JSON is not yet known 648 | :param json: Instance data 649 | """ 650 | id_generator = DefaultObjectIdGenerator('playbackId') 651 | api = "playbacks" 652 | ref = None 653 | 654 | def _init(self): 655 | self._is_playing = anyio.Event() 656 | self._is_done = anyio.Event() 657 | target = self.json.get('target_uri', '') 658 | if target.startswith('channel:'): 659 | self.ref = Channel(self.client, id=target[8:]) 660 | elif target.startswith('bridge:'): 661 | self.ref = Bridge(self.client, id=target[7:]) 662 | 663 | async def do_event(self, msg): 664 | if self.ref is not None: 665 | await self.ref.do_event(msg) 666 | if msg.type == "PlaybackStarted": 667 | self._is_playing.set() 668 | elif msg.type == "PlaybackFinished": 669 | self._is_playing.set() 670 | self._is_done.set() 671 | else: 672 | log.warn("Event not recognized: %s for %s", msg, self) 673 | await super().do_event(msg) 674 | 675 | async def wait_playing(self): 676 | """Wait until the sound has started playing""" 677 | await self._is_playing.wait() 678 | 679 | async def wait_done(self): 680 | """Wait until the sound has stopped playing""" 681 | await self._is_done.wait() 682 | 683 | 684 | class LiveRecording(BaseObject): 685 | """First class object API. 686 | 687 | :param client: ARI client 688 | :type client: client.Client 689 | ;param id: Instance ID, if JSON is not yet known 690 | :param json: Instance data 691 | """ 692 | id_generator = DefaultObjectIdGenerator('recordingName', id_field='name') 693 | api = "recordings" 694 | ref = None 695 | 696 | def _init(self): 697 | self._is_recording = anyio.Event() 698 | self._is_done = anyio.Event() 699 | target = self.json.get('target_uri', '') 700 | if target.startswith('channel:'): 701 | self.ref = Channel(self.client, id=target[8:]) 702 | elif target.startswith('bridge:'): 703 | self.ref = Bridge(self.client, id=target[7:]) 704 | 705 | async def do_event(self, msg): 706 | if self.ref is not None: 707 | await self.ref.do_event(msg) 708 | if msg.type == "RecordingStarted": 709 | self._is_recording.set() 710 | elif msg.type == "RecordingFinished": 711 | self._is_recording.set() 712 | self._is_done.set() 713 | else: 714 | log.warn("Event not recognized: %s for %s", msg, self) 715 | await super().do_event(msg) 716 | 717 | async def wait_recording(self): 718 | """Wait until the sound has started recording""" 719 | await self._is_recording.wait() 720 | 721 | async def wait_done(self): 722 | """Wait until the sound has stopped recording""" 723 | await self._is_done.wait() 724 | 725 | 726 | class StoredRecording(BaseObject): 727 | """First class object API. 728 | 729 | :param client: ARI client 730 | :type client: client.Client 731 | ;param id: Instance ID, if JSON is not yet known 732 | :param json: Instance data 733 | """ 734 | id_generator = DefaultObjectIdGenerator('recordingName', id_field='name') 735 | api = "recordings" 736 | 737 | async def do_event(self, msg): 738 | log.warn("Event not recognized: %s for %s", msg, self) 739 | await super().do_event(msg) 740 | 741 | 742 | # noinspection PyDocstring 743 | class EndpointIdGenerator(ObjectIdGenerator): 744 | """Id generator for endpoints, because they are weird. 745 | """ 746 | 747 | def get_params(self, obj_json): 748 | return {'tech': obj_json['technology'], 'resource': obj_json['resource']} 749 | 750 | def id_as_str(self, obj_json): 751 | return "%(tech)s/%(resource)s" % self.get_params(obj_json) 752 | 753 | 754 | class Endpoint(BaseObject): 755 | """First class object API. 
756 | 757 | :param client: ARI client. 758 | :type client: client.Client 759 | ;param id: Instance ID, if JSON is not yet known 760 | :param json: Instance data 761 | """ 762 | id_generator = EndpointIdGenerator() 763 | api = "endpoints" 764 | 765 | async def do_event(self, msg): 766 | log.warn("Event not recognized: %s for %s", msg, self) 767 | await super().do_event(msg) 768 | 769 | 770 | class DeviceState(BaseObject): 771 | """First class object API. 772 | 773 | :param client: ARI client. 774 | :type client: client.Client 775 | ;param id: Instance ID, if JSON is not yet known 776 | :param json: Instance data 777 | """ 778 | id_generator = DefaultObjectIdGenerator('deviceName', id_field='name') 779 | endpoint = "deviceStates" 780 | 781 | async def do_event(self, msg): 782 | log.warn("Event not recognized: %s for %s", msg, self) 783 | await super().do_event(msg) 784 | 785 | 786 | class Sound(BaseObject): 787 | """First class object API. 788 | 789 | :param client: ARI client. 790 | :type client: client.Client 791 | ;param id: Instance ID, if JSON is not yet known 792 | :param json: Instance data 793 | """ 794 | 795 | id_generator = DefaultObjectIdGenerator('soundId') 796 | endpoint = "sounds" 797 | 798 | async def do_event(self, msg): 799 | log.warn("Event not recognized: %s for %s", msg, self) 800 | await super().do_event(msg) 801 | 802 | 803 | class Mailbox(BaseObject): 804 | """First class object API. 805 | 806 | :param client: ARI client. 807 | :type client: client.Client 808 | ;param id: Instance ID, if JSON is not yet known 809 | :param json: Instance data 810 | """ 811 | 812 | id_generator = DefaultObjectIdGenerator('mailboxName', id_field='name') 813 | endpoint = "mailboxes" 814 | 815 | async def do_event(self, msg): 816 | log.warn("Event not recognized: %s for %s", msg, self) 817 | await super().do_event(msg) 818 | 819 | 820 | async def promote(client, resp, operation_json): 821 | """Promote a response from the request's HTTP response to a first class 822 | object. 823 | 824 | :param client: ARI client. 825 | :type client: client.Client 826 | :param resp: asks response. 827 | :type resp: asks.Response 828 | :param operation_json: JSON model from Swagger API. 829 | :type operation_json: dict 830 | :return: 831 | """ 832 | if resp.status_code == NO_CONTENT: 833 | log.debug("resp=%s", resp) 834 | return None 835 | res = resp.text 836 | if res == "": 837 | log.debug("resp=%s (empty)", resp) 838 | return None 839 | resp_json = json.loads(res) 840 | log.debug("resp=%s", resp_json) 841 | 842 | response_class = operation_json['responseClass'] 843 | is_list = False 844 | m = re.match('''List\[(.*)\]''', response_class) 845 | if m: 846 | response_class = m.group(1) 847 | is_list = True 848 | factory = CLASS_MAP.get(response_class) 849 | if factory: 850 | if is_list: 851 | return [factory(client, json=obj) for obj in resp_json] 852 | return factory(client, json=resp_json) 853 | log.info("No mapping for %s" % response_class) 854 | return resp_json 855 | 856 | 857 | CLASS_MAP = { 858 | 'Bridge': Bridge, 859 | 'Channel': Channel, 860 | 'Endpoint': Endpoint, 861 | 'Playback': Playback, 862 | 'LiveRecording': LiveRecording, 863 | 'StoredRecording': StoredRecording, 864 | 'Mailbox': Mailbox, 865 | 'DeviceState': DeviceState, 866 | } 867 | -------------------------------------------------------------------------------- /asyncari/state.py: -------------------------------------------------------------------------------- 1 | """ 2 | Basic state machine for ARI channels. 
3 | 4 | The principle is very simple: On entering a state, :meth:`State.run` is 5 | called. Exiting the state passes control back to the caller. If the channel 6 | hangs up, a :class:`ChannelExit` exception is raised. 7 | """ 8 | 9 | import functools 10 | import inspect 11 | import logging 12 | import math 13 | from concurrent.futures import CancelledError 14 | 15 | import anyio 16 | from httpx import HTTPStatusError 17 | from contextlib import asynccontextmanager 18 | 19 | from .model import ChannelExit, StateError 20 | from .util import NumberTooShortError, NumberTooLongError, NumberTimeoutError, DigitTimeoutError 21 | 22 | log = logging.getLogger(__name__) 23 | 24 | __all__ = [ 25 | "ToplevelChannelState", 26 | "ChannelState", 27 | "BridgeState", 28 | "HangupBridgeState", 29 | "OutgoingChannelState", 30 | "DTMFHandler", 31 | "EvtHandler", 32 | "as_task", 33 | "as_handler_task", 34 | "SyncReadNumber", 35 | "AsyncReadNumber", 36 | "SyncPlay", 37 | ] 38 | 39 | _StartEvt = "_StartEvent" 40 | 41 | CAUSE_MAP = { 42 | 1: "congestion", 43 | 2: "congestion", 44 | 3: "congestion", 45 | 16: "normal", 46 | 17: "busy", 47 | 18: "no_answer", 48 | 19: "no_answer", # but ringing 49 | 21: "busy", # rejected 50 | 27: "congestion", 51 | 34: "congestion", 52 | 38: "congestion", 53 | } 54 | 55 | 56 | class _ResultEvent: 57 | type = "_result" 58 | 59 | def __init__(self, res): 60 | self.res = res 61 | 62 | 63 | # Time for a stupid helper 64 | def _count(it): 65 | n = 0 66 | for _ in it: 67 | n += 1 68 | return n 69 | 70 | 71 | class _ErrorEvent: 72 | type = "_error" 73 | 74 | def __init__(self, exc): 75 | self.exc = exc 76 | 77 | 78 | class DialFailed(RuntimeError): 79 | """ 80 | This exception is raised when dialling fails to establish a channel. 81 | """ 82 | 83 | def __init__(self, status, cause_code=None): 84 | self.status = status 85 | self.cause_code = cause_code 86 | 87 | def repr(self): 88 | return "<%s:%s %s>" % (self.__class__.__name__, self.status, self.cause_code) 89 | 90 | 91 | def as_task(proc): 92 | @functools.wraps(proc) 93 | async def worker(self, *a, **kw): 94 | self.taskgroup.start_soon(functools.partial(proc, self, *a, **kw), name=proc.__name__) 95 | 96 | assert inspect.iscoroutinefunction(proc) 97 | return worker 98 | 99 | 100 | class BaseEvtHandler: 101 | """Our generic event handler. 102 | 103 | Event handlers can be stacked. Events will be processed by the top-most 104 | handler; an event percolates down if it isn't processed. 105 | 106 | Events get queued by calling :meth:`handle`. The handler's main loop 107 | repeatedly calls :meth:`get_event` to fetch the next event, and 108 | processes it. If the event handler either does not exist or explicitly 109 | returns `False`, it is relegated to the next-upper layer, or printed as 110 | a warning (for the bottom event handler). 111 | 112 | Hangups and other "terminal" events should always be processed by the 113 | outermost event handler. 114 | 115 | A handler is activated by entering its async context. It is terminated 116 | by calling :meth:`done`, usually triggered by an event. 117 | 118 | By default, specific events are processed by calling ``on_EVENTNAME``, 119 | though your runner is free to override that. 120 | 121 | To start an event handler, you typically use it as a context manager. 122 | Alternately, you can call its :meth:`start_task` method. In either case, 123 | the actual state machine will run in the background. 124 | 125 | Do not instantiate a ``BaseEvtHandler`` directly. 
Always use or 126 | subclass :class:`EvtHandler`, :class:`ChannelState` or 127 | :class:`BridgeState`. 128 | 129 | You can pass in an `anyio.abc.Event` as the ``ready`` argument to the 130 | class. It will be set once event processing is set up and the 131 | ``on_start`` handler has finished. 132 | """ 133 | # Internally, start_task starts a separate task that enters this state machine's context. 134 | # Entering the context starts _run_with_taskgroup, which creates the 135 | # loop's task group and then executes .run, which loops over incoming 136 | # events and processes them. 137 | # 138 | # Calling .done cancels the task group's context, thus terminates everything that's internal. 139 | # Awaiting the handler itself waits for the internal loop to end. 140 | 141 | # Main client, for Asterisk ARI calls 142 | client = None 143 | 144 | # The event handler leeching off us 145 | _sub = None 146 | 147 | # The task group used to start our main loop 148 | _base_tg = None 149 | 150 | # The task group within our main loop 151 | _tg = None 152 | 153 | # Event signalling that our main loop is done 154 | _done = None 155 | 156 | # Our event channel 157 | _qr = None 158 | _qw = None 159 | 160 | # If this is a model-based toplevel handler, this is the name of the attribute it's based on 161 | _src = None 162 | 163 | # event for maybe-starting a new task 164 | _proc_check = None 165 | 166 | # Lock to prevent parallel runs of get_event 167 | _proc_lock = None 168 | 169 | # Number of tasks working the queue 170 | _n_proc = 0 171 | 172 | # scope of runner wrapper 173 | _run_with_scope = None 174 | 175 | def __init__(self, client, taskgroup=None, ready: anyio.abc.Event=None): 176 | self.client = client 177 | self._base_tg = taskgroup or client.taskgroup 178 | self._ready = ready 179 | 180 | async def start_task(self): 181 | """This is a shortcut for running this object's async context 182 | manager / event loop in a separate task.""" 183 | await self._base_tg.start(self._run_ctx, name="start_task " + self.ref_id) 184 | 185 | async def _run_ctx(self, *, task_status): 186 | assert self._done is None 187 | self._done = anyio.Event() 188 | async with self.task: # event loop 189 | task_status.started() 190 | await self._done.wait() 191 | 192 | async def _task_setup(self): 193 | assert self._qw is None 194 | self._qw,self._qr = anyio.create_memory_object_stream(20) 195 | 196 | async def _task_teardown(self): 197 | if self._qw is not None: 198 | qw, self._qr, self._qw = self._qw, None, None 199 | await qw.aclose() 200 | 201 | @property 202 | @asynccontextmanager 203 | async def task(self): 204 | """ 205 | Context manager to run this state machine's "run" method / main loop. 206 | 207 | Called via ``class.new``. 
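        Illustrative sketch (``MyCall`` stands in for your own
        :class:`ChannelState` subclass; names are examples, not part of
        this module)::

            # run the state machine in the background ...
            state = MyCall(channel)
            await state.start_task()
            ...
            await state            # wait for its main loop to finish

            # ... or scope it with the context manager directly:
            async with MyCall(channel).task:
                ...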
208 | """ 209 | if self._run_with_scope is not None: 210 | raise RuntimeError("Task already running") 211 | 212 | yielded = False 213 | with anyio.CancelScope() as sc: 214 | try: 215 | if self._done is None: 216 | self._done = anyio.Event() 217 | self._run_with_exc = None 218 | 219 | self._run_with_scope = sc 220 | await self._base_tg.start(self._run_with_tg, name="run " + repr(self)) 221 | yielded = True 222 | yield self 223 | 224 | except BaseException as ex: 225 | exc, self._run_with_exc = self._run_with_exc, None 226 | if exc is not None: 227 | raise ex from exc 228 | raise 229 | 230 | else: 231 | exc, self._run_with_exc = self._run_with_exc, None 232 | if exc is not None: 233 | raise exc 234 | 235 | finally: 236 | self._run_with_scope = None 237 | 238 | with anyio.move_on_after(2, shield=True): 239 | await self.done() 240 | 241 | if self._done is not None: 242 | await self._done.wait() 243 | if sc.cancel_called and not yielded: 244 | yield self # ugh 245 | pass 246 | 247 | @property 248 | def taskgroup(self): 249 | """the taskgroup to use""" 250 | if self._tg is None: 251 | return self._base_tg 252 | else: 253 | return self._tg 254 | 255 | async def _run_with_tg(self, *, task_status = None): 256 | try: 257 | async with anyio.create_task_group() as tg: 258 | self._tg = tg 259 | await self._task_setup() 260 | await self.run(task_status=task_status) 261 | except Exception as exc: 262 | self._run_with_exc = exc 263 | if self._run_with_scope is not None: 264 | self._run_with_scope.cancel() 265 | finally: 266 | self._tg = None 267 | await self._task_teardown() 268 | if self._done is not None: 269 | self._done.set() 270 | self._done = None 271 | 272 | async def done(self): 273 | """Signal that this event handler has finished. 274 | 275 | This call cancels the main loop, if any, as well as the loop of any 276 | sub-event handlers which might be running. 277 | """ 278 | log.debug("TeardownRun %r < %r", self, getattr(self, '_prev', None)) 279 | if self._tg is not None: 280 | self._tg.cancel_scope.cancel() 281 | 282 | async def done_sub(self): 283 | """Terminate my sub-handler, assuming one exists. 284 | 285 | Returns True if there was a sub-handler to cancel, False 286 | otherwise. 287 | """ 288 | if not self._sub: 289 | return False 290 | await self._sub.done() 291 | self._sub = None 292 | return True 293 | 294 | async def handle(self, evt): 295 | """Dispatch a single event to this handler. 296 | 297 | * Feed the event to the current sub-handler, if any. 298 | * If the event is handled, return True. 299 | * Otherwise, call ``self.on_EventName(evt)``. If that handler 300 | explicitly returns False, return that, else return True. 301 | """ 302 | if self._sub is not None: 303 | await self._sub.handle(evt) 304 | else: 305 | await self._handle_here(evt) 306 | 307 | async def _handle_here(self, evt): 308 | if self._qw is not None: 309 | try: 310 | await self._qw.send(evt) 311 | except anyio.ClosedResourceError: 312 | pass 313 | 314 | async def _dispatch(self, evt): 315 | typ = evt.type 316 | try: 317 | handler = getattr(self, 'on_' + typ) 318 | except AttributeError: 319 | await self._handle_prev(evt) 320 | return 321 | res = handler(evt) 322 | if inspect.iscoroutine(res): 323 | res = await res 324 | 325 | if res is not False and not res: 326 | res = True 327 | return res 328 | 329 | async def _handle_prev(self, evt): 330 | log.debug("Unhandled event %s on %s", evt, self) 331 | return False 332 | 333 | async def run(self, *, task_status=None): 334 | """ 335 | Process my events. 
336 | 337 | Override+call this e.g. for overall timeouts:: 338 | 339 | async def run(self): 340 | with anyio.fail_after(30): 341 | await super().run() 342 | 343 | This method creates a runner task that do the actual event processing. 344 | A new runner is started if processing an event takes longer than 0.1 seconds. 345 | 346 | Do not replace this method. Do not call it directly. 347 | """ 348 | log.debug("SetupRun %r < %r", self, getattr(self, '_prev', None)) 349 | if task_status is not None: 350 | task_status.started() 351 | await self.on_start() 352 | if self._ready is not None: 353 | self._ready.set() 354 | 355 | self._proc_lock = anyio.Lock() 356 | while True: 357 | if self._n_proc == 0: 358 | self.taskgroup.start_soon(self._process, name="Worker " + self.ref_id) 359 | self._proc_check = anyio.Event() 360 | await anyio.sleep(0.1) 361 | await self._proc_check.wait() 362 | 363 | async def _process(self, evt: anyio.abc.Event=None): 364 | if evt is not None: 365 | evt.set() 366 | try: 367 | log.debug("StartRun %r < %r", self, getattr(self, '_prev', None)) 368 | while True: 369 | self._n_proc += 1 370 | try: 371 | async with self._proc_lock: 372 | evt = await self.get_event() 373 | except StopAsyncIteration: 374 | return 375 | finally: 376 | self._n_proc -= 1 377 | if self._n_proc == 0: 378 | self._proc_check.set() 379 | 380 | # Any unhandled event is relegated to the parent 381 | try: 382 | success = await self._dispatch(evt) 383 | except BaseException as exc: 384 | with anyio.fail_after(2, shield=True): 385 | await self._handle_prev(evt) 386 | raise 387 | else: 388 | if success: 389 | await self._handle_prev(evt) 390 | 391 | finally: 392 | log.debug("StopRun %r < %r", self, getattr(self, '_prev', None)) 393 | 394 | async def get_event(self): 395 | """ 396 | Get the next event from this handler's queue. 397 | Supersede this e.g. for per-event timeouts:: 398 | 399 | class TimeoutEvent: 400 | type = "MyTimeout" 401 | 402 | async def get_event(): 403 | with anyio.move_on_after(30): 404 | return await super().get_event() 405 | return TimeoutEvent() 406 | 407 | async on_MyTimeout(self, evt): 408 | await self.done(None) 409 | 410 | Raises StopAsyncIteration when no more events will arrive. 411 | """ 412 | if self._qr is None: 413 | raise StopAsyncIteration 414 | evt = await self._qr.receive() 415 | if evt is None: 416 | raise StopAsyncIteration 417 | log.debug("Event:%s %s", self, evt) 418 | return evt 419 | 420 | def _repr(self): 421 | """List of attribute+value pairs to include in ``repr``.""" 422 | res = [] 423 | if self._src: 424 | res.append((self._src, getattr(self, self._src))) 425 | return res 426 | 427 | @property 428 | def ref(self): 429 | s = self 430 | while s._src is None and getattr(s, '_prev', None) is not None: 431 | s = s._prev 432 | if s._src is None: 433 | return None 434 | return getattr(s, s._src) 435 | 436 | @property 437 | def ref_id(self): 438 | r = self.ref 439 | if r is None: 440 | return '?' 441 | return r.id 442 | 443 | def __repr__(self): 444 | return "<%s: %s>" % ( 445 | self.__class__.__name__, ','.join("%s=%s" % (a, b) for a, b in self._repr()) 446 | ) 447 | 448 | async def on_start(self): 449 | """Called when the state machine starts up (initial pseudo event). 450 | Defaults to doing nothing. 451 | """ 452 | pass 453 | 454 | async def on_result(self, res): 455 | """Called when a sub-handler's state machine returns a value. 456 | The default is to do nothing. 
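        For example (sketch; ``MenuState`` is a hypothetical
        :class:`AsyncEvtHandler` subclass that eventually calls
        ``await self.done(digits)``)::

            async def on_dtmf_1(self, evt):
                await MenuState(self)       # returns almost immediately

            async def on_result(self, res):
                log.info("menu returned %r", res)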
457 | """ 458 | pass 459 | 460 | async def on_error(self, exc): 461 | """Called when a sub-handler's state macheine raises an error. 462 | 463 | The default is to re-raise the error. 464 | """ 465 | raise exc 466 | 467 | def on__result(self, evt): 468 | """Dispatcher-internal method. Please ignore.""" 469 | return self.on_result(evt.res) 470 | 471 | def on__error(self, evt): 472 | """Dispatcher-internal method. Please ignore.""" 473 | return self.on_error(evt.exc) 474 | 475 | def __await__(self): 476 | """Wait for the run task to terminate and return its result.""" 477 | if self._done is not None: 478 | yield from self._done.wait().__await__() 479 | 480 | 481 | class _EvtHandler(BaseEvtHandler): 482 | """ 483 | common methods for AsyncEvtHandler and SyncEvtHandler 484 | """ 485 | # The event handler we're leeching off of 486 | _prev = None 487 | 488 | # Our main loop's result 489 | _result = None 490 | 491 | def __init__(self, prev, **kw): 492 | self._prev = prev 493 | super().__init__(prev.client, taskgroup=prev.taskgroup, **kw) 494 | 495 | async def _handle_prev(self, evt): 496 | await self._prev._handle_here(evt) 497 | return True 498 | 499 | async def _run_with_tg(self, **kw): 500 | # the event handler stack doesn't allow branches 501 | if self._prev._sub is not None: 502 | raise RuntimeError("Our parent already has a sub-handler") 503 | self._prev._sub = self 504 | 505 | try: 506 | await super()._run_with_tg(**kw) 507 | 508 | finally: 509 | if self._prev._sub is not self: 510 | raise RuntimeError("Problem nesting event handlers") 511 | self._prev._sub = None 512 | 513 | def __await__(self): 514 | # alias "await Handler()" to "await Handler()._await()" 515 | return self._await().__await__() 516 | 517 | async def done(self, result=None): 518 | """Signal that this event handler has finished with this result. 519 | """ 520 | if result is not None: 521 | self._result = result 522 | await super().done() 523 | 524 | async def _await(self): 525 | raise RuntimeError("Use a subclass.") 526 | 527 | 528 | class AsyncEvtHandler(_EvtHandler): 529 | """ 530 | This event handler operates asynchronously, i.e. you start it 531 | off and get its result in an event:: 532 | 533 | class MenuOne(AsyncEvtHandler): 534 | pass # do whatever it takes to handle this submenu 535 | # Somewhere in there you'll call "await self.done(RESULT)" 536 | 537 | async def on_dtmf_1(self evt): 538 | await MenuOne(self) # this returns (almost) immediately 539 | 540 | async def on_result(self, res): 541 | pass # do whatever you want with RESULT 542 | 543 | async def on_error(self, err): 544 | raise err # do whatever you want with the error 545 | 546 | Alternately, use :class:`SyncEvtHandler` in a separate task. 
547 | 548 | """ 549 | 550 | async def _run_with_tg(self, **kw): 551 | try: 552 | await super()._run_with_tg(**kw) 553 | except anyio.get_cancelled_exc_class(): 554 | with anyio.fail_after(2, shield=True): 555 | if self._done.is_set(): 556 | await self._handle_prev(_ResultEvent(self._result)) 557 | else: 558 | await self._handle_prev(_ErrorEvent(CancelledError())) 559 | raise 560 | except Exception as exc: 561 | await self._handle_prev(_ErrorEvent(exc)) 562 | except BaseException: 563 | with anyio.fail_after(2, shield=True): 564 | await self._handle_prev(_ErrorEvent(CancelledError())) 565 | raise 566 | else: 567 | await self._handle_prev(_ResultEvent(self._result)) 568 | 569 | async def _await(self): 570 | await self._start_task() 571 | 572 | 573 | class SyncEvtHandler(_EvtHandler): 574 | """ 575 | This event handler operates synchronously, i.e. you can simply run it 576 | and get its result:: 577 | 578 | class MenuOne(SyncEvtHandler): 579 | pass # do whatever it takes to handle this submenu 580 | # Somewhere in there you'll call "await self.done(RESULT)" 581 | 582 | @as_task 583 | async def on_digit_1(self evt): 584 | try: 585 | res = await MenuOne(self) 586 | except Exception as err: 587 | raise # do whatever you want with the error 588 | else: 589 | pass # do whatever you want with RESULT 590 | 591 | You **must** decorate your handler with :func:`as_task` or otherwise 592 | delegate this call to another task. If you don't, event handling **will** 593 | deadlock. You'll also get an error message that your event handler 594 | takes too long. 595 | 596 | Alternately, use :class:`AsyncEvtHandler` and `on_result`. 597 | """ 598 | 599 | async def _await(self): 600 | """This does not use context management, because we want to get errors.""" 601 | await self._run_with_tg() 602 | 603 | if isinstance(self._result, Exception): 604 | raise self._result 605 | return self._result 606 | 607 | 608 | class DTMFHandler: 609 | """A handler mix-in that dispatches DTMF tones to specific handlers. 610 | 611 | This is not a stand-alone class – use as a mix-in to ``EvtHandler``, 612 | ``ChannelState``, or ``BridgeState``. 613 | """ 614 | 615 | async def on_ChannelDtmfReceived(self, evt): 616 | """Dispatch DTMF events. 617 | 618 | Calls ``on_dtmf_{0-9,A-D,Star,Pound}`` methods. (Note capitalization.) 619 | If that doesn't exist and a letter is dialled, call ``on_dtmf_letter``. 620 | If that doesn't exist and a digit is dialled, call ``on_dtmf_digit``. 621 | If that doesn't exist either, call ``on_dtmf``. 622 | If that doesn't exist either, punt to calling state machine. 623 | """ 624 | 625 | digit = evt.digit 626 | if digit == '#': 627 | digit = 'Pound' 628 | elif digit == '*': 629 | digit = 'Star' 630 | proc = getattr(self, 'on_dtmf_' + digit, None) 631 | if proc is None and digit >= '0' and digit <= '9': 632 | proc = getattr(self, 'on_dtmf_digit', None) 633 | if proc is None and digit >= 'A' and digit <= 'D': 634 | proc = getattr(self, 'on_dtmf_letter', None) 635 | if proc is None: 636 | proc = getattr(self, 'on_dtmf', None) 637 | 638 | if proc is None: 639 | log.info("Unhandled DTMF %s on %s", evt.digit, self) 640 | return False 641 | else: 642 | p = proc(evt) 643 | if inspect.iscoroutine(p): 644 | p = await p 645 | return p 646 | 647 | 648 | class _ThingEvtHandler(BaseEvtHandler): 649 | async def run(self, *, task_status=None): 650 | if self._tg is None: 651 | raise RuntimeError("I do not have a task group. 
Use 'async with' or 'start_task'.") 652 | handler = self.ref.on_event("*", self.handle) 653 | try: 654 | await super().run(task_status=task_status) 655 | finally: 656 | handler.close() 657 | 658 | 659 | class ChannelState(_ThingEvtHandler): 660 | """This is the generic state machine for a single channel.""" 661 | _src = 'channel' 662 | last_cause = None 663 | 664 | def __init__(self, channel): 665 | self.channel = channel 666 | super().__init__(channel.client) 667 | 668 | def _repr(self): 669 | res = super()._repr() 670 | try: 671 | res.append(("ch_state", self.channel.state)) 672 | except AttributeError: 673 | pass 674 | if self.last_cause is not None: 675 | res.append(("cause", self.last_cause)) 676 | return res 677 | 678 | async def on_DialResult(self, evt): 679 | if evt.dialstatus != "ANSWER": 680 | raise DialFailed(evt.dialstatus, self.last_cause) 681 | 682 | async def on_ChannelHangupRequest(self, evt): 683 | """kills the channel""" 684 | try: 685 | self.last_cause = evt.cause 686 | except AttributeError: 687 | pass 688 | 689 | async def on_ChannelDestroyed(self, evt): 690 | await self.done() 691 | 692 | async def on_StasisEnd(self, evt): 693 | await self.done() 694 | 695 | 696 | class BridgeState(_ThingEvtHandler): 697 | """ 698 | This is the generic state machine for a bridge. 699 | 700 | The bridge is always auto-destroyed when its handler ends. 701 | """ 702 | _src = 'bridge' 703 | TYPE = "mixing" 704 | calls = set() 705 | bridge = None 706 | 707 | def __init__(self, bridge, **kw): 708 | self.bridge = bridge 709 | super().__init__(bridge.client, **kw) 710 | 711 | @classmethod 712 | def new(cls, client, *a, type="mixing", name=None, **kw): 713 | """ 714 | Create a new bridge with this state machine. 715 | 716 | Always use as `async with …`. 717 | 718 | Arguments other than "client" and "type" are passed to the constructor. 
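        Usage sketch (illustrative; the endpoint string and the
        ``incoming_channel`` variable are examples)::

            async with BridgeState.new(client) as st:
                await st.add(incoming_channel)          # bridge an existing channel
                await st.dial(endpoint="SIP/alice")     # originate and add a callee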
719 | """ 720 | s = object.__new__(cls) 721 | s.client = client 722 | s._base_tg = kw.get('taskgroup', client.taskgroup) 723 | s._bridge_args = dict(type=type, bridgeId=client.generate_id("B")) 724 | if name is not None: 725 | s._bridge_args["name"] = name 726 | s._bridge_kw = kw 727 | return s.task 728 | 729 | async def _task_setup(self): 730 | if self.bridge is None: 731 | self.__init__(await self.client.bridges.create(**self._bridge_args), **self._bridge_kw) 732 | del self._bridge_args 733 | del self._bridge_kw 734 | return await super()._task_setup() 735 | 736 | async def _task_teardown(self, *tb): 737 | with anyio.fail_after(2, shield=True): 738 | await self.teardown() 739 | return await super()._task_teardown(*tb) 740 | 741 | # Any unprocessed events get relegated to the parent 742 | while True: 743 | try: 744 | with anyio.fail_after(0.001): 745 | if self._qr is None: 746 | break 747 | evt = self._qr.get() 748 | except TimeoutError: 749 | break 750 | await self._handle_prev(evt) 751 | 752 | async def add(self, channel): 753 | """Add a new channel to this bridge.""" 754 | self._add_monitor(channel) 755 | await self.bridge.addChannel(channel=channel.id) 756 | await channel.wait_bridged(self.bridge) 757 | 758 | async def on_channel_added(self, channel): 759 | """Hook, called after a channel has been added successfully.""" 760 | pass 761 | 762 | async def remove(self, channel): 763 | """Remove a channel from this bridge.""" 764 | await self.bridge.removeChannel(channel=channel.id) 765 | await channel.wait_bridged(None) 766 | 767 | async def _dial(self, State=ChannelState, **kw): 768 | """Helper to start a call""" 769 | ch_id = self.client.generate_id("C") 770 | log.debug("DIAL %s", kw.get('endpoint', 'unknown')) 771 | ch = await self.client.channels.originate( 772 | channelId=ch_id, 773 | app=self.client.app, 774 | appArgs=["dialed", kw.get('endpoint', 'unknown')], 775 | **kw 776 | ) 777 | self.calls.add(ch) 778 | ch.remember() 779 | self._add_monitor(ch) 780 | return ch 781 | 782 | async def dial(self, State=None, **kw): 783 | """ 784 | Originate a call. Add the called channel to this bridge. 785 | 786 | State: the state machine (factory) to run the new channel under. 787 | 788 | Returns a state instance (if given), or the channel (if not). 789 | """ 790 | 791 | ch = await self._dial(**kw) 792 | try: 793 | await ch.wait_up() 794 | except BaseException: 795 | with anyio.move_on_after(2, shield=True) as s: 796 | await ch.hang_up() 797 | await ch.wait_down() 798 | raise 799 | 800 | if State is None: 801 | return ch 802 | else: 803 | s = State(ch) 804 | await s.start_task() 805 | return s 806 | 807 | def calling(self, State=None, timeout=None, **kw): 808 | """ 809 | Context manager for an outgoing call. 810 | 811 | The context is entered as the call is established. It is 812 | auto-terminated when the context ends. 813 | 814 | Usage:: 815 | 816 | async with bridge.calling(endpoint="SIP/foo/0123456789", timeout=60) as channel: 817 | await channel.play(media='sound:hello-world') 818 | 819 | The timeout only applies to the call setup. 820 | 821 | If a state machine (factory) is passed in, it will be instantiated 822 | run during the call. 823 | 824 | """ 825 | return CallManager(self, State=State, timeout=timeout, **kw) 826 | 827 | async def on_StasisStart(self, evt): 828 | """Hook for channel creation. Connects the channel to this bridge. 
829 | 830 | Call when overriding!""" 831 | ch = evt.channel 832 | await self.bridge.addChannel(channel=ch.id) 833 | 834 | async def on_connected(self, channel): 835 | """Callback when an outgoing call is answered. 836 | 837 | Default: answer all (incoming) channels that are still in RING 838 | """ 839 | for ch in self.bridge.channels: 840 | if ch.state == "Ring": 841 | await ch.answer() 842 | 843 | async def on_timeout(self): 844 | """Timeout handler. Default: terminate the state machine.""" 845 | raise StopAsyncIteration 846 | 847 | async def on_BridgeMerged(self, evt): 848 | if evt.bridge is not self.bridge: 849 | raise StopAsyncIteration 850 | 851 | async def on_ChannelEnteredBridge(self, evt): 852 | # We need to keep track of the channel's state 853 | ch = evt.channel 854 | try: 855 | self.calls.remove(ch) 856 | except KeyError: 857 | pass 858 | await self.on_channel_added(ch) 859 | if ch.state == "Up": # and ch.prev_state != "Up": 860 | await self.on_connected(ch) 861 | 862 | async def on_ChannelLeftBridge(self, evt): 863 | await self._chan_dead(evt) 864 | 865 | def _add_monitor(self, ch): 866 | """Listen to non-bridge events on the channel""" 867 | if not hasattr(ch, '_bridge_evt'): 868 | ch._bridge_evt = ch.on_event("*", self._chan_evt) 869 | 870 | async def _chan_evt(self, evt): 871 | """Dispatcher for forwarding a channel's events to this bridge.""" 872 | if getattr(evt, 'bridge', None) is self: 873 | log.debug("Dispatch hasBRIDGE:%s for %s", evt.type, self) 874 | return # already calling us via regular dispatch 875 | await self.handle(evt) 876 | 877 | async def on_ChannelStateChange(self, evt): 878 | """calls self._chan_state_change""" 879 | await self._chan_state_change(evt) 880 | 881 | async def on_ChannelConnectedLine(self, evt): 882 | """calls self._chan_state_change""" 883 | await self._chan_state_change(evt) 884 | 885 | async def on_ChannelDestroyed(self, evt): 886 | """calls self._chan_dead""" 887 | self._set_cause(evt) 888 | await self._chan_dead(evt) 889 | 890 | async def on_ChannelHangupRequest(self, evt): 891 | """kills the channel""" 892 | self._set_cause(evt) 893 | try: 894 | await evt.channel.hang_up() 895 | except Exception as exc: 896 | log.warning("Hangup %s: %s", evt.channel, exc) 897 | 898 | async def on_channel_end(self, ch, evt=None): 899 | """ 900 | The connection to this channel ended. 901 | 902 | Overrideable, but do call ``await super().on_channel_end(ch,evt)`` first. 903 | """ 904 | try: 905 | self.calls.remove(ch) 906 | except KeyError: 907 | pass 908 | 909 | def _set_cause(self, evt): 910 | """Set the hangup cause for this bridge's channels""" 911 | try: 912 | cc = evt.cause 913 | except AttributeError: 914 | pass 915 | else: 916 | cc = CAUSE_MAP.get(cc, "normal") 917 | for c in self.bridge.channels | self.calls: 918 | c.set_reason(cc) 919 | 920 | async def _chan_dead(self, evt): 921 | ch = evt.channel 922 | 923 | if not hasattr(ch, '_bridge_evt'): 924 | return 925 | 926 | # remove the listener 927 | ch._bridge_evt.close() 928 | del ch._bridge_evt 929 | 930 | await self.on_channel_end(ch, evt) 931 | 932 | async def _chan_state_change(self, evt): 933 | """react to channel state change""" 934 | ch = evt.channel 935 | log.debug("StateChange %s %s", self, ch) 936 | if ch not in self.bridge.channels: 937 | return 938 | if ch.state == "Up": 939 | await self.on_connected(ch) 940 | 941 | async def teardown(self, hangup_reason="normal"): 942 | """Removes all channels from the bridge and destroys it. 943 | 944 | All remaining channels are hung up. 
945 | 
946 |         This method is typically called when leaving the bridge's context
947 |         manager. If you want to keep it online, e.g. for being able to
948 |         cleanly restart a PBX without downtime, you may override this --
949 |         but you're then responsible for recovering state after restarting,
950 |         and you still need to clean up bridges that are no longer needed.
951 | 
952 |         """
953 |         if self.bridge is None:
954 |             return
955 |         with anyio.move_on_after(2, shield=True) as s:
956 |             log.info("TEARDOWN %s %s", self, self.bridge.channels)
957 |             for ch in self.bridge.channels | self.calls:
958 |                 try:
959 |                     await ch.hang_up(reason=hangup_reason)
960 |                 except Exception as exc:
961 |                     log.info("%s gone: %s", ch, exc)
962 | 
963 |                 try:
964 |                     await self.bridge.removeChannel(channel=ch.id)
965 |                 except Exception as exc:
966 |                     log.info("%s detached: %s", ch, exc)
967 | 
968 |             try:
969 |                 await self.bridge.destroy()
970 |             except HTTPStatusError:
971 |                 pass
972 | 
973 | 
974 | class HangupBridgeState(BridgeState):
975 |     """A bridge controller that hangs up all remaining channels and deletes
976 |     its bridge as soon as there is only one active channel left.
977 |     """
978 | 
979 |     async def on_channel_end(self, ch, evt=None):
980 |         await super().on_channel_end(ch, evt)
981 |         if _count(1 for c in (self.bridge.channels | self.calls) if c.state in {'Down', 'Up', 'Ringing'}) < 2:
982 |             for c in self.bridge.channels:
983 |                 await c.hang_up()
984 |             await self.done()
985 | 
986 | 
987 | class ToplevelChannelState(ChannelState):
988 |     """A channel state machine that unconditionally hangs up its channel on exit."""
989 | 
990 |     async def run(self, *, task_status=None):
991 |         """Task for this state. Hangs up the channel on exit."""
992 |         try:
993 |             await super().run(task_status=task_status)
994 |         except ChannelExit:
995 |             pass
996 |         except StateError:
997 |             pass
998 |         finally:
999 |             with anyio.fail_after(2, shield=True) as s:
1000 |                 await self.channel.handle_exit()
1001 | 
1002 |     async def hang_up(self, reason="normal"):
1003 |         self.channel.set_reason(reason)
1004 |         await self.channel.hang_up()
1005 | 
1006 |     async def done(self):
1007 |         await self.channel.hang_up()
1008 |         await super().done()
1009 | 
1010 | 
1011 | class OutgoingChannelState(ToplevelChannelState):
1012 |     """A channel state machine that waits for an initial StasisStart event before proceeding."""
1013 | 
1014 |     async def run(self, *, task_status=None):
1015 |         async for evt in self.channel:
1016 |             if evt.type != "StasisStart":
1017 |                 raise StateError(evt)
1018 |             break
1019 |         await super().run(task_status=task_status)
1020 | 
1021 | 
1022 | class CallManager:
1023 |     state = None
1024 |     channel = None
1025 | 
1026 |     def __init__(self, bridge, State=None, timeout=None, **kw):
1027 |         self.bridge = bridge
1028 |         self.State = State
1029 |         self.timeout = timeout
1030 |         self.kw = kw
1031 | 
1032 |     async def __aenter__(self):
1033 |         timeout = self.timeout
1034 |         if timeout is None:
1035 |             timeout = math.inf
1036 | 
1037 |         with anyio.fail_after(timeout):
1038 |             self.channel = ch = await self.bridge.dial(**self.kw)
1039 |             if self.State is not None:
1040 |                 try:
1041 |                     self.state = state = self.State(ch)
1042 |                     await state.start_task()
1043 |                 except BaseException:
1044 |                     with anyio.CancelScope(shield=True):
1045 |                         await ch.hangup()
1046 |                     raise
1047 |         return self.state if self.state is not None else ch
1048 | 
1049 |     async def __aexit__(self, *exc):
1050 |         with anyio.fail_after(2, shield=True):
1051 |             if self.state is not None:
1052 |                 await self.state.hang_up()
1053 |             else:
1054 |                 await self.channel.hang_up()
1055 | 
1056 | ### A couple of helper classes
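# Illustrative usage of the helpers defined below (a sketch, not part of the
# library; the sound name and digit lengths are examples). From within a
# ChannelState + DTMFHandler subclass:
#
#     @as_task
#     async def on_dtmf_Pound(self, evt):
#         await SyncPlay(self, "sound:hello-world")
#         number = await SyncReadNumber(self, min_len=3, max_len=11)
#         log.info("dialled %s", number)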
1057 | 1058 | 1059 | class _ReadNumber(DTMFHandler): 1060 | _digit_timer = None 1061 | _digit_deadline = None 1062 | _total_timer = None 1063 | _total_deadline = None 1064 | 1065 | def __init__( 1066 | self, 1067 | prev, 1068 | playback=None, 1069 | timeout=60, 1070 | first_digit_timeout=None, 1071 | digit_timeout=10, 1072 | max_len=15, 1073 | min_len=5 1074 | ): 1075 | if first_digit_timeout is None: 1076 | first_digit_timeout = digit_timeout 1077 | self.total_timeout = timeout 1078 | self.digit_timeout = digit_timeout 1079 | self.first_digit_timeout = first_digit_timeout 1080 | self.min_len = min_len 1081 | self.max_len = max_len 1082 | self.playback = playback 1083 | 1084 | super().__init__(prev) 1085 | 1086 | async def add_digit(self, digit): 1087 | """ 1088 | Add this digit to the current number. 1089 | 1090 | The default clears the number on '*' and returns it on '#', 1091 | assuming that the length restrictions are obeyed. 1092 | 1093 | This method may call `await self.done` with the dialled number, update 1094 | `self.num`, or raise an exception. A string is used to replace the 1095 | current temporary number. 1096 | 1097 | This method may be a coroutine. 1098 | """ 1099 | if digit == '*': 1100 | self.num = "" 1101 | elif digit == '#': 1102 | if len(self.num) < self.min_len: 1103 | raise NumberTooShortError(self.num) 1104 | await self.done(self.num) 1105 | else: 1106 | self.num += digit 1107 | if len(self.num) > self.max_len: 1108 | raise NumberTooLongError(self.num) 1109 | 1110 | async def _stop_playing(self): 1111 | if self.playback is not None: 1112 | pb, self.playback = self.playback, None 1113 | try: 1114 | await pb.stop() 1115 | except HTTPStatusError: 1116 | pass 1117 | 1118 | async def _digit_timer_(self, evt: anyio.abc.Event=None): 1119 | self._digit_deadline = self.first_digit_timeout + await anyio.current_time() 1120 | with anyio.CancelScope() as sc: 1121 | self._digit_timer = sc 1122 | if evt is not None: 1123 | evt.set() 1124 | while True: 1125 | delay = self._digit_deadline - await anyio.current_time() 1126 | if delay <= 0: 1127 | await self._stop_playing() 1128 | raise DigitTimeoutError(self.num) from None 1129 | await anyio.sleep(delay) 1130 | 1131 | async def _total_timer_(self, evt: anyio.abc.Event=None): 1132 | self._total_deadline = self.total_timeout + await anyio.current_time() 1133 | with anyio.CancelScope() as sc: 1134 | self._total_timer = sc 1135 | if evt is not None: 1136 | evt.set() 1137 | while True: 1138 | delay = self._total_deadline - await anyio.current_time() 1139 | if delay <= 0: 1140 | await self._stop_playing() 1141 | raise NumberTimeoutError(self.num) from None 1142 | await anyio.sleep(delay) 1143 | 1144 | async def done(self, res): 1145 | await super().done(res) 1146 | self._digit_timer.cancel() 1147 | self._total_timer.cancel() 1148 | 1149 | async def on_start(self): 1150 | await super().on_start() 1151 | self.num = "" 1152 | self.taskgroup.start_soon(self._digit_timer_) 1153 | self.taskgroup.start_soon(self._total_timer_) 1154 | 1155 | async def on_dtmf_letter(self, evt): 1156 | """Ignore DTMF letters (A-D).""" 1157 | pass 1158 | 1159 | async def on_dtmf(self, evt): 1160 | await self._stop_playing() 1161 | res = await self.add_digit(evt.digit) 1162 | if inspect.iscoroutine(res): 1163 | res = await res 1164 | if isinstance(res, str): 1165 | self.num = res 1166 | await self.set_timeout() 1167 | 1168 | async def set_timeout(self): 1169 | self._digit_deadline = (await anyio.current_time() 1170 | ) + (self.digit_timeout if self.num else 
self.first_digit_timeout) 1171 | 1172 | 1173 | class SyncReadNumber(_ReadNumber, SyncEvtHandler): 1174 | """ 1175 | This event handler receives and returns a sequence of digits. 1176 | The pound key terminates the sequence. The star key restarts. 1177 | 1178 | Sync version. 1179 | """ 1180 | pass 1181 | 1182 | 1183 | class AsyncReadNumber(_ReadNumber, AsyncEvtHandler): 1184 | """ 1185 | This event handler receives and returns a sequence of digits. 1186 | The pound key terminates the sequence. The star key restarts. 1187 | 1188 | Async version. 1189 | """ 1190 | pass 1191 | 1192 | 1193 | class SyncPlay(SyncEvtHandler): 1194 | """ 1195 | This event handler plays a sound and returns when it has finished. 1196 | 1197 | Sync version. There is no async version because you get an event with the result anyway. 1198 | """ 1199 | 1200 | def __init__(self, prev, media): 1201 | super().__init__(prev) 1202 | self.media = media 1203 | 1204 | async def on_start(self): 1205 | await super().on_start() 1206 | self.cb = self.ref.on_event("PlaybackFinished", self.on_play_end) 1207 | p = await self.ref.play(media=self.media) 1208 | await p.wait_playing() 1209 | 1210 | async def on_play_end(self, evt): 1211 | if evt.playback.media_uri == self.media: 1212 | self.cb.close() 1213 | await self.done() 1214 | -------------------------------------------------------------------------------- /asyncari/util.py: -------------------------------------------------------------------------------- 1 | # 2 | """ 3 | Helper state machines 4 | """ 5 | 6 | __all__ = [ 7 | "NumberError", 8 | "NumberLengthError", 9 | "NumberTooShortError", 10 | "NumberTooLongError", 11 | "NumberTimeoutError", 12 | "TotalTimeoutError", 13 | "DigitTimeoutError", 14 | "mayNotExist", 15 | ] 16 | 17 | 18 | def singleton(cls): 19 | return cls() 20 | 21 | 22 | class NumberError(RuntimeError): 23 | """Base class for things that can go wrong entering a number. 24 | Attributes: 25 | number: 26 | The (partial, wrong, …) number that's been dialled so far. 
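    For example (sketch, using :class:`asyncari.state.SyncReadNumber`)::

        try:
            num = await SyncReadNumber(self, min_len=5)
        except NumberTooShortError as err:
            log.info("number %r is too short", err.number)
        except NumberTimeoutError as err:
            log.info("timed out; got %r so far", err.number)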
27 | """ 28 | 29 | def __init__(self, num): 30 | self.number = num 31 | 32 | 33 | class NumberLengthError(NumberError): 34 | pass 35 | 36 | 37 | class NumberTooShortError(NumberLengthError): 38 | pass 39 | 40 | 41 | class NumberTooLongError(NumberLengthError): 42 | pass 43 | 44 | 45 | class NumberTimeoutError(NumberError): 46 | pass 47 | 48 | 49 | class TotalTimeoutError(NumberTimeoutError): 50 | pass 51 | 52 | 53 | class DigitTimeoutError(NumberTimeoutError): 54 | pass 55 | 56 | 57 | from httpx import HTTPStatusError 58 | NOT_FOUND = 404 59 | 60 | 61 | @singleton 62 | class mayNotExist: 63 | def __enter__(self): 64 | return self 65 | 66 | def __exit__(self, c, e, t): 67 | if e is None: 68 | return 69 | if isinstance(e, HTTPStatusError) and e.response.status_code == NOT_FOUND: 70 | return True 71 | if isinstance(e, KeyError): 72 | return True 73 | -------------------------------------------------------------------------------- /ci/rtd-requirements.txt: -------------------------------------------------------------------------------- 1 | # RTD is currently installing 1.5.3, which has a bug in :lineno-match: 2 | sphinx >= 1.7.0 3 | sphinx_rtd_theme 4 | sphinxcontrib-trio 5 | -------------------------------------------------------------------------------- /ci/test-requirements.txt: -------------------------------------------------------------------------------- 1 | pytest 2 | pytest-cov 3 | -------------------------------------------------------------------------------- /ci/travis.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -ex 4 | 5 | # See https://github.com/python-trio/trio/issues/334 6 | YAPF_VERSION=0.17.0 7 | 8 | if [ "$TRAVIS_OS_NAME" = "osx" ]; then 9 | curl -Lo macpython.pkg https://www.python.org/ftp/python/${MACPYTHON}/python-${MACPYTHON}-macosx10.6.pkg 10 | sudo installer -pkg macpython.pkg -target / 11 | ls /Library/Frameworks/Python.framework/Versions/*/bin/ 12 | PYTHON_EXE=/Library/Frameworks/Python.framework/Versions/*/bin/python3 13 | sudo $PYTHON_EXE -m pip install virtualenv 14 | $PYTHON_EXE -m virtualenv testenv 15 | source testenv/bin/activate 16 | fi 17 | 18 | if [ "$USE_PYPY_NIGHTLY" = "1" ]; then 19 | curl -fLo pypy.tar.bz2 http://buildbot.pypy.org/nightly/py3.5/pypy-c-jit-latest-linux64.tar.bz2 20 | if [ ! -s pypy.tar.bz2 ]; then 21 | # We know: 22 | # - curl succeeded (200 response code; -f means "exit with error if 23 | # server returns 4xx or 5xx") 24 | # - nonetheless, pypy.tar.bz2 does not exist, or contains no data 25 | # This isn't going to work, and the failure is not informative of 26 | # anything involving this package. 27 | ls -l 28 | echo "PyPy3 nightly build failed to download – something is wrong on their end." 29 | echo "Skipping testing against the nightly build for right now." 
30 | exit 0 31 | fi 32 | tar xaf pypy.tar.bz2 33 | # something like "pypy-c-jit-89963-748aa3022295-linux64" 34 | PYPY_DIR=$(echo pypy-c-jit-*) 35 | PYTHON_EXE=$PYPY_DIR/bin/pypy3 36 | ($PYTHON_EXE -m ensurepip \ 37 | && $PYTHON_EXE -m pip install virtualenv \ 38 | && $PYTHON_EXE -m virtualenv testenv) \ 39 | || (echo "pypy nightly is broken; skipping tests"; exit 0) 40 | source testenv/bin/activate 41 | fi 42 | 43 | if [ "$USE_PYPY_RELEASE_VERSION" != "" ]; then 44 | curl -fLo pypy.tar.bz2 https://bitbucket.org/squeaky/portable-pypy/downloads/pypy3.5-${USE_PYPY_RELEASE_VERSION}-linux_x86_64-portable.tar.bz2 45 | tar xaf pypy.tar.bz2 46 | # something like "pypy3.5-5.7.1-beta-linux_x86_64-portable" 47 | PYPY_DIR=$(echo pypy3.5-*) 48 | PYTHON_EXE=$PYPY_DIR/bin/pypy3 49 | $PYTHON_EXE -m ensurepip 50 | $PYTHON_EXE -m pip install virtualenv 51 | $PYTHON_EXE -m virtualenv testenv 52 | source testenv/bin/activate 53 | fi 54 | 55 | pip install -U pip setuptools wheel 56 | 57 | if [ "$CHECK_FORMATTING" = "1" ]; then 58 | pip install yapf==${YAPF_VERSION} 59 | if ! yapf -rpd setup.py asyncari; then 60 | cat <NUL 2>NUL 17 | if errorlevel 9009 ( 18 | echo. 19 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 20 | echo.installed, then set the SPHINXBUILD environment variable to point 21 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 22 | echo.may add the Sphinx directory to PATH. 23 | echo. 24 | echo.If you don't have Sphinx installed, grab it from 25 | echo.http://sphinx-doc.org/ 26 | exit /b 1 27 | ) 28 | 29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 30 | goto end 31 | 32 | :help 33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 34 | 35 | :end 36 | popd 37 | -------------------------------------------------------------------------------- /docs/source/_static/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/M-o-a-T/asyncari/b9211fa7a8a02d3733558c9001ec46396a861a51/docs/source/_static/.gitkeep -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # 4 | # Documentation build configuration file, created by 5 | # sphinx-quickstart on Sat Jan 21 19:11:14 2017. 6 | # 7 | # This file is execfile()d with the current directory set to its 8 | # containing dir. 9 | # 10 | # Note that not all possible configuration values are present in this 11 | # autogenerated file. 12 | # 13 | # All configuration values have a default; values that are commented out 14 | # serve to show the default. 15 | 16 | # If extensions (or modules to document with autodoc) are in another directory, 17 | # add these directories to sys.path here. If the directory is relative to the 18 | # documentation root, use os.path.abspath to make it absolute, like shown here. 
19 | # 20 | import os 21 | import sys 22 | # So autodoc can import our package 23 | sys.path.insert(0, os.path.abspath('../..')) 24 | 25 | # Warn about all references to unknown targets 26 | nitpicky = True 27 | # Except for these ones, which we expect to point to unknown targets: 28 | nitpick_ignore = [ 29 | # Format is ("sphinx reference type", "string"), e.g.: 30 | ("py:obj", "bytes-like"), 31 | ] 32 | 33 | autodoc_inherit_docstrings = False 34 | 35 | # -- General configuration ------------------------------------------------ 36 | 37 | # If your documentation needs a minimal Sphinx version, state it here. 38 | # 39 | # needs_sphinx = '1.0' 40 | 41 | # Add any Sphinx extension module names here, as strings. They can be 42 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 43 | # ones. 44 | extensions = [ 45 | 'sphinx.ext.autodoc', 46 | 'sphinx.ext.intersphinx', 47 | 'sphinx.ext.coverage', 48 | 'sphinx.ext.napoleon', 49 | 'sphinxcontrib_trio', 50 | ] 51 | 52 | intersphinx_mapping = { 53 | "python": ('https://docs.python.org/3', None), 54 | "trio": ('https://trio.readthedocs.io/en/stable', None), 55 | "anyio": ('https://anyio.readthedocs.io/en/stable', None), 56 | } 57 | 58 | autodoc_member_order = "bysource" 59 | 60 | # Add any paths that contain templates here, relative to this directory. 61 | templates_path = [] 62 | 63 | # The suffix(es) of source filenames. 64 | # You can specify multiple suffix as a list of string: 65 | # 66 | # source_suffix = ['.rst', '.md'] 67 | source_suffix = '.rst' 68 | 69 | # The master toctree document. 70 | master_doc = 'index' 71 | 72 | # General information about the project. 73 | project = 'asyncari' 74 | copyright = 'The asyncari authors' 75 | author = 'The asyncari authors' 76 | 77 | # The version info for the project you're documenting, acts as replacement for 78 | # |version| and |release|, also used in various other places throughout the 79 | # built documents. 80 | # 81 | # The short X.Y version. 82 | import asyncari 83 | version = asyncari.__version__ 84 | # The full version, including alpha/beta/rc tags. 85 | release = version 86 | 87 | # The language for content autogenerated by Sphinx. Refer to documentation 88 | # for a list of supported languages. 89 | # 90 | # This is also used if you do content translation via gettext catalogs. 91 | # Usually you set "language" from the command line for these cases. 92 | language = None 93 | 94 | # List of patterns, relative to source directory, that match files and 95 | # directories to ignore when looking for source files. 96 | # This patterns also effect to html_static_path and html_extra_path 97 | exclude_patterns = [] 98 | 99 | # The name of the Pygments (syntax highlighting) style to use. 100 | pygments_style = 'sphinx' 101 | 102 | # The default language for :: blocks 103 | highlight_language = 'python3' 104 | 105 | # If true, `todo` and `todoList` produce output, else they produce nothing. 106 | todo_include_todos = False 107 | 108 | 109 | # -- Options for HTML output ---------------------------------------------- 110 | 111 | # The theme to use for HTML and HTML Help pages. See the documentation for 112 | # a list of builtin themes. 113 | # 114 | #html_theme = 'alabaster' 115 | 116 | # We have to set this ourselves, not only because it's useful for local 117 | # testing, but also because if we don't then RTD will throw away our 118 | # html_theme_options. 
119 | import sphinx_rtd_theme 120 | html_theme = 'sphinx_rtd_theme' 121 | html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] 122 | 123 | # Theme options are theme-specific and customize the look and feel of a theme 124 | # further. For a list of options available for each theme, see the 125 | # documentation. 126 | # 127 | html_theme_options = { 128 | # default is 2 129 | # show deeper nesting in the RTD theme's sidebar TOC 130 | # https://stackoverflow.com/questions/27669376/ 131 | # I'm not 100% sure this actually does anything with our current 132 | # versions/settings... 133 | "navigation_depth": 4, 134 | "logo_only": True, 135 | } 136 | 137 | # Add any paths that contain custom static files (such as style sheets) here, 138 | # relative to this directory. They are copied after the builtin static files, 139 | # so a file named "default.css" will overwrite the builtin "default.css". 140 | html_static_path = ['_static'] 141 | 142 | 143 | # -- Options for HTMLHelp output ------------------------------------------ 144 | 145 | # Output file base name for HTML help builder. 146 | htmlhelp_basename = 'asyncari-doc' 147 | 148 | 149 | # -- Options for LaTeX output --------------------------------------------- 150 | 151 | latex_elements = { 152 | # The paper size ('letterpaper' or 'a4paper'). 153 | # 154 | # 'papersize': 'letterpaper', 155 | 156 | # The font size ('10pt', '11pt' or '12pt'). 157 | # 158 | # 'pointsize': '10pt', 159 | 160 | # Additional stuff for the LaTeX preamble. 161 | # 162 | # 'preamble': '', 163 | 164 | # Latex figure (float) alignment 165 | # 166 | # 'figure_align': 'htbp', 167 | } 168 | 169 | # Grouping the document tree into LaTeX files. List of tuples 170 | # (source start file, target name, title, 171 | # author, documentclass [howto, manual, or own class]). 172 | latex_documents = [ 173 | (master_doc, 'asyncari.tex', 'Trio Documentation', 174 | author, 'manual'), 175 | ] 176 | 177 | 178 | # -- Options for manual page output --------------------------------------- 179 | 180 | # One entry per manual page. List of tuples 181 | # (source start file, name, description, authors, manual section). 182 | man_pages = [ 183 | (master_doc, 'asyncari', 'asyncari Documentation', 184 | [author], 1) 185 | ] 186 | 187 | 188 | # -- Options for Texinfo output ------------------------------------------- 189 | 190 | # Grouping the document tree into Texinfo files. List of tuples 191 | # (source start file, target name, title, author, 192 | # dir menu entry, description, category) 193 | texinfo_documents = [ 194 | (master_doc, 'asyncari', 'asyncari Documentation', 195 | author, 'asyncari', 'An AnyIO-ified adapter for the Asterisk ARI interface', 196 | 'Miscellaneous'), 197 | ] 198 | -------------------------------------------------------------------------------- /docs/source/history.rst: -------------------------------------------------------------------------------- 1 | Release history 2 | =============== 3 | 4 | .. currentmodule:: asyncari 5 | 6 | .. towncrier release notes start 7 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. documentation master file, created by 2 | sphinx-quickstart on Sat Jan 21 19:11:14 2017. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 
 5 | 
 6 | 
 7 | =================================================================
 8 | asyncari: An AnyIO-ified adapter for the Asterisk ARI interface
 9 | =================================================================
10 | 
11 | .. toctree::
12 |    :maxdepth: 2
13 | 
14 |    intro.rst
15 |    history.rst
16 | 
17 | ====================
18 | Indices and tables
19 | ====================
20 | 
21 | * :ref:`genindex`
22 | * :ref:`modindex`
23 | * :ref:`search`
24 | * :ref:`glossary`
25 | 
-------------------------------------------------------------------------------- /docs/source/intro.rst: --------------------------------------------------------------------------------
 1 | ===============================
 2 | Using ARI with Python and AnyIO
 3 | ===============================
 4 | 
 5 | First, open a connection to Asterisk::
 6 | 
 7 |     async with asyncari.connect('http://localhost:8088', 'test',
 8 |                                 'test_user', 'test_pass') as client:
 9 |         client.on_channel_event('StasisStart', on_start)
10 |         client.on_channel_event('StasisEnd', on_end)
11 |         # Run the WebSocket
12 |         await anyio.sleep_forever()
13 | 
14 | 
15 | .. autofunction:: asyncari.connect
16 | 
17 | The ``client`` object exposes all the ARI methods and objects you need.
18 | 
-------------------------------------------------------------------------------- /examples/bell.py: --------------------------------------------------------------------------------
 1 | #!/usr/bin/python3
 2 | 
 3 | """
 4 | We're implementing a doorbell using MoaT-KV, Asterisk, and hot glue.
 5 | 
 6 | You need
 7 | * a doorbell. MoaT-KV tells us when somebody presses it.
 8 | * a door station. We call it with SIP. It doesn't call us.
 9 | * any number of phones. We call them when the doorbell rings, and they
10 |   connect to the door when they call us.
11 | * a door opener. We tell MoaT-KV to trigger it.
12 | * an Asterisk server with ARI enabled.
13 | * this macro+context in `extensions.ael`, assuming you keep this script's
14 |   default program name of "bell":
15 | 
16 |     macro py(app,typ,ext) {
17 |             Stasis(${app},${typ},${ext});
18 |             Hangup();
19 |             return;
20 |     }
21 |     context ext_bell {
22 |             s => &py(bell,test,s);
23 |             i => &py(bell,test,${INVALID_EXTEN});
24 |     }
25 | 
26 | * this dialplan entry, which calls the door:
27 | 
28 |     555! => &go(ext_bell,${EXTEN:3});
29 | 
30 |   (where '3' is the length of the prefix)
31 | 
32 | 
33 | Start this script. It will register with Asterisk and listen to the
34 | "bell" MoaT-KV value (supposed to be a Boolean). When that is set to `True`
35 | it'll connect to the door station, play a sound if set, then ring all
36 | phones until one answers.
37 | 
38 | Conversely, if you call the door it'll connect your phone to it. If you
39 | call the door while somebody else is talking, you'll join the existing call.
40 | 
41 | When the MoaT-KV "lock" value is True, calling the phones will be skipped.
42 | 
43 | If the configured DTMF code is entered on a phone, the door opener is
44 | triggered. All other DTMF codes are ignored.
45 | """
46 | 
47 | #
48 | # Copyright (c) 2013, Digium, Inc.
49 | # Copyright (c) 2018-2023, Matthias Urlichs 50 | # 51 | import asyncari 52 | import sys 53 | import anyio 54 | import logging 55 | import asyncclick as click 56 | from contextlib import asynccontextmanager 57 | from functools import partial 58 | from asyncari.state import ToplevelChannelState, HangupBridgeState, DTMFHandler, as_task, DialFailed, SyncPlay 59 | from asyncari.model import ChannelExit, StateError 60 | from moat.util import read_cfg, attrdict, combine_dict, P 61 | from moat.kv.client import open_client 62 | import uuid 63 | from httpx import HTTPStatusError 64 | import signal 65 | 66 | from pprint import pprint 67 | 68 | import os 69 | 70 | CFG = attrdict( 71 | # Replacements via moat-kv 72 | replace = attrdict( 73 | data = attrdict(), # name > entry in "calls" 74 | tree = P("phone.s.redirect"), 75 | ), 76 | kv = attrdict( 77 | auth = "password name=asterisk password=you_guess", 78 | host = "127.0.0.1", 79 | init_timeout = 5, 80 | name = "Bell", 81 | port = 27586, 82 | ssl = False, 83 | ), 84 | door = attrdict( 85 | phone = "SIP/door", 86 | opener = P("home.ass.dyn.switch.door.cmd"), 87 | code = "0", 88 | audio = "/var/local/asterisk/hello" 89 | ), 90 | calls = attrdict( 91 | # std = attrdict( 92 | # bell = P("home.ass.dyn.binary_sensor.bell.state"), 93 | # audio = "/var/lib/asterisk/sounds/custom/door-std", 94 | # phones = [ 95 | # 'SIP/phone1', 96 | # 'SIP/phone2', 97 | # ], 98 | # open = False, # if set, auto-open the door 99 | # ), 100 | ), 101 | max_time = 300, # 5min 102 | watchdog = attrdict( 103 | path = P("home.ass.ping.hass.reply"), 104 | timeout = 200, 105 | ), 106 | done = attrdict( # list of signals that should stop calls 107 | # door = attrdict( 108 | # state = P("home.ass.dyn.switch.door.state"), 109 | # triggered = True, # caused by the door opener? 110 | # kill = False, # stop ongoing calls? 111 | # ), 112 | ), 113 | asterisk = attrdict( 114 | host = '127.0.0.1', 115 | port = 8088, 116 | username = 'asterisk', 117 | password = 'asterisk', 118 | app = 'bell', 119 | 120 | bridge_name = "Bell" 121 | ), 122 | ) 123 | 124 | # This demonstrates incoming DTMF recognition on both legs of a call 125 | 126 | async def bridge_cleanup(obj): 127 | for b in await obj.ari.bridges.list(): 128 | if b.name == obj.cfg.asterisk.bridge_name: 129 | for ch in b.channels: 130 | await ch.destroy() 131 | await b.destroy() 132 | 133 | class DoorState(ToplevelChannelState, DTMFHandler): 134 | """ 135 | State machine: The door. 136 | """ 137 | def __init__(self, obj, channel, *, audio=False, opener=None): 138 | """ 139 | @channel: chan to the door 140 | @audio: False: do nothing; True: play beep; 141 | None: play configured audio; string: play specified file 142 | """ 143 | self.obj = obj 144 | self.audio = audio 145 | self.opener = opener 146 | super().__init__(channel) 147 | 148 | async def do_open(self): 149 | await anyio.sleep(self.opener) 150 | await self.obj.dkv.set(self.obj.cfg.door.opener, value=True, idem=False) 151 | await anyio.sleep(1) 152 | self.obj.block_done = False 153 | 154 | async def player(self): 155 | if self.opener is not False: 156 | # self.obj.block_done = True # ?? 
157 | if isinstance(self.opener, (int,float)): 158 | self.obj.task.start_soon(self.do_open) 159 | 160 | f = self.audio 161 | if f is False: 162 | self.play_done.set() 163 | elif f is True: 164 | playback_id = str(uuid.uuid4()) 165 | self.obj.log.warning("Play beep") 166 | p = await self.channel.playWithId(playbackId=playback_id,media="tone:beep;tonezone=de") 167 | await anyio.sleep(0.7) 168 | try: 169 | await p.stop() 170 | except HTTPStatusError: 171 | pass 172 | else: 173 | if f is None: 174 | f = self.obj.cfg.door.audio 175 | self.obj.log.warning("Play sound %s", f) 176 | await SyncPlay(self, media="sound:"+f) 177 | self.obj.log.warning("Play sound done") 178 | 179 | if self.opener is True: 180 | await self.obj.dkv.set(self.obj.cfg.door.opener, value=True, idem=False) 181 | await anyio.sleep(1) 182 | self.obj.block_done = False 183 | 184 | 185 | async def on_start(self): 186 | self.obj.task.start_soon(self.player) 187 | await super().on_start() 188 | 189 | async def on_DialResult(self, evt): 190 | await super().on_DialResult(evt) 191 | # answered OK 192 | self.obj.door.state = True 193 | 194 | async def on_dtmf(self,evt): 195 | print("*DTMF*EXT*",evt.digit) 196 | 197 | class _CallState(ToplevelChannelState, DTMFHandler): 198 | """ 199 | State machine: phone. 200 | """ 201 | def __init__(self, obj, n, channel): 202 | self.obj = obj 203 | self.n = n 204 | self.dtmf = "" 205 | super().__init__(channel) 206 | 207 | async def on_dtmf(self,evt): 208 | print("*DTMF*INT*",evt.digit) 209 | self.dtmf += evt.digit 210 | code = self.obj.cfg.door.code 211 | if self.dtmf == code: 212 | self.obj.door.opened = True 213 | await self.obj.dkv.set(self.obj.cfg.door.opener, value=True, idem=False) 214 | else: 215 | # if the code is 223 and somebody keys in 2223, the door should open 216 | for i in range(len(self.dtmf)): 217 | if code.startswith(self.dtmf[i:]): 218 | self.dtmf = self.dtmf[i:] 219 | return 220 | # not a prefix if we get here 221 | self.dtmf = "" 222 | 223 | async def on_DialResult(self, evt): 224 | try: 225 | await super().on_DialResult(evt) 226 | except DialFailed: 227 | if self.n is not None: 228 | self.obj.calls.data[self.n] = None 229 | if any(self.obj.calls.data): 230 | return 231 | # Ugh, nobody home 232 | raise 233 | 234 | else: 235 | if self.obj.calls.data is None: 236 | return 237 | if self.n is not None: 238 | self.obj.calls.data[self.n] = None 239 | for i,cs in enumerate(self.obj.calls.data): 240 | if cs is not None and (self.n is None or i != self.n): 241 | cs.cancel() 242 | 243 | 244 | class CalleeState(_CallState, DTMFHandler): 245 | """ 246 | State machine: the door calls somebody. 247 | """ 248 | pass 249 | 250 | class CallerState(_CallState): 251 | """ 252 | State machine: somebody calls the door. 
253 | """ 254 | def __init__(self, obj, channel): 255 | super().__init__(obj, None, channel) 256 | 257 | @as_task 258 | async def on_start(self): 259 | await super().on_start() 260 | 261 | async with with_bridge(self.obj) as br: 262 | await self.channel.answer() 263 | evt = anyio.Event() 264 | async def sub(): 265 | playback_id = str(uuid.uuid4()) 266 | p = await self.channel.playWithId(playbackId=playback_id,media="tone:beep;tonezone=de") 267 | await anyio.sleep(0.7) 268 | await p.stop() 269 | try: 270 | await br.add(self.channel) 271 | except HTTPStatusError as exc: 272 | self.obj.log.warning("SETUP ERROR %s", exc) 273 | evt.set() 274 | self.obj.task.start_soon(sub) 275 | 276 | await door_call(self.obj, audio=True) 277 | await evt.wait() 278 | if self.obj.door.state is not None: 279 | await self.channel.answer() 280 | await self.channel.wait_not_bridged() 281 | 282 | 283 | async def _run_bridge(obj, *, task_status): 284 | br = None 285 | try: 286 | obj.log.info("Setting up bridge") 287 | with anyio.CancelScope() as sc: 288 | async with HangupBridgeState.new(obj.ari, name=obj.cfg.asterisk.bridge_name) as br: 289 | obj.bridge.br = br 290 | obj.bridge.scope = sc 291 | task_status.started() 292 | with anyio.move_on_after(obj.cfg.max_time): 293 | await br # waits for end 294 | except BaseException as exc: 295 | obj.log.exception("Bridge? %r",exc) 296 | raise 297 | 298 | finally: 299 | obj.log.info("Stopping bridge") 300 | obj.bridge.br = None 301 | obj.bridge.scope = None 302 | obj.bridge.cnt = 0 303 | obj.door.state = None 304 | obj.door.opened = False 305 | obj.door.called = set() 306 | if br is not None: 307 | for ch in br.bridge.channels: 308 | await ch.hang_up() 309 | await br.teardown() 310 | obj.log.info("Stopped bridge") 311 | 312 | 313 | @asynccontextmanager 314 | async def with_bridge(obj): 315 | if obj.bridge.br is None: 316 | async with obj.bridge.lock: 317 | if obj.bridge.br is None: 318 | if obj.bridge.cnt > 0: 319 | raise RuntimeError("Bridge count %d" % (obj.bridge.cnt,)) 320 | await obj.task.start(_run_bridge, obj) 321 | if obj.bridge.br is None: 322 | raise RuntimeError("No bridge") 323 | 324 | obj.bridge.cnt += 1 325 | try: 326 | yield obj.bridge.br 327 | finally: 328 | obj.bridge.cnt -= 1 329 | if obj.bridge.cnt == 0 and obj.bridge.scope is not None: 330 | obj.bridge.scope.cancel() 331 | 332 | async def monitor_redirect(obj): 333 | old = {} 334 | 335 | obj.log.info("Update? 
on %s", obj.cfg.replace.tree) 336 | async with obj.dkv.watch(obj.cfg.replace.tree, min_depth=1, max_depth=1, fetch=True) as mon: 337 | async for msg in mon: 338 | if "path" not in msg: 339 | obj.log.debug("UPDATE START") 340 | continue 341 | op = msg.path[-1] 342 | if op not in obj.cfg.calls: 343 | obj.log.warning("UPDATE %s unknown", op) 344 | continue 345 | if op not in old: 346 | old[op] = obj.cfg.calls[op].copy() 347 | 348 | # clean up 349 | d = obj.cfg.calls[op] 350 | d.update(old[op]) 351 | ex = set() 352 | for k in list(d.keys()): 353 | if k not in old[op]: 354 | del d[k] 355 | 356 | val = msg.get("value", None) 357 | if val is None: 358 | obj.log.debug("UPDATE %s cleared", op) 359 | elif isinstance(val, dict): 360 | obj.log.debug("UPDATE %s %r", op, val) 361 | obj.cfg.calls[op].update(val) 362 | elif not isinstance(val, str): 363 | obj.log.debug("UPDATE %s %r", op, val) 364 | elif msg.value in obj.cfg.replace.data: 365 | obj.log.debug("UPDATE %s %r", op, val) 366 | obj.cfg.calls[op].update(obj.cfg.replace.data[val]) 367 | else: 368 | obj.log.warning("UPDATE %s Unknown %r", op, msg) 369 | 370 | async def monitor_phone_calls(obj): 371 | """Wait for StasisStart events, indicating that a phone calls the 372 | bridge. 373 | """ 374 | 375 | obj.bridge = attrdict() 376 | obj.bridge.task = None 377 | obj.bridge.br = None 378 | obj.bridge.scope = None 379 | obj.bridge.lock = anyio.Lock() 380 | obj.bridge.cnt = 0 381 | obj.door = attrdict() 382 | obj.door.state = None 383 | obj.door.opened = False 384 | obj.door.called = set() 385 | obj.calls = attrdict() 386 | obj.calls.evt = None 387 | obj.calls.data = None 388 | 389 | # Answer and put in the holding bridge 390 | async with obj.ari.on_channel_event('StasisStart') as listener: 391 | async for objs, event in listener: 392 | if event['args'][0] == 'dialed': 393 | continue 394 | incoming = objs['channel'] 395 | cs = CallerState(obj, incoming) 396 | await cs.start_task() 397 | 398 | async def _call(obj,n,dest,cid): 399 | """ 400 | Call handler to a single phone. 401 | """ 402 | with anyio.CancelScope() as sc: 403 | obj.calls.data[n] = sc 404 | try: 405 | ch = await obj.bridge.br.dial(endpoint=dest, State=partial(CalleeState,obj,n), callerId=cid) 406 | await ch.channel.wait_bridged() 407 | obj.log.info("Connected %d to %s",n,dest) 408 | obj.block_done = True # don't hang up when the button is pressed 409 | obj.calls.evt.set() 410 | for d in obj.calls.data: 411 | if d is not None: 412 | d.cancel() 413 | except ChannelExit: 414 | obj.calls.data[n] = None 415 | if not any(obj.calls.data): 416 | # all calls failed 417 | obj.calls.evt.set() 418 | finally: 419 | obj.calls.data[n] = None 420 | 421 | async def door_call(obj, audio=True, opener=None): 422 | """ 423 | Call the door. 424 | 425 | Returns (and sets door.state.evt) as soon as the door is connected. 426 | """ 427 | if obj.door.state is not None: 428 | await obj.door.state.wait() 429 | if obj.door.state is None: # error 430 | raise RuntimeError("Door call failed") 431 | return 432 | 433 | obj.door.state = e = anyio.Event() 434 | async with with_bridge(obj) as br: 435 | try: 436 | r = await br.dial(endpoint=obj.cfg.door.phone, State=partial(DoorState,obj,audio=audio,opener=opener)) 437 | return r 438 | except BaseException: 439 | obj.door.state = None 440 | raise 441 | finally: 442 | e.set() 443 | 444 | 445 | async def call_phones(obj,name,c): 446 | """ 447 | Call a number of phones (including zero). 
448 | """ 449 | if not c['phones']: 450 | obj.log.info("from door: %s: No phones", name) 451 | return 452 | obj.log.info("from door: %s: Calling phones", name) 453 | cid = c.get("caller",{}) 454 | cid = attrdict(name=cid.get("name","?"), number=cid.get("nr","")) 455 | cid = f"{cid.name} <{cid.number}>" 456 | for dest in c['phones']: 457 | if dest in obj.door.called: 458 | obj.log.info("from door: %s: %s: already called", name, dest) 459 | continue 460 | obj.log.info("from door: %s: %s: calling", name, dest) 461 | n = len(obj.calls.data) 462 | obj.calls.data.append(None) 463 | obj.bridge.br._tg.start_soon(_call,obj,n,dest,cid) 464 | 465 | with anyio.move_on_after(obj.cfg.max_time): 466 | await obj.calls.evt.wait() 467 | for cs in obj.calls.data: 468 | if cs is not None: 469 | cs.cancel() 470 | 471 | async def monitor_watchdog(obj): 472 | wd = obj.cfg.watchdog 473 | evt = anyio.Event() 474 | 475 | async def mon(): 476 | nonlocal evt 477 | async with obj.dkv.watch(wd.path, max_depth=0, fetch=False) as mon: 478 | async for evt in mon: 479 | evt.set() 480 | evt = anyio.Event() 481 | raise RuntimeError("Watchdog ended") 482 | 483 | async with anyio.create_task_group() as tg: 484 | tg.start(mon) 485 | while True: 486 | with anyio.fail_after(wd.timeout): 487 | await evt.wait() 488 | 489 | 490 | 491 | async def call_from_door(obj,name,c): 492 | """ 493 | Somebody pressed the button on the door. 494 | 495 | We connect to the door, then start to call phones. 496 | """ 497 | if obj.door.state is not None: 498 | obj.log.info("from door: %s: already connected",name) 499 | await call_phones(obj,name,c) 500 | return 501 | 502 | try: 503 | obj.log.info("from door: %s: Calling door",name) 504 | async with with_bridge(obj) as br: 505 | try: 506 | opener = c["open"] 507 | except KeyError: 508 | opener = None 509 | else: 510 | if opener is True: 511 | opener = obj.cfg.door.opener 512 | 513 | try: 514 | fn = c["audio"] 515 | except KeyError: 516 | fn = obj.cfg.door.audio 517 | 518 | try: 519 | r = await door_call(obj, audio=fn, opener=opener) 520 | except ChannelExit as exc: 521 | obj.log.exception("NOT CALLED %r",exc) 522 | return 523 | obj.log.info("from door: %s: Connected to door (%r)", name, br.bridge.channels) 524 | obj.calls.data = [] 525 | obj.calls.evt = anyio.Event() 526 | 527 | await call_phones(obj,name,c) 528 | if r.channel.bridge is None: 529 | chs = br.bridge.channels.copy() 530 | pp = [] 531 | for ch in chs: 532 | playback_id = str(uuid.uuid4()) 533 | pp.append(await ch.playWithId(playbackId=playback_id,media="tone:stutter;tonezone=de")) 534 | try: 535 | await r.channel.wait_bridged() 536 | except StateError as exc: 537 | obj.log.warning("from door: no channel (%r). Disconnecting.", exc) 538 | return 539 | 540 | for p in pp: 541 | await p.stop() 542 | 543 | if sum(ch.state == 'Up' for ch in br.bridge.channels) >= 2: 544 | # call established 545 | obj.log.info("from door: OK. Waiting for hangup") 546 | await br 547 | 548 | else: 549 | obj.log.info("from door: not OK. Disconnecting.") 550 | # TODO Play a sound or something? 
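                # (Sketch only, untested: audible feedback could be given before
                # the bridge goes away, along the lines of
                #     await SyncPlay(r, media="sound:" + obj.cfg.door.audio)
                # — whether SyncPlay can be driven from here, outside the
                # DoorState's own task, is an assumption.)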
551 | 
552 |     finally:
553 |         obj.log.info("from door: Terminating door")
554 |         obj.door.state = None
555 | 
556 | async def monitor_call(obj,name,c):
557 |     """
558 |     Monitor call buttons
559 |     """
560 |     async with obj.dkv.watch(c['bell'], max_depth=0, fetch=False) as mon:
561 |         async for evt in mon:
562 |             if not evt.get("value",False):
563 |                 continue
564 |             if obj.door.state is not None:
565 |                 continue
566 |             obj.ari.taskgroup.start_soon(call_from_door, obj,name,c)
567 | 
568 | async def monitor_done(obj,name,c):
569 |     """
570 |     Monitor action result signals
571 |     """
572 |     try:
573 |         obj.log.info("Watch? %s",c['state'])
574 |         async with obj.dkv.watch(c['state'], max_depth=0, fetch=False) as mon:
575 |             async for evt in mon:
576 |                 if obj.block_done:
577 |                     obj.log.info("Watch %s BLOCK %s",c['state'],evt)
578 |                     continue
579 |                 if not evt.get("value",False):
580 |                     obj.log.info("Watch %s NOVAL %s",c['state'],evt)
581 |                     continue
582 |                 if obj.door.state is None:
583 |                     obj.log.info("Watch %s NONE %s",c['state'],evt)
584 |                     continue
585 |                 if c.get('triggered',False) and obj.door.opened:
586 |                     obj.log.info("Watch %s TRIG %s",c['state'],evt)
587 |                     obj.door.opened = False
588 |                     continue
589 |                 if c.get('kill',False):
590 |                     obj.log.info("Watch %s KILL %s",c['state'],evt)
591 |                     await obj.bridge.br.hang_up()
592 |                 else:
593 |                     for cs in obj.calls.data:
594 |                         if cs is not None:
595 |                             obj.log.info("Watch %s CANCEL %s",c['state'],evt)
596 |                             cs.cancel()
597 |                         else:
598 |                             obj.log.info("Watch %s XCANCEL %s",c['state'],evt)
599 |                     else:
600 |                         obj.log.info("Watch %s NOCANCEL %s",c['state'],evt)
601 |     except Exception as exc:
602 |         obj.log.exception("Owch %r %r",name,c)
603 | 
604 | async def monitor_sig(obj):
605 |     with anyio.open_signal_receiver(signal.SIGUSR1) as mon:
606 |         async for _ in mon:
607 |             pprint(obj, sys.stderr)
608 | 
609 | @click.command()
610 | @click.option("-v", "--verbose", count=True, help="Be more verbose. Can be used multiple times.")
611 | @click.option("-q", "--quiet", count=True, help="Be less verbose. 
Opposite of '--verbose'.") 612 | @click.option("-c", "--cfg", type=click.Path("r"), default=None, help="Configuration file (YAML).") 613 | @click.pass_context 614 | async def main(ctx, verbose,quiet,cfg): 615 | verbose = verbose-quiet+1 616 | logging.basicConfig(level=[logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG][min(verbose,3)]) 617 | ctx.obj = obj = attrdict() 618 | obj.redir = dict() 619 | cf = read_cfg("bell", cfg) 620 | cf = CFG if cf is None else combine_dict(cf, CFG, cls=attrdict) 621 | ast = cf.asterisk 622 | ast.url = f'http://{ast.host}:{ast.port}/' 623 | obj.cfg = cf 624 | obj.log = logging.getLogger("bell") 625 | 626 | obj.ari = await ctx.with_async_resource(asyncari.connect(ast.url, ast.app, ast.username,ast.password)) 627 | 628 | obj.dkv = await ctx.with_async_resource(open_client(conn=cf.kv)) 629 | obj.block_done = False 630 | 631 | await bridge_cleanup(obj) 632 | 633 | async with anyio.create_task_group() as obj.task: 634 | for name,c in obj.cfg.calls.items(): 635 | obj.task.start_soon(monitor_call, obj,name,c) 636 | for name,c in obj.cfg.done.items(): 637 | obj.task.start_soon(monitor_done, obj,name,c) 638 | # client.taskgroup.start_soon(monitor_calls, client) 639 | obj.task.start_soon(monitor_redirect, obj) 640 | obj.task.start_soon(monitor_sig, obj) 641 | if obj.cfg.watchdog.timeout: 642 | obj.task.start_soon(monitor_watchdog, obj) 643 | await monitor_phone_calls(obj) 644 | 645 | if __name__ == "__main__": 646 | try: 647 | main(_anyio_backend="trio") 648 | except KeyboardInterrupt: 649 | pass 650 | 651 | -------------------------------------------------------------------------------- /examples/cleanup_bridges.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | """This program removes unused bridges. 4 | """ 5 | 6 | # 7 | # Copyright (c) 2018, Matthias Urlichs 8 | # 9 | import asyncari 10 | import anyio 11 | from httpx import HTTPStatusError 12 | 13 | import logging 14 | logger = logging.getLogger(__name__) 15 | 16 | from pprint import pprint 17 | 18 | import os 19 | ast_host = os.getenv("AST_HOST", 'localhost') 20 | ast_port = int(os.getenv("AST_ARI_PORT", 8088)) 21 | ast_url = os.getenv("AST_URL", 'http://%s:%d/'%(ast_host,ast_port)) 22 | ast_username = os.getenv("AST_USER", 'asterisk') 23 | ast_password = os.getenv("AST_PASS", 'asterisk') 24 | ast_app = os.getenv("AST_APP", 'hello') 25 | 26 | async def clean_bridges(client): 27 | # 28 | # Find (or create) a holding bridge. 29 | # 30 | for b in await client.bridges.list(): 31 | if b.channels: 32 | continue 33 | try: 34 | await b.destroy() 35 | except HTTPStatusError as exc: 36 | print(b.id,exc) 37 | else: 38 | print(b.id,"… deleted") 39 | 40 | async def main(): 41 | async with asyncari.connect(ast_url, ast_app, ast_username,ast_password) as client: 42 | await clean_bridges(client) 43 | 44 | if __name__ == "__main__": 45 | logging.basicConfig(level=logging.INFO) 46 | anyio.run(main) 47 | 48 | -------------------------------------------------------------------------------- /examples/example.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | """Brief example of using the channel API with a state machine. 4 | 5 | This app will answer any channel sent to Stasis(hello), and play "Hello, 6 | world" to the channel. For any DTMF events received, the number is played back 7 | to the channel. Press # to hang up, and * for a special message. 
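
A minimal dialplan entry for trying this out might look like this (the
extension number is just an illustration; the app name must match AST_APP,
which defaults to "hello"):

    exten => 100,1,Stasis(hello)
     same => n,Hangup()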
8 | """ 9 | 10 | # 11 | # Copyright (c) 2013, Digium, Inc. 12 | # Copyright (c) 2018, Matthias Urlichs 13 | # 14 | 15 | import asyncari 16 | from asyncari.state import ToplevelChannelState, DTMFHandler 17 | import anyio 18 | import logging 19 | from httpx import HTTPStatusError 20 | 21 | import os 22 | ast_host = os.getenv("AST_HOST", 'localhost') 23 | ast_port = int(os.getenv("AST_ARI_PORT", 8088)) 24 | ast_url = os.getenv("AST_URL", 'http://%s:%d/'%(ast_host,ast_port)) 25 | ast_username = os.getenv("AST_USER", 'asterisk') 26 | ast_password = os.getenv("AST_PASS", 'asterisk') 27 | ast_app = os.getenv("AST_APP", 'hello') 28 | 29 | class State(ToplevelChannelState, DTMFHandler): 30 | do_hang = False 31 | 32 | async def on_start(self): 33 | await self.channel.play(media='sound:hello-world') 34 | 35 | async def on_dtmf_Star(self, evt): 36 | self.do_hang = True 37 | await self.channel.play(media='sound:vm-goodbye') 38 | 39 | async def on_dtmf_Pound(self, evt): 40 | await self.channel.play(media='sound:asterisk-friend') 41 | 42 | async def on_dtmf(self, evt): 43 | await self.channel.play(media='sound:digits/%s' % evt.digit) 44 | 45 | async def on_PlaybackFinished(self, evt): 46 | if self.do_hang: 47 | try: 48 | await self.channel.continueInDialplan() 49 | except HTTPStatusError: 50 | pass 51 | 52 | async def on_start(client): 53 | 54 | """Callback for StasisStart events. 55 | 56 | On new channels, register the on_dtmf callback, answer the channel and 57 | play "Hello, world" 58 | 59 | :param channel: Channel DTMF was received from. 60 | :param event: Event. 61 | """ 62 | async with client.on_channel_event('StasisStart') as listener: 63 | async for objs, event in listener: 64 | channel = objs['channel'] 65 | await channel.answer() 66 | client.taskgroup.start_soon(State(channel).start_task) 67 | 68 | async def main(): 69 | async with asyncari.connect(ast_url, ast_app, ast_username,ast_password) as client: 70 | client.taskgroup.start_soon(on_start, client) 71 | # Run the WebSocket 72 | async for m in client: 73 | print("** EVENT **", m) 74 | 75 | if __name__ == "__main__": 76 | logging.basicConfig(level=logging.DEBUG) 77 | try: 78 | anyio.run(main, backend="trio") 79 | except KeyboardInterrupt: 80 | pass 81 | -------------------------------------------------------------------------------- /examples/originate_example.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | """Example demonstrating ARI channel origination. This will dial an 4 | endpoint when a Stasis call arrives, and connect the call. 5 | 6 | """ 7 | 8 | # 9 | # Copyright (c) 2013, Digium, Inc. 
10 | # Copyright (c) 2018, Matthias Urlichs 11 | # 12 | import asyncari 13 | import anyio 14 | import logging 15 | from asyncari.state import ToplevelChannelState, HangupBridgeState, DTMFHandler, as_task 16 | from asyncari.model import ChannelExit 17 | 18 | from pprint import pprint 19 | 20 | import os 21 | ast_host = os.getenv("AST_HOST", 'localhost') 22 | ast_port = int(os.getenv("AST_ARI_PORT", 8088)) 23 | ast_url = os.getenv("AST_URL", 'http://%s:%d/'%(ast_host,ast_port)) 24 | ast_username = os.getenv("AST_USER", 'asterisk') 25 | ast_password = os.getenv("AST_PASS", 'asterisk') 26 | ast_app = os.getenv("AST_APP", 'hello') 27 | ast_outgoing = os.getenv("AST_OUTGOING", 'SIP/blink') 28 | 29 | # This demonstrates incoming DTMF recognition on both legs of a call 30 | 31 | class CallState(ToplevelChannelState, DTMFHandler): 32 | async def on_dtmf(self,evt): 33 | print("*DTMF*EXT*",evt.digit) 34 | 35 | class CallerState(ToplevelChannelState, DTMFHandler): 36 | @as_task 37 | async def on_start(self): 38 | async with HangupBridgeState.new(self.client) as br: 39 | await br.add(self.channel) 40 | await br.dial(endpoint=ast_outgoing, State=CallState) 41 | await self.channel.wait_bridged() 42 | await self.channel.wait_not_bridged() 43 | async def on_dtmf(self,evt): 44 | print("*DTMF*INT*",evt.digit) 45 | 46 | async def on_start(client): 47 | """Callback for StasisStart events. 48 | 49 | When an incoming channel starts, put it in the holding bridge and 50 | originate a channel to connect to it. When that channel answers, create a 51 | bridge and put both of them into it. 52 | 53 | :param incoming: 54 | :param event: 55 | """ 56 | 57 | # Answer and put in the holding bridge 58 | async with client.on_channel_event('StasisStart') as listener: 59 | async for objs, event in listener: 60 | if event['args'][0] == 'dialed': 61 | continue 62 | incoming = objs['channel'] 63 | cs = CallerState(incoming) 64 | await cs.start_task() 65 | 66 | async def main(): 67 | async with asyncari.connect(ast_url, ast_app, ast_username,ast_password) as client: 68 | client.taskgroup.start_soon(on_start, client) 69 | async for m in client: 70 | #print("** EVENT **", m) 71 | pprint(("** EVENT **", m, vars(m))) 72 | 73 | if __name__ == "__main__": 74 | logging.basicConfig(level=logging.DEBUG) 75 | try: 76 | anyio.run(main) 77 | except KeyboardInterrupt: 78 | pass 79 | 80 | -------------------------------------------------------------------------------- /examples/playback.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | """Example demonstrating using the returned object from an API call. 4 | 5 | This app plays demo-contrats on any channel sent to Stasis(hello). DTMF keys 6 | are used to control the playback. 7 | """ 8 | 9 | # 10 | # Copyright (c) 2013, Digium, Inc. 11 | # 12 | 13 | import anyio 14 | import asyncari 15 | import sys 16 | import logging 17 | 18 | import os 19 | ast_host = os.getenv("AST_HOST", 'localhost') 20 | ast_port = int(os.getenv("AST_ARI_PORT", 8088)) 21 | ast_url = os.getenv("AST_URL", 'http://%s:%d/'%(ast_host,ast_port)) 22 | ast_username = os.getenv("AST_USER", 'asterisk') 23 | ast_password = os.getenv("AST_PASS", 'asterisk') 24 | ast_app = os.getenv("AST_APP", 'hello') 25 | 26 | async def on_start(objs, event): 27 | """Callback for StasisStart events. 28 | 29 | On new channels, answer, play demo-congrats, and register a DTMF listener. 30 | 31 | :param channel: Channel DTMF was received from. 32 | :param event: Event. 
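
    The DTMF handler registered below maps keys to playback operations:
    5 pause, 8 unpause, 4 reverse, 6 forward, 2 restart, and # stops the
    playback and continues in the dialplan.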
33 |     """
34 |     channel = objs['channel']
35 |     await channel.answer()
36 |     playback = await channel.play(media='sound:demo-congrats')
37 | 
38 |     async def on_dtmf(event):
39 |         """Callback for DTMF events.
40 | 
41 |         DTMF events control the playback operation.
42 | 
43 |         :param channel: Channel DTMF was received on.
44 |         :param event: Event.
45 |         """
46 |         # Since the callback was registered to a specific channel, we can
47 |         # control the playback object we already have in scope.
48 |         # TODO: if paused: unpause before doing anything else
49 |         digit = event['digit']
50 |         if digit == '5':
51 |             await playback.control(operation='pause')
52 |         elif digit == '8':
53 |             await playback.control(operation='unpause')
54 |         elif digit == '4':
55 |             await playback.control(operation='reverse')
56 |         elif digit == '6':
57 |             await playback.control(operation='forward')
58 |         elif digit == '2':
59 |             await playback.control(operation='restart')
60 |         elif digit == '#':
61 |             await playback.stop()
62 |             await event.channel.continueInDialplan()
63 |         else:
64 |             print("Unknown DTMF %s" % digit, file=sys.stderr)
65 | 
66 |     channel.on_event('ChannelDtmfReceived', on_dtmf)
67 | 
68 | async def main():
69 |     async with (
70 |         asyncari.connect(ast_url, ast_app, ast_username,ast_password) as client_,
71 |         anyio.create_task_group() as tg,
72 |     ):
73 |         global client
74 |         client = client_
75 |         @tg.start_soon
76 |         async def mon_start():
77 |             async with client.on_channel_event("StasisStart") as listener:
78 |                 async for objs, event in listener:
79 |                     tg.start_soon(on_start, objs, event)
80 | 
81 |         # Run the WebSocket
82 |         async for m in client:
83 |             print("** EVENT **", m)
84 | 
85 | if __name__ == "__main__":
86 |     logging.basicConfig(level=logging.DEBUG)
87 |     anyio.run(main)
88 | 
89 | 
-------------------------------------------------------------------------------- /examples/zoomcall: --------------------------------------------------------------------------------
 1 | #!/usr/bin/python3
 2 | 
 3 | """Zoom SIP API
 4 | 
 5 | The Zoom SIP dial-in is ugly. The announcement is almost inaudible and the
 6 | thing reacts strangely when you enter a wrong meeting password.
 7 | 
 8 | This script implements a reasonable dial-in.
 9 | """
10 | 
11 | #
12 | # Copyright (c) 2018, Matthias Urlichs
13 | #
14 | 
15 | import asyncari
16 | from asyncari.state import ToplevelChannelState, as_task, HangupBridgeState
17 | from asyncari.state import SyncReadNumber, SyncPlay
18 | from asyncari.util import NumberError, NumberTimeoutError
19 | import anyio
20 | from functools import partial
21 | import jwt
22 | from datetime import datetime,timedelta
23 | import sys
24 | 
25 | import httpx
26 | from httpx import HTTPStatusError
27 | from time import time
28 | from pprint import pprint
29 | 
30 | import os
31 | ast_host = os.getenv("AST_HOST", 'localhost')
32 | ast_port = int(os.getenv("AST_ARI_PORT", 8088))
33 | ast_url = os.getenv("AST_URL", 'http://%s:%d/'%(ast_host,ast_port))
34 | ast_username = os.getenv("AST_USER", 'asterisk')
35 | ast_password = os.getenv("AST_PASS", 'asterisk')
36 | ast_app = os.getenv("AST_APP", 'zoom')
37 | 
38 | zoom_url = "https://COMPANY.zoom.us/v2/"
39 | zoom_key = "YourKeyForTheZoomAPIx"
40 | zoom_secret = "xYourSecretForTheZoomAPIxDontxSharex"
41 | 
42 | sound_dir="zoomcall" # needs to be in, or linked from, /usr/share/asterisk/sounds
43 | 
44 | import logging
45 | logger = logging.getLogger(__name__)
46 | 
47 | NOT_FOUND = 404
48 | 
49 | def generateJWT():
50 |     # Zoom API credentials from https://developer.zoom.us/me/
51 |     token = dict(iss=zoom_key, exp=int(time()) + 60)
52 |     return jwt.encode(token, zoom_secret, algorithm='HS256')
53 | 
54 | 
55 | async def getZoomPassword(meetingID):
56 |     # use httpx here so the HTTPStatusError handlers below actually match
57 |     async with httpx.AsyncClient() as h:
58 |         r = await h.get(zoom_url+"meetings/"+meetingID,
59 |                         headers={"Authorization": "Bearer "+generateJWT()})
60 |     r.raise_for_status()
61 |     return r.json()['pstn_password']
62 | 
63 | class ZoomState(ToplevelChannelState):
64 |     """
65 |     Channel handler to connect to Zoom.
66 | 
67 |     Arguments:
68 |       room:
69 |         Room number. If not given, ask.
70 |       password:
71 |         Password for the room. Ignored if the room doesn't have one.
72 |         If True, connect to the room even if it has a password.
73 |         If None, ask.
74 | 
75 |     XXX TODO: this is a ToplevelChannelState and as such is not composable.
76 |     That should be fixed if you want to do this as part of an IVR.
77 |     """
78 | 
79 |     def __init__(self, channel, room=None, password=None):
80 |         super().__init__(channel)
81 |         self.room = room
82 |         self.password = password
83 | 
84 |     @as_task
85 |     async def on_start(self):
86 |         """
87 |         This is a simple call handler. It asks for a meeting ID (if none
88 |         given), checks with Zoom whether the meeting exists, asks for a
89 |         password (if one is set and the handler is not instructed to skip
90 |         it), then calls the Zoom SIP gateway.
91 |         """
92 | 
93 |         # We want to be nice to our callers here; some get confused when we
94 |         # answer immediately (or too fast).
95 |         try:
96 |             await self.channel.ring()
97 |             await anyio.sleep(0.5)
98 |             await self.channel.answer()
99 |         except HTTPStatusError:
100 |             # Just ignore this call.
101 |             # Since this is a ToplevelChannelState, it will auto-hangup when we're done with it
102 |             # (unless we tell it not to).
103 |             self.done()
104 |             return
105 | 
106 |         # We could do the same thing as the following block with a bunch of event handlers,
107 |         # but this way is much more readable.
108 | 
109 |         # If you want to go the async way, the "Sync…" handlers won't work; use their "Async…"
110 |         # equivalent. Mixing both styles in the same state machine may cause unwanted effects.
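        # (Rough sketch, not used in this script: an event-driven variant would
        # subclass DTMFHandler and collect digits in a callback, e.g.
        #     async def on_dtmf(self, evt):
        #         self.digits += evt.digit
        # instead of awaiting SyncReadNumber inline as done below.)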
111 | 
112 |         try:
113 |             num = self.room
114 |             pwi = self.password
115 | 
116 |             if num is None:
117 |                 pb = await self.channel.play(media='sound:'+sound_dir+'/enter_room_number')
118 |                 try:
119 |                     num = await SyncReadNumber(self, playback=pb, min_len=8, max_len=11, timeout=20)
120 |                 except NumberTimeoutError:
121 |                     return await SyncPlay(self, media='sound:'+sound_dir+'/timeout')
122 |                 except NumberError:
123 |                     return await SyncPlay(self, media='sound:'+sound_dir+'/no_such_meeting')
124 |                 # These "return await" calls don't hang up by themselves; that's what the
125 |                 # "finally: self.done()" is for.
126 | 
127 |             try:
128 |                 pw = await getZoomPassword(num)
129 |             except HTTPStatusError as exc:
130 |                 if exc.response.status_code == NOT_FOUND:
131 |                     return await SyncPlay(self, media='sound:'+sound_dir+'/no_such_meeting')
132 |                 else:
133 |                     return await SyncPlay(self, media='sound:'+sound_dir+'/internal_error')
134 | 
135 |             if pw:
136 |                 if not pwi and not pw.isdigit():
137 |                     return await SyncPlay(self, media='sound:'+sound_dir+'/no_phone_access')
138 | 
139 |                 if not pwi:
140 |                     pb = await self.channel.play(media='sound:'+sound_dir+'/enter_room_password')
141 |                     try:
142 |                         pwi = await SyncReadNumber(self, playback=pb, min_len=len(pw)//2, max_len=len(pw)*3, timeout=20)
143 |                     except NumberError:
144 |                         return await SyncPlay(self, media='sound:'+sound_dir+'/wrong_password')
145 | 
146 |                 if pwi is not True and pw != pwi:
147 |                     return await SyncPlay(self, media='sound:'+sound_dir+'/wrong_password')
148 |                 num += "."+pw
149 | 
150 |             # Now we're all set, so we need a bridge.
151 |             async with HangupBridgeState.new(self.client, nursery=self.nursery) as br:
152 |                 # This is a HangupBridge, thus it will de-bridge and hangup all
153 |                 # channels as soon as one of them hangs up. Otherwise we'd have to
154 |                 # create a subclass that has an event handler for on_channel_end.
155 | 
156 |                 await br.add(self.channel)
157 |                 try:
158 |                     await br.dial(endpoint="SIP/"+num+"@zoom", State=ToplevelChannelState)
159 |                 except Exception:
160 |                     # Playing an error will happen in the Exception handler below.
161 |                     raise
162 |                 else:
163 |                     # … and until we're no longer connected
164 |                     await self.channel.wait_not_bridged()
165 | 
166 |         except Exception:
167 |             logger.exception("Owch")
168 |             if self.channel.state == "Up":
169 |                 await SyncPlay(self, media='sound:'+sound_dir+'/internal_error')
170 | 
171 |             # Crashing through here would take the whole app with it.
172 | 
173 |         finally:
174 |             # always hang up
175 |             self.done()
176 | 
177 | 
178 | async def on_start(client):
179 |     """
180 |     Process StasisStart events: hand each new channel to a ZoomState.
181 |     """
182 |     async with client.on_channel_event('StasisStart') as listener:
183 |         async for objs, event in listener:
184 |             if event['args'][0] == 'dialed':
185 |                 continue  # we originated this call; TODO use a nicer method
186 |             # Actually asyncari also gets StasisStart for outgoing channels, but those are already caught for us.
187 |             channel = objs['channel']
188 |             await ZoomState(channel).start_task()
189 | 
190 | async def main():
191 |     async with asyncari.connect(ast_url, ast_app, ast_username,ast_password) as client:
192 |         # Our main entry point: handle incoming calls in the background.
193 |         client.taskgroup.start_soon(on_start, client)
194 | 
195 |         # Run the WebSocket.
Just for laughs ^W debugging we also print all events here,
196 |         # though a simple "await anyio.sleep(math.inf)" would be sufficient
197 |         async for m in client:
198 |             print("** EVENT **", m)
199 | 
200 | if __name__ == "__main__":
201 |     logging.basicConfig(level=logging.DEBUG)
202 |     try:
203 |         anyio.run(main)
204 |     except KeyboardInterrupt:
205 |         pass
206 | 
-------------------------------------------------------------------------------- /newsfragments/.gitkeep: --------------------------------------------------------------------------------
https://raw.githubusercontent.com/M-o-a-T/asyncari/b9211fa7a8a02d3733558c9001ec46396a861a51/newsfragments/.gitkeep
-------------------------------------------------------------------------------- /newsfragments/README.rst: --------------------------------------------------------------------------------
 1 | Adding newsfragments
 2 | ====================
 3 | 
 4 | This directory collects "newsfragments": short files that each contain
 5 | a snippet of ReST-formatted text that will be added to the next
 6 | release notes. This should be a description of aspects of the change
 7 | (if any) that are relevant to users. (This contrasts with your commit
 8 | message and PR description, which are a description of the change as
 9 | relevant to people working on the code itself.)
10 | 
11 | Each file should be named like ``<ISSUE>.<TYPE>.rst``, where
12 | ``<ISSUE>`` is an issue number, and ``<TYPE>`` is one of:
13 | 
14 | * ``feature``
15 | * ``bugfix``
16 | * ``doc``
17 | * ``removal``
18 | * ``misc``
19 | 
20 | So for example: ``123.feature.rst``, ``456.bugfix.rst``
21 | 
22 | If your PR fixes an issue, use that number here. If there is no issue,
23 | then after you submit the PR and get the PR number you can add a
24 | newsfragment using that instead.
25 | 
26 | Note that the ``towncrier`` tool will automatically
27 | reflow your text, so don't try to do any fancy formatting. You can
28 | install ``towncrier`` and then run ``towncrier --draft`` if you want
29 | to get a preview of how your change will look in the final release
30 | notes.
31 | 
32 | 
33 | Making releases
34 | ===============
35 | 
36 | ``pip install towncrier``, then run ``towncrier``. (You can use
37 | ``towncrier --draft`` to get a preview of what this will do.)
38 | 
39 | You can configure ``towncrier`` (for example: customizing the
40 | different types of changes) by modifying ``pyproject.toml``.
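
For instance, an additional fragment type could be declared in the existing
``[tool.towncrier]`` configuration like this (a sketch; the "perf" type is
only an example, not something this project defines)::

    [[tool.towncrier.type]]
    directory = "perf"
    name = "Performance"
    showcontent = true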
41 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=64", "setuptools-scm[toml]>=7.0"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [project] 6 | name = "asyncari" 7 | authors = [ 8 | { name = "Matthias Urlichs", email = "matthias@urlichs.de" }, 9 | ] 10 | license = { file = "LICENSE" } 11 | description = "Asynchronous adapter to the Asterisk ARI interface" 12 | readme = "README.rst" 13 | classifiers = [ 14 | "Development Status :: 4 - Beta", 15 | "Framework :: AnyIO", 16 | "Framework :: AsyncIO", 17 | "Framework :: Trio", 18 | "Intended Audience :: Developers", 19 | "Topic :: Software Development :: Libraries :: Python Modules", 20 | "Operating System :: OS Independent", 21 | "Programming Language :: Python :: 3", 22 | "Topic :: Communications :: Telephony", 23 | ] 24 | keywords = [ "asterisk", "ari" ] 25 | urls = { Homepage = "https://github.com/M-o-a-T/asyncari" } 26 | dependencies = [ 27 | "httpx", 28 | "anyio >= 4.6", 29 | "attrs >= 18", 30 | "asyncwebsockets", 31 | "asyncswagger11", 32 | ] 33 | dynamic = ["version"] 34 | 35 | [project.optional-dependencies] 36 | test = [ 37 | "pytest", 38 | ] 39 | 40 | [tool.setuptools_scm] 41 | 42 | [tool.flake8] 43 | max-line-length=99 44 | ignore="E402,E731,E127,E502,E123,W503" 45 | 46 | [tool.towncrier] 47 | package = "asyncari" 48 | filename = "docs/source/history.rst" 49 | directory = "newsfragments" 50 | underlines = ["-", "~", "^"] 51 | issue_format = "`#{issue} `__" 52 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/M-o-a-T/asyncari/b9211fa7a8a02d3733558c9001ec46396a861a51/tests/__init__.py -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | # XX this should switch to using pytest-trio as soon as pytest-trio is 2 | # released... 3 | 4 | import inspect 5 | import pytest 6 | from trio.testing import MockClock, trio_test 7 | 8 | @pytest.fixture 9 | def mock_clock(): 10 | return MockClock() 11 | 12 | 13 | @pytest.fixture 14 | def autojump_clock(): 15 | return MockClock(autojump_threshold=0) 16 | 17 | 18 | @pytest.hookimpl(tryfirst=True) 19 | def pytest_pyfunc_call(pyfuncitem): 20 | if inspect.iscoroutinefunction(pyfuncitem.obj): 21 | pyfuncitem.obj = trio_test(pyfuncitem.obj) 22 | -------------------------------------------------------------------------------- /tests/test_example.py: -------------------------------------------------------------------------------- 1 | import trio 2 | 3 | # We can just use 'async def test_*' to define async tests. 4 | # This also uses a virtual clock fixture, so time passes quickly and 5 | # predictably. 6 | async def test_sleep_with_autojump_clock(autojump_clock): 7 | assert trio.current_time() == 0 8 | 9 | for i in range(10): 10 | print("Sleeping {} seconds".format(i)) 11 | start_time = trio.current_time() 12 | await trio.sleep(i) 13 | end_time = trio.current_time() 14 | 15 | assert end_time - start_time == i 16 | --------------------------------------------------------------------------------