├── .github └── workflows │ ├── python-package.yml │ └── python-publish.yml ├── .gitignore ├── LICENSE ├── README.md ├── aio_pool ├── __init__.py └── pool.py ├── poetry.lock ├── pyproject.toml └── tests └── test.py /.github/workflows/python-package.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions 3 | 4 | name: Python package 5 | 6 | on: 7 | push: 8 | branches: [ main ] 9 | pull_request: 10 | branches: [ main ] 11 | 12 | jobs: 13 | build: 14 | 15 | runs-on: ubuntu-latest 16 | strategy: 17 | fail-fast: false 18 | matrix: 19 | os: [ubuntu-latest, macos-latest, windows-latest] 20 | python-version: [3.6, 3.7, 3.8, 3.9] 21 | 22 | steps: 23 | - uses: actions/checkout@v2 24 | - name: Set up Python ${{ matrix.python-version }} 25 | uses: actions/setup-python@v2 26 | with: 27 | python-version: ${{ matrix.python-version }} 28 | - name: Install dependencies 29 | run: | 30 | python -m pip install --upgrade pip 31 | python -m pip install flake8 pytest 32 | if [ -f requirements.txt ]; then pip install -r requirements.txt; fi 33 | - name: Lint with flake8 34 | run: | 35 | # stop the build if there are Python syntax errors or undefined names 36 | flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 37 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide 38 | flake8 . 
--count --exit-zero --max-complexity=10 --max-line-length=127 --statistics 39 | - name: Run unittest 40 | run: | 41 | python -m unittest discover tests 42 | -------------------------------------------------------------------------------- /.github/workflows/python-publish.yml: -------------------------------------------------------------------------------- 1 | # This workflow will upload a Python Package using Twine when a release is created 2 | # For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries 3 | 4 | name: Upload Python Package 5 | 6 | on: 7 | release: 8 | types: [created] 9 | 10 | jobs: 11 | deploy: 12 | 13 | runs-on: ubuntu-latest 14 | 15 | steps: 16 | - uses: actions/checkout@v2 17 | - name: Set up Python 18 | uses: actions/setup-python@v2 19 | with: 20 | python-version: '3.x' 21 | - name: Install dependencies 22 | run: | 23 | python -m pip install --upgrade pip 24 | pip install poetry 25 | - name: Build and publish 26 | run: | 27 | poetry config http-basic.pypi ${{ secrets.PYPI_USERNAME }} ${{ secrets.PYPI_PASSWORD }} 28 | poetry publish --build 29 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | 131 | # IDEs 132 | .vscode 133 | .idea 134 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Itayazolay 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # aio-pool 2 | Extending Python's `multiprocessing.Pool` to support coroutine functions. 3 | Can be useful for when using a server with very high bandwidth or doing both very large IO and CPU tasks at the same time. 4 | 5 | All methods of `multiprocessing.Pool` are supported. 6 | All parameters for multiprocessing.Pool are supported. 7 | 8 | Install using pip: 9 | ```bash 10 | pip install aio-pool 11 | ``` 12 | ## examples: 13 | Setting concurrency limit. This means each process can run with up to 8 concurrent tasks at a time. 14 | ```python 15 | import asyncio 16 | from aio_pool import AioPool 17 | 18 | 19 | async def powlong(a): 20 | await asyncio.sleep(1) 21 | return a**2 22 | 23 | if __name__ == '__main__': 24 | with AioPool(processes=2, concurrency_limit=8) as pool: 25 | results = pool.map(powlong, [i for i in range(16)]) # Should take 2 seconds (2*8). 26 | print(results) 27 | 28 | ``` 29 | 30 | Async initializers are also supported. 31 | 32 | ```python 33 | import asyncio 34 | from aio_pool import AioPool 35 | 36 | async def start(message): 37 | await asyncio.sleep(1) 38 | print(message) 39 | 40 | async def powlong(a): 41 | await asyncio.sleep(1) 42 | return a**2 43 | 44 | if __name__ == '__main__': 45 | with AioPool(processes=2, 46 | concurrency_limit=8, 47 | initializer=start, 48 | initargs=("Started with AioPool", )) as pool: 49 | results = pool.map(powlong, [i for i in range(16)]) # Should take 2 seconds (2*8). 50 | print(results) 51 | 52 | ``` 53 | 54 | By default, AioPool also sets up a default executor for any non-async tasks. 55 | The size can be determined by the `pool_size` argument, which defaults to 1. 56 | Non-default event loops (`uvloop`, for example) are supported as well, using the `loop_initializer` argument. 
57 | Also, non-async functions are supported by default, as the AioPool worker identifies if the function is async or not. 58 | If the function is not async, it runs inside the threadpool, to allow the requested concurrency. 59 | This means that order of execution is not guaranteed, even if the function is not async. 60 | However, the order of results is guaranteed through the pool API (map, starmap, apply, etc...). 61 | 62 | ```python 63 | from aio_pool import AioPool 64 | import uvloop 65 | 66 | with AioPool(loop_initializer=uvloop.new_event_loop, pool_size=4) as pool: 67 | pool.map(print, [i for i in range(8)]) 68 | ``` 69 | 70 | 71 | 72 | -------------------------------------------------------------------------------- /aio_pool/__init__.py: -------------------------------------------------------------------------------- 1 | from aio_pool.pool import AioPool 2 | 3 | __all__ = ["AioPool"] 4 | -------------------------------------------------------------------------------- /aio_pool/pool.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | import sys 4 | from asyncio.base_events import BaseEventLoop 5 | from asyncio.coroutines import iscoroutinefunction 6 | from asyncio.locks import Semaphore 7 | from asyncio.tasks import Task 8 | from collections import deque 9 | from concurrent.futures import ThreadPoolExecutor 10 | from functools import singledispatch 11 | from multiprocessing.pool import ( # type: ignore # noqa 12 | ExceptionWithTraceback, MaybeEncodingError, Pool, 13 | _helper_reraises_exception, mapstar, starmapstar) 14 | from typing import Any, Awaitable, Callable, Deque, Optional, Set, Tuple, Union 15 | 16 | __all__ = ["AioPool"] 17 | 18 | 19 | logger = logging.getLogger("aiopool") 20 | logger.addHandler(logging.NullHandler()) 21 | 22 | 23 | 24 | async def _create_bounded_task(func, args, kwds: dict, sem: Semaphore, loop: "BaseEventLoop", iscoroutinefunction=iscoroutinefunction): 25
| if iscoroutinefunction(func): 26 | task = await _create_bounded_task_coro(func, args, kwds, sem, loop) 27 | elif func is mapstar: 28 | task = await _create_bounded_task_mapstar(func, args, kwds, sem, loop) 29 | elif func is starmapstar: 30 | task = await _create_bounded_task_starmapstar(func, args, kwds, sem, loop) 31 | else: 32 | task = await _create_bounded_task_thread(func, args, kwds, sem, loop) 33 | return task 34 | 35 | async def _create_bounded_task_thread(func, args, kwds: dict, sem, loop: "BaseEventLoop"): 36 | await sem.acquire() 37 | task = loop.run_in_executor(None, lambda: func(*args, **kwds)) 38 | task.add_done_callback(lambda t: sem.release()) 39 | return task 40 | 41 | async def _create_bounded_task_coro(func, args, kwds: dict, sem: Semaphore, loop: "BaseEventLoop"): 42 | await sem.acquire() 43 | task = loop.create_task(func(*args, **kwds)) 44 | task.add_done_callback(lambda t: sem.release()) 45 | return task 46 | 47 | async def _create_bounded_task_mapstar(func: mapstar, args, kwds: dict, sem: Semaphore, loop: "BaseEventLoop"): 48 | underlying_func = args[0][0] 49 | underlying_params = args[0][1] 50 | results: Deque[Awaitable[Any]] = deque([]) 51 | append = results.append 52 | for params in underlying_params: 53 | append(await _create_bounded_task( 54 | underlying_func, (params, ), {}, sem, loop)) 55 | return asyncio.gather(*results, return_exceptions=True) 56 | 57 | async def _create_bounded_task_starmapstar(func: starmapstar, args, kwds: dict, sem: Semaphore, loop: "BaseEventLoop"): 58 | underlying_func = args[0][0] 59 | underlying_params = args[0][1] 60 | results = deque([]) 61 | append = results.append 62 | for params in underlying_params: 63 | append(await _create_bounded_task( 64 | underlying_func, params, {}, sem, loop)) 65 | return asyncio.gather(*results) 66 | 67 | 68 | async def task_wrapper( 69 | job, i, func, 70 | task: Awaitable[Any], 71 | put: Callable[[Any], Awaitable[None]], 72 | wrap_exception: bool = False, 73 | ) -> None: 74 | 
try: 75 | result = (True, await task) 76 | except Exception as e: 77 | if wrap_exception and func is not _helper_reraises_exception: 78 | e = ExceptionWithTraceback(e, e.__traceback__) 79 | result = (False, e) 80 | try: 81 | await put((job, i, result)) 82 | except Exception as e: 83 | wrapped = MaybeEncodingError(e, result[1]) 84 | logger.debug("Possible encoding error while sending result: %s" % (wrapped)) 85 | await put((job, i, (False, wrapped))) 86 | 87 | 88 | async def _run_worker( 89 | get: Callable[[], Awaitable[Any]], 90 | put: Callable[[Any], Awaitable[None]], 91 | loop: asyncio.BaseEventLoop, 92 | initializer=None, 93 | initargs=(), 94 | maxtasks=None, 95 | wrap_exception=False, 96 | concurrency_limit=128, 97 | iscoroutinefunction=asyncio.iscoroutinefunction, 98 | ) -> None: 99 | if initializer is not None: 100 | if iscoroutinefunction(initializer): 101 | await initializer(*initargs) 102 | else: 103 | initializer(*initargs) 104 | completed = 0 105 | sem_concurrency_limit = asyncio.BoundedSemaphore(concurrency_limit) 106 | 107 | tasks: Set[Task[Any]] = set() 108 | 109 | def remove_task(t: Task, *, tasks: Set[Task] = tasks) -> None: 110 | tasks.remove(t) 111 | 112 | while maxtasks is None or (maxtasks and completed < maxtasks): 113 | async with sem_concurrency_limit: 114 | try: 115 | task = await get() 116 | except (EOFError, OSError): 117 | logger.debug("worker got EOFError or OSError -- exiting") 118 | for task in tasks: 119 | task.cancel() 120 | tasks.clear() # Don't wait for anything. 
121 | break 122 | 123 | if task is None: 124 | logger.debug("worker got sentinel -- exiting") 125 | break 126 | 127 | job, i, func, args, kwds = task 128 | task = await _create_bounded_task(func, args, kwds, sem=sem_concurrency_limit, loop=loop) 129 | 130 | new_task = loop.create_task( 131 | task_wrapper(job, i, func, 132 | task, 133 | put=put, 134 | wrap_exception=wrap_exception, 135 | ) 136 | ) 137 | tasks.add(new_task) 138 | new_task.add_done_callback(remove_task) 139 | 140 | if tasks: 141 | await asyncio.gather(*tasks, return_exceptions=True) 142 | logger.debug("worker exiting after %d tasks" % completed) 143 | 144 | 145 | def worker( 146 | inqueue, 147 | outqueue, 148 | initializer=None, 149 | initargs=(), 150 | loop_initializer=asyncio.new_event_loop, 151 | threads=1, 152 | maxtasks: Optional[int] = None, 153 | wrap_exception: bool = False, 154 | concurrency_limit=128, 155 | ) -> None: 156 | loop: asyncio.BaseEventLoop = loop_initializer() 157 | asyncio.set_event_loop(loop) 158 | worker_tp = ThreadPoolExecutor(threads, thread_name_prefix="Worker_TP_") 159 | loop.set_default_executor(worker_tp) 160 | get_tp = ThreadPoolExecutor(1, thread_name_prefix="GetTask_TP_") 161 | put_tp = ThreadPoolExecutor(1, thread_name_prefix="PutTask_TP_") 162 | 163 | async def get_task(*, loop=loop, tp=get_tp, queue=inqueue) -> tuple: 164 | return await loop.run_in_executor(tp, queue.get) 165 | 166 | async def put_result(result, *, loop=loop, tp=put_tp, queue=outqueue) -> None: 167 | return await loop.run_in_executor(tp, queue.put, result) 168 | 169 | try: 170 | loop.run_until_complete( 171 | _run_worker( 172 | get_task, 173 | put_result, 174 | loop=loop, 175 | initializer=initializer, 176 | initargs=initargs, 177 | maxtasks=maxtasks, 178 | wrap_exception=wrap_exception, 179 | concurrency_limit=concurrency_limit, 180 | ) 181 | ) 182 | except Exception as err: 183 | logger.exception("worker got exception %s", err) 184 | finally: 185 | logger.debug("shutdown workers") 186 | 
get_tp.shutdown() 187 | put_tp.shutdown() 188 | worker_tp.shutdown() 189 | logger.debug("shutdown asyncgens") 190 | loop.run_until_complete(loop.shutdown_asyncgens()) 191 | if loop.is_running(): 192 | loop.close() 193 | logger.debug("Worker done") 194 | 195 | 196 | class AioPool(Pool): 197 | def __init__( 198 | self, 199 | processes: Optional[int] = None, 200 | initializer: Optional[Callable[..., Union[Awaitable[Any], Any]]] = None, 201 | initargs: Tuple[Any, ...] = (), 202 | maxtasksperchild: int = None, 203 | context=None, 204 | loop_initializer: Callable[[], BaseEventLoop] = None, 205 | pool_size: int = 1, 206 | concurrency_limit: int = 128, 207 | ) -> None: 208 | """Process pool implementation that support async functions. 209 | Support the same funcitonalilty as the original process pool. 210 | 211 | Args: 212 | processes: number of processes to run, same behaviour as Pool. 213 | Defaults to None. 214 | initializer: Initializer function that being executed first by each process. 215 | Can be async. Optional. Defaults to None. 216 | initargs: Arguments to pass to initializer. Defaults to (). 217 | maxtasksperchild: max tasks per process. same behaviour as Pool. Defaults to None. 218 | context: determine how to start the child processes. same behaviour as Pool. Defaults to None. 219 | loop_initializer: Function that create the new event loop. Defaults to None. 220 | pool_size: size for the default pool for the event loop in the new process. Defaults to 1. 221 | concurrency_limit: Maximume concurrent tasks to run in each process. Defaults to 128. 
222 | """ 223 | self._loop_initializer = loop_initializer or asyncio.new_event_loop 224 | if pool_size <= 0: 225 | raise ValueError("Thread pool size must be at least 1") 226 | self._pool_size = pool_size 227 | if concurrency_limit < 1: 228 | raise ValueError("Conccurency limit must be at least 1.") 229 | self._concurrency_limit = concurrency_limit 230 | super().__init__(processes, initializer, initargs, maxtasksperchild, context) 231 | 232 | if sys.version_info.minor < 8: 233 | 234 | def _repopulate_pool(self) -> None: 235 | """Bring the number of pool processes up to the specified number, 236 | for use after reaping workers which have exited. 237 | """ 238 | for _ in range(self._processes - len(self._pool)): 239 | w = self.Process( 240 | target=worker, 241 | args=( 242 | self._inqueue, 243 | self._outqueue, 244 | self._initializer, 245 | self._initargs, 246 | self._loop_initializer, 247 | self._pool_size, 248 | self._maxtasksperchild, 249 | self._wrap_exception, 250 | self._concurrency_limit, 251 | ), 252 | ) 253 | self._pool.append(w) 254 | w.name = w.name.replace("Process", "PoolWorker") 255 | w.daemon = True 256 | w.start() 257 | logger.debug("added worker") 258 | 259 | elif sys.version_info.minor >= 8: 260 | 261 | def _repopulate_pool(self) -> None: 262 | return self._repopulate_pool_static( 263 | self._ctx, 264 | self.Process, 265 | self._processes, 266 | self._pool, 267 | self._inqueue, 268 | self._outqueue, 269 | self._initializer, 270 | self._initargs, 271 | self._loop_initializer, 272 | self._maxtasksperchild, 273 | self._wrap_exception, 274 | self._pool_size, 275 | self._concurrency_limit, 276 | ) 277 | 278 | @staticmethod 279 | def _repopulate_pool_static( 280 | ctx, 281 | Process, 282 | processes, 283 | pool, 284 | inqueue, 285 | outqueue, 286 | initializer, 287 | initargs, 288 | loop_initializer, 289 | maxtasksperchild, 290 | wrap_exception, 291 | pool_size, 292 | concurrency_limit, 293 | ) -> None: 294 | """Bring the number of pool processes up to 
the specified number, 295 | for use after reaping workers which have exited. 296 | """ 297 | for i in range(processes - len(pool)): 298 | w = Process( 299 | ctx, 300 | target=worker, 301 | args=( 302 | inqueue, 303 | outqueue, 304 | initializer, 305 | initargs, 306 | loop_initializer, 307 | pool_size, 308 | maxtasksperchild, 309 | wrap_exception, 310 | concurrency_limit, 311 | ), 312 | ) 313 | w.name = w.name.replace("Process", "PoolWorker") 314 | w.daemon = True 315 | w.start() 316 | pool.append(w) 317 | logger.debug("added worker") 318 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | [[package]] 2 | category = "dev" 3 | description = "Fast implementation of asyncio event loop on top of libuv" 4 | marker = "sys_platform != \"win32\"" 5 | name = "uvloop" 6 | optional = true 7 | python-versions = "*" 8 | version = "0.15.1" 9 | 10 | [package.extras] 11 | dev = ["Cython (>=0.29.20,<0.30.0)", "pytest (>=3.6.0)", "Sphinx (>=1.7.3,<1.8.0)", "sphinxcontrib-asyncio (>=0.2.0,<0.3.0)", "sphinx-rtd-theme (>=0.2.4,<0.3.0)", "aiohttp", "flake8 (>=3.8.4,<3.9.0)", "psutil", "pycodestyle (>=2.6.0,<2.7.0)", "pyOpenSSL (>=19.0.0,<19.1.0)", "mypy (>=0.800)"] 12 | docs = ["Sphinx (>=1.7.3,<1.8.0)", "sphinxcontrib-asyncio (>=0.2.0,<0.3.0)", "sphinx-rtd-theme (>=0.2.4,<0.3.0)"] 13 | test = ["aiohttp", "flake8 (>=3.8.4,<3.9.0)", "psutil", "pycodestyle (>=2.6.0,<2.7.0)", "pyOpenSSL (>=19.0.0,<19.1.0)", "mypy (>=0.800)"] 14 | 15 | [metadata] 16 | content-hash = "1be6d5e3e834bf6445009a4401d0217c3f3c931eb889df997218f7c622f2d191" 17 | lock-version = "1.0" 18 | python-versions = "^3.6" 19 | 20 | [metadata.files] 21 | uvloop = [ 22 | {file = "uvloop-0.15.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:9541dc3f391941796ae95c9c3bb16b813acf9e3d4beebfd3b623f1acb22d318d"}, 23 | {file = "uvloop-0.15.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = 
"sha256:e178c255622d928d464187e3ceba94db88465f6b17909c651483fb73af8d8b85"}, 24 | {file = "uvloop-0.15.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:47ec567151070ed770211d359ad9250b59368548c60212c7ef6dda3f5b1778f6"}, 25 | {file = "uvloop-0.15.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:66881fe8a2187334c4dd5010c56310bdf32fe426613f9ca727f090bc31280624"}, 26 | {file = "uvloop-0.15.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:e72779681f839b6a069d7e7a9f7962a1d1927612c5c2e33071415478bdc1b91b"}, 27 | {file = "uvloop-0.15.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:ed073d24e0c383c24d17d3a2bb209b999ff0a8130e89b7c3f033db9e0c3bd04f"}, 28 | {file = "uvloop-0.15.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:236a3c31096e0845029856f7bc07a938340c2cdb35d9d39b38c9253b672bf948"}, 29 | {file = "uvloop-0.15.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ca8a9e982f0bfbe331f41902cdd721c6e749e4685a403685e792b86a584f5969"}, 30 | {file = "uvloop-0.15.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:1ae1ad731c8c0dcee80e0ecf06274f0f7293244d2cef81fa2747321a370a6aba"}, 31 | {file = "uvloop-0.15.1.tar.gz", hash = "sha256:7846828112bfb49abc5fdfc47d0e4dfd7402115c9fde3c14c31818cfbeeb63dc"}, 32 | ] 33 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "aio-pool" 3 | version = "0.2.0" 4 | description = "Extending Python's process pool to support async functions." 
5 | authors = ["Itay Azolay "] 6 | license = "Apache-2.0" 7 | readme = "README.md" 8 | repository = "https://github.com/Itayazolay/aio-pool" 9 | classifiers = [ 10 | "Development Status :: 4 - Beta", 11 | "Intended Audience :: Developers", 12 | "Programming Language :: Python :: 3", 13 | "Programming Language :: Python :: 3.6", 14 | "Programming Language :: Python :: 3.7", 15 | "Programming Language :: Python :: 3.8", 16 | "Programming Language :: Python :: 3.9", 17 | "Topic :: Software Development :: Libraries", 18 | "Topic :: Software Development :: Libraries :: Python Modules", 19 | "Framework :: AsyncIO" 20 | ] 21 | 22 | [tool.poetry.dependencies] 23 | python = "^3.6" 24 | 25 | [tool.poetry.dev-dependencies] 26 | uvloop = { version="^0", optional=true, markers = "sys_platform != 'win32'"} 27 | 28 | [build-system] 29 | requires = ["poetry>=0.12"] 30 | build-backend = "poetry.masonry.api" 31 | -------------------------------------------------------------------------------- /tests/test.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | import asyncio 4 | from concurrent.futures import ProcessPoolExecutor 5 | from aio_pool import AioPool 6 | 7 | 8 | import unittest 9 | 10 | 11 | async def initliazer_async(param): 12 | get_initializer_param.param = param 13 | 14 | 15 | def initializer_sync(param): 16 | get_initializer_param.param = param 17 | 18 | 19 | async def get_initializer_param(): 20 | return get_initializer_param.param 21 | 22 | 23 | def pow2_sync(n): 24 | return n ** 2 25 | 26 | 27 | async def pow2_async(n): 28 | return n ** 2 29 | 30 | 31 | def mul_sync(a, b): 32 | return a * b 33 | 34 | 35 | async def mul_async(a, b): 36 | return a * b 37 | 38 | 39 | class TestAIOPool(unittest.TestCase): 40 | def setUp(self) -> None: 41 | logging.basicConfig(level=logging.DEBUG) 42 | os.environ["PYTHONASYNCIODEBUG"] = "1" 43 | 44 | def test_initializer(self): 45 | param_val = True 46 | with AioPool( 47 
| processes=1, initializer=initliazer_async, initargs=(param_val,) 48 | ) as pool: 49 | res = pool.apply(get_initializer_param) 50 | self.assertEqual(res, param_val) 51 | with AioPool( 52 | processes=1, initializer=initializer_sync, initargs=(param_val,) 53 | ) as pool: 54 | res = pool.apply(get_initializer_param) 55 | self.assertEqual(res, param_val) 56 | 57 | def test_map_sync(self): 58 | with AioPool(processes=2, concurrency_limit=2) as pool: 59 | inputs = [i for i in range(40)] 60 | expected = [i ** 2 for i in inputs] 61 | for chunksize in [1, 2, 4]: 62 | result = pool.map(pow2_sync, inputs, chunksize=chunksize) 63 | self.assertListEqual(expected, result) 64 | 65 | def test_map_async(self): 66 | with AioPool(processes=2, concurrency_limit=2) as pool: 67 | inputs = [i for i in range(40)] 68 | expected = [i ** 2 for i in inputs] 69 | for chunksize in [1, 2, 4]: 70 | result = pool.map(pow2_async, inputs, chunksize=chunksize) 71 | self.assertListEqual(expected, result) 72 | 73 | def test_starmap_sync(self): 74 | with AioPool(processes=2, concurrency_limit=2) as pool: 75 | inputs = [(i, i // 2) for i in range(40)] 76 | expected = [mul_sync(a, b) for a, b in inputs] 77 | for chunksize in [1, 2, 4]: 78 | result = pool.starmap(mul_sync, inputs, chunksize=chunksize) 79 | self.assertListEqual(expected, result) 80 | 81 | def test_starmap_async(self): 82 | with AioPool(processes=2, concurrency_limit=2) as pool: 83 | inputs = [(i, i // 2) for i in range(40)] 84 | expected = [mul_sync(a, b) for a, b in inputs] 85 | for chunksize in [1, 2, 4]: 86 | result = pool.starmap(mul_async, inputs, chunksize=chunksize) 87 | self.assertListEqual(expected, result) 88 | 89 | if __name__ == "__main__": 90 | unittest.main() 91 | --------------------------------------------------------------------------------